Remove parameter keep_aligning from get_inner_reference.
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
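/* As a quick orientation (editorial sketch, not part of the original
   source): a typical middle-end caller combines these entry points to
   build a folded size expression, e.g.

       tree bytes = size_binop (PLUS_EXPR,
                                size_binop (MULT_EXPR, nelts, size_int (4)),
                                size_int (8));

   where `nelts' is assumed to be an existing sizetype tree; when all
   operands are INTEGER_CSTs the result folds to a single constant.  */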
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "stor-layout.h"
50 #include "calls.h"
51 #include "tree-iterator.h"
52 #include "realmpfr.h"
53 #include "rtl.h"
54 #include "expr.h"
55 #include "tm_p.h"
56 #include "target.h"
57 #include "diagnostic-core.h"
58 #include "intl.h"
59 #include "langhooks.h"
60 #include "md5.h"
61 #include "basic-block.h"
62 #include "tree-ssa-alias.h"
63 #include "internal-fn.h"
64 #include "tree-eh.h"
65 #include "gimple-expr.h"
66 #include "is-a.h"
67 #include "gimple.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
71
72 /* Nonzero if we are folding constants inside an initializer; zero
73 otherwise. */
74 int folding_initializer = 0;
75
76 /* The following constants represent a bit based encoding of GCC's
77 comparison operators. This encoding simplifies transformations
78 on relational comparison operators, such as AND and OR. */
79 enum comparison_code {
80 COMPCODE_FALSE = 0,
81 COMPCODE_LT = 1,
82 COMPCODE_EQ = 2,
83 COMPCODE_LE = 3,
84 COMPCODE_GT = 4,
85 COMPCODE_LTGT = 5,
86 COMPCODE_GE = 6,
87 COMPCODE_ORD = 7,
88 COMPCODE_UNORD = 8,
89 COMPCODE_UNLT = 9,
90 COMPCODE_UNEQ = 10,
91 COMPCODE_UNLE = 11,
92 COMPCODE_UNGT = 12,
93 COMPCODE_NE = 13,
94 COMPCODE_UNGE = 14,
95 COMPCODE_TRUE = 15
96 };
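/* Editorial note: the encoding is chosen so that bitwise operations on
   codes mirror logical operations on the predicates.  The low bits
   stand for LT (1), EQ (2) and GT (4), and bit value 8 marks the
   unordered relation.  For example:

       COMPCODE_LE   == COMPCODE_LT | COMPCODE_EQ       (1 | 2 == 3)
       COMPCODE_UNLT == COMPCODE_UNORD | COMPCODE_LT    (8 | 1 == 9)
       COMPCODE_NE   == COMPCODE_UNORD | COMPCODE_LTGT  (8 | 5 == 13)

   so ANDing or ORing two codes yields the code of the conjunction or
   disjunction of the corresponding comparisons.  */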
97
98 static bool negate_mathfn_p (enum built_in_function);
99 static bool negate_expr_p (tree);
100 static tree negate_expr (tree);
101 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
102 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
103 static tree const_binop (enum tree_code, tree, tree);
104 static enum comparison_code comparison_to_compcode (enum tree_code);
105 static enum tree_code compcode_to_comparison (enum comparison_code);
106 static int operand_equal_for_comparison_p (tree, tree, tree);
107 static int twoval_comparison_p (tree, tree *, tree *, int *);
108 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
109 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
110 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
111 static tree make_bit_field_ref (location_t, tree, tree,
112 HOST_WIDE_INT, HOST_WIDE_INT, int);
113 static tree optimize_bit_field_compare (location_t, enum tree_code,
114 tree, tree, tree);
115 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
116 HOST_WIDE_INT *,
117 enum machine_mode *, int *, int *,
118 tree *, tree *);
119 static int all_ones_mask_p (const_tree, int);
120 static tree sign_bit_p (tree, const_tree);
121 static int simple_operand_p (const_tree);
122 static bool simple_operand_p_2 (tree);
123 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
124 static tree range_predecessor (tree);
125 static tree range_successor (tree);
126 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
127 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
128 static tree unextend (tree, int, int, tree);
129 static tree optimize_minmax_comparison (location_t, enum tree_code,
130 tree, tree, tree);
131 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
132 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
133 static tree fold_binary_op_with_conditional_arg (location_t,
134 enum tree_code, tree,
135 tree, tree,
136 tree, tree, int);
137 static tree fold_mathfn_compare (location_t,
138 enum built_in_function, enum tree_code,
139 tree, tree, tree);
140 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
141 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
142 static bool reorder_operands_p (const_tree, const_tree);
143 static tree fold_negate_const (tree, tree);
144 static tree fold_not_const (const_tree, tree);
145 static tree fold_relational_const (enum tree_code, tree, tree, tree);
146 static tree fold_convert_const (enum tree_code, tree, tree);
147
148 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
149 Otherwise, return LOC. */
150
151 static location_t
152 expr_location_or (tree t, location_t loc)
153 {
154 location_t tloc = EXPR_LOCATION (t);
155 return tloc == UNKNOWN_LOCATION ? loc : tloc;
156 }
157
158 /* Similar to protected_set_expr_location, but never modify X in place;
159 if the location can and needs to be set, unshare X first. */
160
161 static inline tree
162 protected_set_expr_location_unshare (tree x, location_t loc)
163 {
164 if (CAN_HAVE_LOCATION_P (x)
165 && EXPR_LOCATION (x) != loc
166 && !(TREE_CODE (x) == SAVE_EXPR
167 || TREE_CODE (x) == TARGET_EXPR
168 || TREE_CODE (x) == BIND_EXPR))
169 {
170 x = copy_node (x);
171 SET_EXPR_LOCATION (x, loc);
172 }
173 return x;
174 }
175 \f
176 /* If ARG2 divides ARG1 with zero remainder, carries out the division
177 of type CODE and returns the quotient.
178 Otherwise returns NULL_TREE. */
179
180 tree
181 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
182 {
183 double_int quo, rem;
184 int uns;
185
186 /* The sign of the division is taken from operand two, which
187 does the correct thing for POINTER_PLUS_EXPR, where we want
188 a signed division. */
189 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
190
191 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
192 uns, code, &rem);
193
194 if (rem.is_zero ())
195 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
196
197 return NULL_TREE;
198 }
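/* For instance (editorial example): with CODE == EXACT_DIV_EXPR,
   ARG1 == 12 and ARG2 == 4 yield the INTEGER_CST 3, while ARG1 == 13
   would yield NULL_TREE because the remainder 1 is nonzero.  */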
199 \f
200 /* This is nonzero if we should defer warnings about undefined
201 overflow. This facility exists because these warnings are a
202 special case. The code to estimate loop iterations does not want
203 to issue any warnings, since it works with expressions which do not
204 occur in user code. Various bits of cleanup code call fold(), but
205 only use the result if it has certain characteristics (e.g., is a
206 constant); that code only wants to issue a warning if the result is
207 used. */
208
209 static int fold_deferring_overflow_warnings;
210
211 /* If a warning about undefined overflow is deferred, this is the
212 warning. Note that this may cause us to turn two warnings into
213 one, but that is fine since it is sufficient to only give one
214 warning per expression. */
215
216 static const char* fold_deferred_overflow_warning;
217
218 /* If a warning about undefined overflow is deferred, this is the
219 level at which the warning should be emitted. */
220
221 static enum warn_strict_overflow_code fold_deferred_overflow_code;
222
223 /* Start deferring overflow warnings. We could use a stack here to
224 permit nested calls, but at present it is not necessary. */
225
226 void
227 fold_defer_overflow_warnings (void)
228 {
229 ++fold_deferring_overflow_warnings;
230 }
231
232 /* Stop deferring overflow warnings. If there is a pending warning,
233 and ISSUE is true, then issue the warning if appropriate. STMT is
234 the statement with which the warning should be associated (used for
235 location information); STMT may be NULL. CODE is the level of the
236 warning--a warn_strict_overflow_code value. This function will use
237 the smaller of CODE and the deferred code when deciding whether to
238 issue the warning. CODE may be zero to mean to always use the
239 deferred code. */
240
241 void
242 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
243 {
244 const char *warnmsg;
245 location_t locus;
246
247 gcc_assert (fold_deferring_overflow_warnings > 0);
248 --fold_deferring_overflow_warnings;
249 if (fold_deferring_overflow_warnings > 0)
250 {
251 if (fold_deferred_overflow_warning != NULL
252 && code != 0
253 && code < (int) fold_deferred_overflow_code)
254 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
255 return;
256 }
257
258 warnmsg = fold_deferred_overflow_warning;
259 fold_deferred_overflow_warning = NULL;
260
261 if (!issue || warnmsg == NULL)
262 return;
263
264 if (gimple_no_warning_p (stmt))
265 return;
266
267 /* Use the smallest code level when deciding to issue the
268 warning. */
269 if (code == 0 || code > (int) fold_deferred_overflow_code)
270 code = fold_deferred_overflow_code;
271
272 if (!issue_strict_overflow_warning (code))
273 return;
274
275 if (stmt == NULL)
276 locus = input_location;
277 else
278 locus = gimple_location (stmt);
279 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
280 }
281
282 /* Stop deferring overflow warnings, ignoring any deferred
283 warnings. */
284
285 void
286 fold_undefer_and_ignore_overflow_warnings (void)
287 {
288 fold_undefer_overflow_warnings (false, NULL, 0);
289 }
290
291 /* Whether we are deferring overflow warnings. */
292
293 bool
294 fold_deferring_overflow_warnings_p (void)
295 {
296 return fold_deferring_overflow_warnings > 0;
297 }
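/* A typical use of the deferral API above (editorial sketch): a caller
   that folds speculatively and only wants a warning if the result is
   kept might write

       fold_defer_overflow_warnings ();
       tree res = fold_binary (PLUS_EXPR, type, op0, op1);
       fold_undefer_overflow_warnings (res != NULL_TREE, stmt,
                                       WARN_STRICT_OVERFLOW_MISC);

   where `type', `op0', `op1' and `stmt' are assumed to exist in the
   caller; if RES is NULL_TREE the deferred warning is dropped.  */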
298
299 /* This is called when we fold something based on the fact that signed
300 overflow is undefined. */
301
302 static void
303 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
304 {
305 if (fold_deferring_overflow_warnings > 0)
306 {
307 if (fold_deferred_overflow_warning == NULL
308 || wc < fold_deferred_overflow_code)
309 {
310 fold_deferred_overflow_warning = gmsgid;
311 fold_deferred_overflow_code = wc;
312 }
313 }
314 else if (issue_strict_overflow_warning (wc))
315 warning (OPT_Wstrict_overflow, gmsgid);
316 }
317 \f
318 /* Return true if the built-in mathematical function specified by CODE
319 is odd, i.e. -f(x) == f(-x). */
320
321 static bool
322 negate_mathfn_p (enum built_in_function code)
323 {
324 switch (code)
325 {
326 CASE_FLT_FN (BUILT_IN_ASIN):
327 CASE_FLT_FN (BUILT_IN_ASINH):
328 CASE_FLT_FN (BUILT_IN_ATAN):
329 CASE_FLT_FN (BUILT_IN_ATANH):
330 CASE_FLT_FN (BUILT_IN_CASIN):
331 CASE_FLT_FN (BUILT_IN_CASINH):
332 CASE_FLT_FN (BUILT_IN_CATAN):
333 CASE_FLT_FN (BUILT_IN_CATANH):
334 CASE_FLT_FN (BUILT_IN_CBRT):
335 CASE_FLT_FN (BUILT_IN_CPROJ):
336 CASE_FLT_FN (BUILT_IN_CSIN):
337 CASE_FLT_FN (BUILT_IN_CSINH):
338 CASE_FLT_FN (BUILT_IN_CTAN):
339 CASE_FLT_FN (BUILT_IN_CTANH):
340 CASE_FLT_FN (BUILT_IN_ERF):
341 CASE_FLT_FN (BUILT_IN_LLROUND):
342 CASE_FLT_FN (BUILT_IN_LROUND):
343 CASE_FLT_FN (BUILT_IN_ROUND):
344 CASE_FLT_FN (BUILT_IN_SIN):
345 CASE_FLT_FN (BUILT_IN_SINH):
346 CASE_FLT_FN (BUILT_IN_TAN):
347 CASE_FLT_FN (BUILT_IN_TANH):
348 CASE_FLT_FN (BUILT_IN_TRUNC):
349 return true;
350
351 CASE_FLT_FN (BUILT_IN_LLRINT):
352 CASE_FLT_FN (BUILT_IN_LRINT):
353 CASE_FLT_FN (BUILT_IN_NEARBYINT):
354 CASE_FLT_FN (BUILT_IN_RINT):
355 return !flag_rounding_math;
356
357 default:
358 break;
359 }
360 return false;
361 }
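/* Concretely (editorial example): sin is odd, so -sin (x) may be
   rewritten as sin (-x).  rint qualifies only when -frounding-math is
   off: under rounding toward +infinity, rint (0.5) is 1.0 while
   rint (-0.5) is -0.0, so -rint (x) and rint (-x) can differ.  */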
362
363 /* Check whether we may negate an integer constant T without causing
364 overflow. */
365
366 bool
367 may_negate_without_overflow_p (const_tree t)
368 {
369 unsigned HOST_WIDE_INT val;
370 unsigned int prec;
371 tree type;
372
373 gcc_assert (TREE_CODE (t) == INTEGER_CST);
374
375 type = TREE_TYPE (t);
376 if (TYPE_UNSIGNED (type))
377 return false;
378
379 prec = TYPE_PRECISION (type);
380 if (prec > HOST_BITS_PER_WIDE_INT)
381 {
382 if (TREE_INT_CST_LOW (t) != 0)
383 return true;
384 prec -= HOST_BITS_PER_WIDE_INT;
385 val = TREE_INT_CST_HIGH (t);
386 }
387 else
388 val = TREE_INT_CST_LOW (t);
389 if (prec < HOST_BITS_PER_WIDE_INT)
390 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
391 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
392 }
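/* Worked example (editorial): for a 32-bit signed type PREC is 32, so
   the check reduces to VAL != 0x80000000; the only constant whose
   negation overflows is INT_MIN, since 2147483648 is not
   representable in the type.  */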
393
394 /* Determine whether an expression T can be cheaply negated using
395 the function negate_expr without introducing undefined overflow. */
396
397 static bool
398 negate_expr_p (tree t)
399 {
400 tree type;
401
402 if (t == 0)
403 return false;
404
405 type = TREE_TYPE (t);
406
407 STRIP_SIGN_NOPS (t);
408 switch (TREE_CODE (t))
409 {
410 case INTEGER_CST:
411 if (TYPE_OVERFLOW_WRAPS (type))
412 return true;
413
414 /* Check that -CST will not overflow type. */
415 return may_negate_without_overflow_p (t);
416 case BIT_NOT_EXPR:
417 return (INTEGRAL_TYPE_P (type)
418 && TYPE_OVERFLOW_WRAPS (type));
419
420 case FIXED_CST:
421 case NEGATE_EXPR:
422 return true;
423
424 case REAL_CST:
425 /* We want to canonicalize to positive real constants. Pretend
426 that only negative ones can be easily negated. */
427 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
428
429 case COMPLEX_CST:
430 return negate_expr_p (TREE_REALPART (t))
431 && negate_expr_p (TREE_IMAGPART (t));
432
433 case VECTOR_CST:
434 {
435 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
436 return true;
437
438 int count = TYPE_VECTOR_SUBPARTS (type), i;
439
440 for (i = 0; i < count; i++)
441 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
442 return false;
443
444 return true;
445 }
446
447 case COMPLEX_EXPR:
448 return negate_expr_p (TREE_OPERAND (t, 0))
449 && negate_expr_p (TREE_OPERAND (t, 1));
450
451 case CONJ_EXPR:
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case PLUS_EXPR:
455 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
456 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
457 return false;
458 /* -(A + B) -> (-B) - A. */
459 if (negate_expr_p (TREE_OPERAND (t, 1))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1)))
462 return true;
463 /* -(A + B) -> (-A) - B. */
464 return negate_expr_p (TREE_OPERAND (t, 0));
465
466 case MINUS_EXPR:
467 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
468 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
469 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1));
472
473 case MULT_EXPR:
474 if (TYPE_UNSIGNED (TREE_TYPE (t)))
475 break;
476
477 /* Fall through. */
478
479 case RDIV_EXPR:
480 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
481 return negate_expr_p (TREE_OPERAND (t, 1))
482 || negate_expr_p (TREE_OPERAND (t, 0));
483 break;
484
485 case TRUNC_DIV_EXPR:
486 case ROUND_DIV_EXPR:
487 case FLOOR_DIV_EXPR:
488 case CEIL_DIV_EXPR:
489 case EXACT_DIV_EXPR:
490 /* In general we can't negate A / B, because if A is INT_MIN and
491 B is 1, we may turn this into INT_MIN / -1 which is undefined
492 and actually traps on some architectures. But if overflow is
493 undefined, we can negate, because - (INT_MIN / 1) is an
494 overflow. */
495 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
496 {
497 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
498 break;
499 /* If overflow is undefined then we have to be careful because
500 we ask whether it's ok to associate the negate with the
501 division, which is not ok, for example, for
502 -((a - b) / c), where (-(a - b)) / c may invoke undefined
503 overflow by negating INT_MIN. So do not use
504 negate_expr_p here but open-code the two important cases. */
505 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
506 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
507 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
508 return true;
509 }
510 else if (negate_expr_p (TREE_OPERAND (t, 0)))
511 return true;
512 return negate_expr_p (TREE_OPERAND (t, 1));
513
514 case NOP_EXPR:
515 /* Negate -((double)float) as (double)(-float). */
516 if (TREE_CODE (type) == REAL_TYPE)
517 {
518 tree tem = strip_float_extensions (t);
519 if (tem != t)
520 return negate_expr_p (tem);
521 }
522 break;
523
524 case CALL_EXPR:
525 /* Negate -f(x) as f(-x). */
526 if (negate_mathfn_p (builtin_mathfn_code (t)))
527 return negate_expr_p (CALL_EXPR_ARG (t, 0));
528 break;
529
530 case RSHIFT_EXPR:
531 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
532 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
533 {
534 tree op1 = TREE_OPERAND (t, 1);
535 if (TREE_INT_CST_HIGH (op1) == 0
536 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
537 == TREE_INT_CST_LOW (op1))
538 return true;
539 }
540 break;
541
542 default:
543 break;
544 }
545 return false;
546 }
547
548 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
549 simplification is possible.
550 If negate_expr_p would return true for T, NULL_TREE will never be
551 returned. */
552
553 static tree
554 fold_negate_expr (location_t loc, tree t)
555 {
556 tree type = TREE_TYPE (t);
557 tree tem;
558
559 switch (TREE_CODE (t))
560 {
561 /* Convert - (~A) to A + 1. */
562 case BIT_NOT_EXPR:
563 if (INTEGRAL_TYPE_P (type))
564 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
565 build_one_cst (type));
566 break;
567
568 case INTEGER_CST:
569 tem = fold_negate_const (t, type);
570 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
571 || !TYPE_OVERFLOW_TRAPS (type))
572 return tem;
573 break;
574
575 case REAL_CST:
576 tem = fold_negate_const (t, type);
577 /* Two's complement FP formats, such as c4x, may overflow. */
578 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
579 return tem;
580 break;
581
582 case FIXED_CST:
583 tem = fold_negate_const (t, type);
584 return tem;
585
586 case COMPLEX_CST:
587 {
588 tree rpart = negate_expr (TREE_REALPART (t));
589 tree ipart = negate_expr (TREE_IMAGPART (t));
590
591 if ((TREE_CODE (rpart) == REAL_CST
592 && TREE_CODE (ipart) == REAL_CST)
593 || (TREE_CODE (rpart) == INTEGER_CST
594 && TREE_CODE (ipart) == INTEGER_CST))
595 return build_complex (type, rpart, ipart);
596 }
597 break;
598
599 case VECTOR_CST:
600 {
601 int count = TYPE_VECTOR_SUBPARTS (type), i;
602 tree *elts = XALLOCAVEC (tree, count);
603
604 for (i = 0; i < count; i++)
605 {
606 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
607 if (elts[i] == NULL_TREE)
608 return NULL_TREE;
609 }
610
611 return build_vector (type, elts);
612 }
613
614 case COMPLEX_EXPR:
615 if (negate_expr_p (t))
616 return fold_build2_loc (loc, COMPLEX_EXPR, type,
617 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
618 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
619 break;
620
621 case CONJ_EXPR:
622 if (negate_expr_p (t))
623 return fold_build1_loc (loc, CONJ_EXPR, type,
624 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
625 break;
626
627 case NEGATE_EXPR:
628 return TREE_OPERAND (t, 0);
629
630 case PLUS_EXPR:
631 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
632 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
633 {
634 /* -(A + B) -> (-B) - A. */
635 if (negate_expr_p (TREE_OPERAND (t, 1))
636 && reorder_operands_p (TREE_OPERAND (t, 0),
637 TREE_OPERAND (t, 1)))
638 {
639 tem = negate_expr (TREE_OPERAND (t, 1));
640 return fold_build2_loc (loc, MINUS_EXPR, type,
641 tem, TREE_OPERAND (t, 0));
642 }
643
644 /* -(A + B) -> (-A) - B. */
645 if (negate_expr_p (TREE_OPERAND (t, 0)))
646 {
647 tem = negate_expr (TREE_OPERAND (t, 0));
648 return fold_build2_loc (loc, MINUS_EXPR, type,
649 tem, TREE_OPERAND (t, 1));
650 }
651 }
652 break;
653
654 case MINUS_EXPR:
655 /* - (A - B) -> B - A */
656 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
657 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
658 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
659 return fold_build2_loc (loc, MINUS_EXPR, type,
660 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
661 break;
662
663 case MULT_EXPR:
664 if (TYPE_UNSIGNED (type))
665 break;
666
667 /* Fall through. */
668
669 case RDIV_EXPR:
670 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
671 {
672 tem = TREE_OPERAND (t, 1);
673 if (negate_expr_p (tem))
674 return fold_build2_loc (loc, TREE_CODE (t), type,
675 TREE_OPERAND (t, 0), negate_expr (tem));
676 tem = TREE_OPERAND (t, 0);
677 if (negate_expr_p (tem))
678 return fold_build2_loc (loc, TREE_CODE (t), type,
679 negate_expr (tem), TREE_OPERAND (t, 1));
680 }
681 break;
682
683 case TRUNC_DIV_EXPR:
684 case ROUND_DIV_EXPR:
685 case FLOOR_DIV_EXPR:
686 case CEIL_DIV_EXPR:
687 case EXACT_DIV_EXPR:
688 /* In general we can't negate A / B, because if A is INT_MIN and
689 B is 1, we may turn this into INT_MIN / -1 which is undefined
690 and actually traps on some architectures. But if overflow is
691 undefined, we can negate, because - (INT_MIN / 1) is an
692 overflow. */
693 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
694 {
695 const char * const warnmsg = G_("assuming signed overflow does not "
696 "occur when negating a division");
697 tem = TREE_OPERAND (t, 1);
698 if (negate_expr_p (tem))
699 {
700 if (INTEGRAL_TYPE_P (type)
701 && (TREE_CODE (tem) != INTEGER_CST
702 || integer_onep (tem)))
703 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
704 return fold_build2_loc (loc, TREE_CODE (t), type,
705 TREE_OPERAND (t, 0), negate_expr (tem));
706 }
707 /* If overflow is undefined then we have to be careful because
708 we ask whether it's ok to associate the negate with the
709 division, which is not ok, for example, for
710 -((a - b) / c), where (-(a - b)) / c may invoke undefined
711 overflow by negating INT_MIN. So do not use
712 negate_expr_p here but open-code the two important cases. */
713 tem = TREE_OPERAND (t, 0);
714 if ((INTEGRAL_TYPE_P (type)
715 && (TREE_CODE (tem) == NEGATE_EXPR
716 || (TREE_CODE (tem) == INTEGER_CST
717 && may_negate_without_overflow_p (tem))))
718 || !INTEGRAL_TYPE_P (type))
719 return fold_build2_loc (loc, TREE_CODE (t), type,
720 negate_expr (tem), TREE_OPERAND (t, 1));
721 }
722 break;
723
724 case NOP_EXPR:
725 /* Convert -((double)float) into (double)(-float). */
726 if (TREE_CODE (type) == REAL_TYPE)
727 {
728 tem = strip_float_extensions (t);
729 if (tem != t && negate_expr_p (tem))
730 return fold_convert_loc (loc, type, negate_expr (tem));
731 }
732 break;
733
734 case CALL_EXPR:
735 /* Negate -f(x) as f(-x). */
736 if (negate_mathfn_p (builtin_mathfn_code (t))
737 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
738 {
739 tree fndecl, arg;
740
741 fndecl = get_callee_fndecl (t);
742 arg = negate_expr (CALL_EXPR_ARG (t, 0));
743 return build_call_expr_loc (loc, fndecl, 1, arg);
744 }
745 break;
746
747 case RSHIFT_EXPR:
748 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
749 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
750 {
751 tree op1 = TREE_OPERAND (t, 1);
752 if (TREE_INT_CST_HIGH (op1) == 0
753 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
754 == TREE_INT_CST_LOW (op1))
755 {
756 tree ntype = TYPE_UNSIGNED (type)
757 ? signed_type_for (type)
758 : unsigned_type_for (type);
759 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
760 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
761 return fold_convert_loc (loc, type, temp);
762 }
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 return NULL_TREE;
771 }
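/* Editorial note on the RSHIFT_EXPR case above: for a 32-bit int, the
   arithmetic shift (int) x >> 31 evaluates to 0 or -1 depending on
   the sign bit, so its negation is 0 or 1, which is exactly what the
   logical shift (unsigned) x >> 31 produces.  */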
772
773 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
774 negated in a simpler way. Also allow T to be NULL_TREE, in which case
775 return NULL_TREE. */
776
777 static tree
778 negate_expr (tree t)
779 {
780 tree type, tem;
781 location_t loc;
782
783 if (t == NULL_TREE)
784 return NULL_TREE;
785
786 loc = EXPR_LOCATION (t);
787 type = TREE_TYPE (t);
788 STRIP_SIGN_NOPS (t);
789
790 tem = fold_negate_expr (loc, t);
791 if (!tem)
792 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
793 return fold_convert_loc (loc, type, tem);
794 }
795 \f
796 /* Split a tree IN into constant, literal and variable parts that could be
797 combined with CODE to make IN. "constant" means an expression with
798 TREE_CONSTANT but that isn't an actual constant. CODE must be a
799 commutative arithmetic operation. Store the constant part into *CONP,
800 the literal in *LITP and return the variable part. If a part isn't
801 present, set it to null. If the tree does not decompose in this way,
802 return the entire tree as the variable part and the other parts as null.
803
804 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
805 case, we negate an operand that was subtracted, except when it is a
806 literal, for which we use *MINUS_LITP instead.
807
808 If NEGATE_P is true, we are negating all of IN, again except a literal
809 for which we use *MINUS_LITP instead.
810
811 If IN is itself a literal or constant, return it as appropriate.
812
813 Note that we do not guarantee that any of the three values will be the
814 same type as IN, but they will have the same signedness and mode. */
815
816 static tree
817 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
818 tree *minus_litp, int negate_p)
819 {
820 tree var = 0;
821
822 *conp = 0;
823 *litp = 0;
824 *minus_litp = 0;
825
826 /* Strip any conversions that don't change the machine mode or signedness. */
827 STRIP_SIGN_NOPS (in);
828
829 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
830 || TREE_CODE (in) == FIXED_CST)
831 *litp = in;
832 else if (TREE_CODE (in) == code
833 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
834 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
835 /* We can associate addition and subtraction together (even
836 though the C standard doesn't say so) for integers because
837 the value is not affected. For reals, the value might be
838 affected, so we can't. */
839 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
840 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
841 {
842 tree op0 = TREE_OPERAND (in, 0);
843 tree op1 = TREE_OPERAND (in, 1);
844 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
845 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
846
847 /* First see if either of the operands is a literal, then a constant. */
848 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
849 || TREE_CODE (op0) == FIXED_CST)
850 *litp = op0, op0 = 0;
851 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
852 || TREE_CODE (op1) == FIXED_CST)
853 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
854
855 if (op0 != 0 && TREE_CONSTANT (op0))
856 *conp = op0, op0 = 0;
857 else if (op1 != 0 && TREE_CONSTANT (op1))
858 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
859
860 /* If we haven't dealt with either operand, this is not a case we can
861 decompose. Otherwise, VAR is either of the ones remaining, if any. */
862 if (op0 != 0 && op1 != 0)
863 var = in;
864 else if (op0 != 0)
865 var = op0;
866 else
867 var = op1, neg_var_p = neg1_p;
868
869 /* Now do any needed negations. */
870 if (neg_litp_p)
871 *minus_litp = *litp, *litp = 0;
872 if (neg_conp_p)
873 *conp = negate_expr (*conp);
874 if (neg_var_p)
875 var = negate_expr (var);
876 }
877 else if (TREE_CODE (in) == BIT_NOT_EXPR
878 && code == PLUS_EXPR)
879 {
880 /* -X - 1 is folded to ~X; undo that here. */
881 *minus_litp = build_one_cst (TREE_TYPE (in));
882 var = negate_expr (TREE_OPERAND (in, 0));
883 }
884 else if (TREE_CONSTANT (in))
885 *conp = in;
886 else
887 var = in;
888
889 if (negate_p)
890 {
891 if (*litp)
892 *minus_litp = *litp, *litp = 0;
893 else if (*minus_litp)
894 *litp = *minus_litp, *minus_litp = 0;
895 *conp = negate_expr (*conp);
896 var = negate_expr (var);
897 }
898
899 return var;
900 }
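/* Worked examples (editorial): with CODE == PLUS_EXPR, IN == x - 5
   splits into VAR == x and *MINUS_LITP == 5, while IN == &a + 4
   splits into *CONP == &a (TREE_CONSTANT but not a literal) and
   *LITP == 4, with a null variable part.  */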
901
902 /* Re-associate trees split by the above function. T1 and T2 are
903 either expressions to associate or null. Return the new
904 expression, if any. LOC is the location of the new expression. If
905 we build an operation, do it in TYPE and with CODE. */
906
907 static tree
908 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 {
910 if (t1 == 0)
911 return t2;
912 else if (t2 == 0)
913 return t1;
914
915 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
916 try to fold this since we will have infinite recursion. But do
917 deal with any NEGATE_EXPRs. */
918 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
919 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
920 {
921 if (code == PLUS_EXPR)
922 {
923 if (TREE_CODE (t1) == NEGATE_EXPR)
924 return build2_loc (loc, MINUS_EXPR, type,
925 fold_convert_loc (loc, type, t2),
926 fold_convert_loc (loc, type,
927 TREE_OPERAND (t1, 0)));
928 else if (TREE_CODE (t2) == NEGATE_EXPR)
929 return build2_loc (loc, MINUS_EXPR, type,
930 fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type,
932 TREE_OPERAND (t2, 0)));
933 else if (integer_zerop (t2))
934 return fold_convert_loc (loc, type, t1);
935 }
936 else if (code == MINUS_EXPR)
937 {
938 if (integer_zerop (t2))
939 return fold_convert_loc (loc, type, t1);
940 }
941
942 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
944 }
945
946 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
947 fold_convert_loc (loc, type, t2));
948 }
949 \f
950 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
951 for use in int_const_binop, size_binop and size_diffop. */
952
953 static bool
954 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
955 {
956 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
957 return false;
958 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
959 return false;
960
961 switch (code)
962 {
963 case LSHIFT_EXPR:
964 case RSHIFT_EXPR:
965 case LROTATE_EXPR:
966 case RROTATE_EXPR:
967 return true;
968
969 default:
970 break;
971 }
972
973 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
974 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
975 && TYPE_MODE (type1) == TYPE_MODE (type2);
976 }
977
978
979 /* Combine two integer constants ARG1 and ARG2 under operation CODE
980 to produce a new constant. Return NULL_TREE if we don't know how
981 to evaluate CODE at compile-time. */
982
983 static tree
984 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
985 int overflowable)
986 {
987 double_int op1, op2, res, tmp;
988 tree t;
989 tree type = TREE_TYPE (arg1);
990 bool uns = TYPE_UNSIGNED (type);
991 bool overflow = false;
992
993 op1 = tree_to_double_int (arg1);
994 op2 = tree_to_double_int (arg2);
995
996 switch (code)
997 {
998 case BIT_IOR_EXPR:
999 res = op1 | op2;
1000 break;
1001
1002 case BIT_XOR_EXPR:
1003 res = op1 ^ op2;
1004 break;
1005
1006 case BIT_AND_EXPR:
1007 res = op1 & op2;
1008 break;
1009
1010 case RSHIFT_EXPR:
1011 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1012 break;
1013
1014 case LSHIFT_EXPR:
1015 /* It's unclear from the C standard whether shifts can overflow.
1016 The following code ignores overflow; perhaps a C standard
1017 interpretation ruling is needed. */
1018 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1019 break;
1020
1021 case RROTATE_EXPR:
1022 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1023 break;
1024
1025 case LROTATE_EXPR:
1026 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1027 break;
1028
1029 case PLUS_EXPR:
1030 res = op1.add_with_sign (op2, false, &overflow);
1031 break;
1032
1033 case MINUS_EXPR:
1034 res = op1.sub_with_overflow (op2, &overflow);
1035 break;
1036
1037 case MULT_EXPR:
1038 res = op1.mul_with_sign (op2, false, &overflow);
1039 break;
1040
1041 case MULT_HIGHPART_EXPR:
1042 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1043 {
1044 bool dummy_overflow;
1045 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1046 return NULL_TREE;
1047 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1048 }
1049 else
1050 {
1051 bool dummy_overflow;
1052 /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
1053 is performed in twice the precision of the arguments. */
1054 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1055 res = tmp.rshift (TYPE_PRECISION (type),
1056 2 * TYPE_PRECISION (type), !uns);
1057 }
1058 break;
1059
1060 case TRUNC_DIV_EXPR:
1061 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1062 case EXACT_DIV_EXPR:
1063 /* This is a shortcut for a common special case. */
1064 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1065 && !TREE_OVERFLOW (arg1)
1066 && !TREE_OVERFLOW (arg2)
1067 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1068 {
1069 if (code == CEIL_DIV_EXPR)
1070 op1.low += op2.low - 1;
1071
1072 res.low = op1.low / op2.low, res.high = 0;
1073 break;
1074 }
1075
1076 /* ... fall through ... */
1077
1078 case ROUND_DIV_EXPR:
1079 if (op2.is_zero ())
1080 return NULL_TREE;
1081 if (op2.is_one ())
1082 {
1083 res = op1;
1084 break;
1085 }
1086 if (op1 == op2 && !op1.is_zero ())
1087 {
1088 res = double_int_one;
1089 break;
1090 }
1091 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1092 break;
1093
1094 case TRUNC_MOD_EXPR:
1095 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1096 /* This is a shortcut for a common special case. */
1097 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1098 && !TREE_OVERFLOW (arg1)
1099 && !TREE_OVERFLOW (arg2)
1100 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1101 {
1102 if (code == CEIL_MOD_EXPR)
1103 op1.low += op2.low - 1;
1104 res.low = op1.low % op2.low, res.high = 0;
1105 break;
1106 }
1107
1108 /* ... fall through ... */
1109
1110 case ROUND_MOD_EXPR:
1111 if (op2.is_zero ())
1112 return NULL_TREE;
1113 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1114 break;
1115
1116 case MIN_EXPR:
1117 res = op1.min (op2, uns);
1118 break;
1119
1120 case MAX_EXPR:
1121 res = op1.max (op2, uns);
1122 break;
1123
1124 default:
1125 return NULL_TREE;
1126 }
1127
1128 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1129 (!uns && overflow)
1130 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1131
1132 return t;
1133 }
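/* For example (editorial): adding the signed 32-bit constants
   0x7fffffff and 1 wraps to 0x80000000 (INT_MIN); force_fit_type_double
   then returns that INTEGER_CST with TREE_OVERFLOW set, so callers can
   see that the arithmetic overflowed.  */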
1134
1135 tree
1136 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1137 {
1138 return int_const_binop_1 (code, arg1, arg2, 1);
1139 }
1140
1141 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1142 constant. We assume ARG1 and ARG2 have the same data type, or at least
1143 are the same kind of constant and the same machine mode. Return zero if
1144 combining the constants is not allowed in the current operating mode. */
1145
1146 static tree
1147 const_binop (enum tree_code code, tree arg1, tree arg2)
1148 {
1149 /* Sanity check for the recursive cases. */
1150 if (!arg1 || !arg2)
1151 return NULL_TREE;
1152
1153 STRIP_NOPS (arg1);
1154 STRIP_NOPS (arg2);
1155
1156 if (TREE_CODE (arg1) == INTEGER_CST)
1157 return int_const_binop (code, arg1, arg2);
1158
1159 if (TREE_CODE (arg1) == REAL_CST)
1160 {
1161 enum machine_mode mode;
1162 REAL_VALUE_TYPE d1;
1163 REAL_VALUE_TYPE d2;
1164 REAL_VALUE_TYPE value;
1165 REAL_VALUE_TYPE result;
1166 bool inexact;
1167 tree t, type;
1168
1169 /* The following codes are handled by real_arithmetic. */
1170 switch (code)
1171 {
1172 case PLUS_EXPR:
1173 case MINUS_EXPR:
1174 case MULT_EXPR:
1175 case RDIV_EXPR:
1176 case MIN_EXPR:
1177 case MAX_EXPR:
1178 break;
1179
1180 default:
1181 return NULL_TREE;
1182 }
1183
1184 d1 = TREE_REAL_CST (arg1);
1185 d2 = TREE_REAL_CST (arg2);
1186
1187 type = TREE_TYPE (arg1);
1188 mode = TYPE_MODE (type);
1189
1190 /* Don't perform operation if we honor signaling NaNs and
1191 either operand is a NaN. */
1192 if (HONOR_SNANS (mode)
1193 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1194 return NULL_TREE;
1195
1196 /* Don't perform operation if it would raise a division
1197 by zero exception. */
1198 if (code == RDIV_EXPR
1199 && REAL_VALUES_EQUAL (d2, dconst0)
1200 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1201 return NULL_TREE;
1202
1203 /* If either operand is a NaN, just return it. Otherwise, set up
1204 for floating-point trap; we return an overflow. */
1205 if (REAL_VALUE_ISNAN (d1))
1206 return arg1;
1207 else if (REAL_VALUE_ISNAN (d2))
1208 return arg2;
1209
1210 inexact = real_arithmetic (&value, code, &d1, &d2);
1211 real_convert (&result, mode, &value);
1212
1213 /* Don't constant fold this floating point operation if
1214 the result has overflowed and flag_trapping_math. */
1215 if (flag_trapping_math
1216 && MODE_HAS_INFINITIES (mode)
1217 && REAL_VALUE_ISINF (result)
1218 && !REAL_VALUE_ISINF (d1)
1219 && !REAL_VALUE_ISINF (d2))
1220 return NULL_TREE;
1221
1222 /* Don't constant fold this floating point operation if the
1223 result may depend upon the run-time rounding mode and
1224 flag_rounding_math is set, or if GCC's software emulation
1225 is unable to accurately represent the result. */
1226 if ((flag_rounding_math
1227 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1228 && (inexact || !real_identical (&result, &value)))
1229 return NULL_TREE;
1230
1231 t = build_real (type, result);
1232
1233 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1234 return t;
1235 }
1236
1237 if (TREE_CODE (arg1) == FIXED_CST)
1238 {
1239 FIXED_VALUE_TYPE f1;
1240 FIXED_VALUE_TYPE f2;
1241 FIXED_VALUE_TYPE result;
1242 tree t, type;
1243 int sat_p;
1244 bool overflow_p;
1245
1246 /* The following codes are handled by fixed_arithmetic. */
1247 switch (code)
1248 {
1249 case PLUS_EXPR:
1250 case MINUS_EXPR:
1251 case MULT_EXPR:
1252 case TRUNC_DIV_EXPR:
1253 f2 = TREE_FIXED_CST (arg2);
1254 break;
1255
1256 case LSHIFT_EXPR:
1257 case RSHIFT_EXPR:
1258 f2.data.high = TREE_INT_CST_HIGH (arg2);
1259 f2.data.low = TREE_INT_CST_LOW (arg2);
1260 f2.mode = SImode;
1261 break;
1262
1263 default:
1264 return NULL_TREE;
1265 }
1266
1267 f1 = TREE_FIXED_CST (arg1);
1268 type = TREE_TYPE (arg1);
1269 sat_p = TYPE_SATURATING (type);
1270 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1271 t = build_fixed (type, result);
1272 /* Propagate overflow flags. */
1273 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1274 TREE_OVERFLOW (t) = 1;
1275 return t;
1276 }
1277
1278 if (TREE_CODE (arg1) == COMPLEX_CST)
1279 {
1280 tree type = TREE_TYPE (arg1);
1281 tree r1 = TREE_REALPART (arg1);
1282 tree i1 = TREE_IMAGPART (arg1);
1283 tree r2 = TREE_REALPART (arg2);
1284 tree i2 = TREE_IMAGPART (arg2);
1285 tree real, imag;
1286
1287 switch (code)
1288 {
1289 case PLUS_EXPR:
1290 case MINUS_EXPR:
1291 real = const_binop (code, r1, r2);
1292 imag = const_binop (code, i1, i2);
1293 break;
1294
1295 case MULT_EXPR:
1296 if (COMPLEX_FLOAT_TYPE_P (type))
1297 return do_mpc_arg2 (arg1, arg2, type,
1298 /* do_nonfinite= */ folding_initializer,
1299 mpc_mul);
1300
1301 real = const_binop (MINUS_EXPR,
1302 const_binop (MULT_EXPR, r1, r2),
1303 const_binop (MULT_EXPR, i1, i2));
1304 imag = const_binop (PLUS_EXPR,
1305 const_binop (MULT_EXPR, r1, i2),
1306 const_binop (MULT_EXPR, i1, r2));
1307 break;
1308
1309 case RDIV_EXPR:
1310 if (COMPLEX_FLOAT_TYPE_P (type))
1311 return do_mpc_arg2 (arg1, arg2, type,
1312 /* do_nonfinite= */ folding_initializer,
1313 mpc_div);
1314 /* Fallthru ... */
1315 case TRUNC_DIV_EXPR:
1316 case CEIL_DIV_EXPR:
1317 case FLOOR_DIV_EXPR:
1318 case ROUND_DIV_EXPR:
1319 if (flag_complex_method == 0)
1320 {
1321 /* Keep this algorithm in sync with
1322 tree-complex.c:expand_complex_div_straight().
1323
1324 Expand complex division to scalars, straightforward algorithm.
1325 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1326 t = br*br + bi*bi
1327 */
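/* Numeric check (editorial): (1 + 2i) / (3 + 4i) gives
   t = 9 + 16 = 25, tr = (3 + 8) / 25 = 0.44 and
   ti = (6 - 4) / 25 = 0.08, i.e. 0.44 + 0.08i.  */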
1328 tree magsquared
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r2, r2),
1331 const_binop (MULT_EXPR, i2, i2));
1332 tree t1
1333 = const_binop (PLUS_EXPR,
1334 const_binop (MULT_EXPR, r1, r2),
1335 const_binop (MULT_EXPR, i1, i2));
1336 tree t2
1337 = const_binop (MINUS_EXPR,
1338 const_binop (MULT_EXPR, i1, r2),
1339 const_binop (MULT_EXPR, r1, i2));
1340
1341 real = const_binop (code, t1, magsquared);
1342 imag = const_binop (code, t2, magsquared);
1343 }
1344 else
1345 {
1346 /* Keep this algorithm in sync with
1347 tree-complex.c:expand_complex_div_wide().
1348
1349 Expand complex division to scalars, modified algorithm to minimize
1350 overflow with wide input ranges. */
1351 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1352 fold_abs_const (r2, TREE_TYPE (type)),
1353 fold_abs_const (i2, TREE_TYPE (type)));
1354
1355 if (integer_nonzerop (compare))
1356 {
1357 /* In the TRUE branch, we compute
1358 ratio = br/bi;
1359 div = (br * ratio) + bi;
1360 tr = (ar * ratio) + ai;
1361 ti = (ai * ratio) - ar;
1362 tr = tr / div;
1363 ti = ti / div; */
1364 tree ratio = const_binop (code, r2, i2);
1365 tree div = const_binop (PLUS_EXPR, i2,
1366 const_binop (MULT_EXPR, r2, ratio));
1367 real = const_binop (MULT_EXPR, r1, ratio);
1368 real = const_binop (PLUS_EXPR, real, i1);
1369 real = const_binop (code, real, div);
1370
1371 imag = const_binop (MULT_EXPR, i1, ratio);
1372 imag = const_binop (MINUS_EXPR, imag, r1);
1373 imag = const_binop (code, imag, div);
1374 }
1375 else
1376 {
1377 /* In the FALSE branch, we compute
1378 ratio = bi/br;
1379 div = (bi * ratio) + br;
1380 tr = (ai * ratio) + ar;
1381 ti = ai - (ar * ratio);
1382 tr = tr / div;
1383 ti = ti / div; */
1384 tree ratio = const_binop (code, i2, r2);
1385 tree div = const_binop (PLUS_EXPR, r2,
1386 const_binop (MULT_EXPR, i2, ratio));
1387
1388 real = const_binop (MULT_EXPR, i1, ratio);
1389 real = const_binop (PLUS_EXPR, real, r1);
1390 real = const_binop (code, real, div);
1391
1392 imag = const_binop (MULT_EXPR, r1, ratio);
1393 imag = const_binop (MINUS_EXPR, i1, imag);
1394 imag = const_binop (code, imag, div);
1395 }
1396 }
1397 break;
1398
1399 default:
1400 return NULL_TREE;
1401 }
1402
1403 if (real && imag)
1404 return build_complex (type, real, imag);
1405 }
1406
1407 if (TREE_CODE (arg1) == VECTOR_CST
1408 && TREE_CODE (arg2) == VECTOR_CST)
1409 {
1410 tree type = TREE_TYPE (arg1);
1411 int count = TYPE_VECTOR_SUBPARTS (type), i;
1412 tree *elts = XALLOCAVEC (tree, count);
1413
1414 for (i = 0; i < count; i++)
1415 {
1416 tree elem1 = VECTOR_CST_ELT (arg1, i);
1417 tree elem2 = VECTOR_CST_ELT (arg2, i);
1418
1419 elts[i] = const_binop (code, elem1, elem2);
1420
1421 /* It is possible that const_binop cannot handle the given
1422 code and returns NULL_TREE. */
1423 if (elts[i] == NULL_TREE)
1424 return NULL_TREE;
1425 }
1426
1427 return build_vector (type, elts);
1428 }
1429
1430 /* Shifts allow a scalar offset for a vector. */
1431 if (TREE_CODE (arg1) == VECTOR_CST
1432 && TREE_CODE (arg2) == INTEGER_CST)
1433 {
1434 tree type = TREE_TYPE (arg1);
1435 int count = TYPE_VECTOR_SUBPARTS (type), i;
1436 tree *elts = XALLOCAVEC (tree, count);
1437
1438 if (code == VEC_LSHIFT_EXPR
1439 || code == VEC_RSHIFT_EXPR)
1440 {
1441 if (!tree_fits_uhwi_p (arg2))
1442 return NULL_TREE;
1443
1444 unsigned HOST_WIDE_INT shiftc = tree_to_uhwi (arg2);
1445 unsigned HOST_WIDE_INT outerc = tree_to_uhwi (TYPE_SIZE (type));
1446 unsigned HOST_WIDE_INT innerc
1447 = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (type)));
1448 if (shiftc >= outerc || (shiftc % innerc) != 0)
1449 return NULL_TREE;
1450 int offset = shiftc / innerc;
1451 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1452 For reductions, the compiler always emits VEC_RSHIFT_EXPR,
1453 which for !BYTES_BIG_ENDIAN picks the first vector element,
1454 but for BYTES_BIG_ENDIAN the last element of the vector. */
1455 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1456 offset = -offset;
1457 tree zero = build_zero_cst (TREE_TYPE (type));
1458 for (i = 0; i < count; i++)
1459 {
1460 if (i + offset < 0 || i + offset >= count)
1461 elts[i] = zero;
1462 else
1463 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1464 }
1465 }
1466 else
1467 for (i = 0; i < count; i++)
1468 {
1469 tree elem1 = VECTOR_CST_ELT (arg1, i);
1470
1471 elts[i] = const_binop (code, elem1, arg2);
1472
1473 /* It is possible that const_binop cannot handle the given
1474 code and returns NULL_TREE. */
1475 if (elts[i] == NULL_TREE)
1476 return NULL_TREE;
1477 }
1478
1479 return build_vector (type, elts);
1480 }
1481 return NULL_TREE;
1482 }
1483
1484 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1485 indicates which particular sizetype to create. */
1486
1487 tree
1488 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1489 {
1490 return build_int_cst (sizetype_tab[(int) kind], number);
1491 }
1492 \f
1493 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1494 is a tree code. The type of the result is taken from the operands.
1495 Both must be equivalent integer types, ala int_binop_types_match_p.
1496 If the operands are constant, so is the result. */
1497
1498 tree
1499 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1500 {
1501 tree type = TREE_TYPE (arg0);
1502
1503 if (arg0 == error_mark_node || arg1 == error_mark_node)
1504 return error_mark_node;
1505
1506 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1507 TREE_TYPE (arg1)));
1508
1509 /* Handle the special case of two integer constants faster. */
1510 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1511 {
1512 /* And some specific cases even faster than that. */
1513 if (code == PLUS_EXPR)
1514 {
1515 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1516 return arg1;
1517 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1518 return arg0;
1519 }
1520 else if (code == MINUS_EXPR)
1521 {
1522 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1523 return arg0;
1524 }
1525 else if (code == MULT_EXPR)
1526 {
1527 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1528 return arg1;
1529 }
1530
1531 /* Handle general case of two integer constants. For sizetype
1532 constant calculations we always want to know about overflow,
1533 even in the unsigned case. */
1534 return int_const_binop_1 (code, arg0, arg1, -1);
1535 }
1536
1537 return fold_build2_loc (loc, code, type, arg0, arg1);
1538 }
1539
1540 /* Given two values, either both of sizetype or both of bitsizetype,
1541 compute the difference between the two values. Return the value
1542 in signed type corresponding to the type of the operands. */
1543
1544 tree
1545 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1546 {
1547 tree type = TREE_TYPE (arg0);
1548 tree ctype;
1549
1550 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1551 TREE_TYPE (arg1)));
1552
1553 /* If the type is already signed, just do the simple thing. */
1554 if (!TYPE_UNSIGNED (type))
1555 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1556
1557 if (type == sizetype)
1558 ctype = ssizetype;
1559 else if (type == bitsizetype)
1560 ctype = sbitsizetype;
1561 else
1562 ctype = signed_type_for (type);
1563
1564 /* If either operand is not a constant, do the conversions to the signed
1565 type and subtract. The hardware will do the right thing with any
1566 overflow in the subtraction. */
1567 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1568 return size_binop_loc (loc, MINUS_EXPR,
1569 fold_convert_loc (loc, ctype, arg0),
1570 fold_convert_loc (loc, ctype, arg1));
1571
1572 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1573 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1574 overflow) and negate (which can't either). Special-case a result
1575 of zero while we're here. */
1576 if (tree_int_cst_equal (arg0, arg1))
1577 return build_int_cst (ctype, 0);
1578 else if (tree_int_cst_lt (arg1, arg0))
1579 return fold_convert_loc (loc, ctype,
1580 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1581 else
1582 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1583 fold_convert_loc (loc, ctype,
1584 size_binop_loc (loc,
1585 MINUS_EXPR,
1586 arg1, arg0)));
1587 }
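/* For instance (editorial example): for the sizetype constants
   ARG0 == 4 and ARG1 == 7, the difference 7 - 4 == 3 is computed in
   the unsigned type (where it cannot overflow), converted to
   ssizetype and negated, yielding the ssizetype constant -3.  */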
1588 \f
1589 /* A subroutine of fold_convert_const handling conversions of an
1590 INTEGER_CST to another integer type. */
1591
1592 static tree
1593 fold_convert_const_int_from_int (tree type, const_tree arg1)
1594 {
1595 tree t;
1596
1597 /* Given an integer constant, make new constant with new type,
1598 appropriately sign-extended or truncated. */
1599 t = force_fit_type_double (type, tree_to_double_int (arg1),
1600 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1601 (TREE_INT_CST_HIGH (arg1) < 0
1602 && (TYPE_UNSIGNED (type)
1603 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1604 | TREE_OVERFLOW (arg1));
1605
1606 return t;
1607 }
1608
1609 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1610 to an integer type. */
1611
1612 static tree
1613 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1614 {
1615 int overflow = 0;
1616 tree t;
1617
1618 /* The following code implements the floating point to integer
1619 conversion rules required by the Java Language Specification,
1620 that IEEE NaNs are mapped to zero and values that overflow
1621 the target precision saturate, i.e. values greater than
1622 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1623 are mapped to INT_MIN. These semantics are allowed by the
1624 C and C++ standards that simply state that the behavior of
1625 FP-to-integer conversion is unspecified upon overflow. */
1626
1627 double_int val;
1628 REAL_VALUE_TYPE r;
1629 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1630
1631 switch (code)
1632 {
1633 case FIX_TRUNC_EXPR:
1634 real_trunc (&r, VOIDmode, &x);
1635 break;
1636
1637 default:
1638 gcc_unreachable ();
1639 }
1640
1641 /* If R is NaN, return zero and show we have an overflow. */
1642 if (REAL_VALUE_ISNAN (r))
1643 {
1644 overflow = 1;
1645 val = double_int_zero;
1646 }
1647
1648 /* See if R is less than the lower bound or greater than the
1649 upper bound. */
1650
1651 if (! overflow)
1652 {
1653 tree lt = TYPE_MIN_VALUE (type);
1654 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1655 if (REAL_VALUES_LESS (r, l))
1656 {
1657 overflow = 1;
1658 val = tree_to_double_int (lt);
1659 }
1660 }
1661
1662 if (! overflow)
1663 {
1664 tree ut = TYPE_MAX_VALUE (type);
1665 if (ut)
1666 {
1667 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1668 if (REAL_VALUES_LESS (u, r))
1669 {
1670 overflow = 1;
1671 val = tree_to_double_int (ut);
1672 }
1673 }
1674 }
1675
1676 if (! overflow)
1677 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1678
1679 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1680 return t;
1681 }
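/* Examples (editorial): converting 3.9 to a 32-bit int truncates to 3;
   converting a NaN yields 0 with TREE_OVERFLOW set; converting 1e30
   saturates to INT_MAX, again with TREE_OVERFLOW set, matching the
   Java-style semantics described above.  */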
1682
1683 /* A subroutine of fold_convert_const handling conversions of a
1684 FIXED_CST to an integer type. */
1685
1686 static tree
1687 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1688 {
1689 tree t;
1690 double_int temp, temp_trunc;
1691 unsigned int mode;
1692
1693 /* Right shift FIXED_CST to temp by fbit. */
1694 temp = TREE_FIXED_CST (arg1).data;
1695 mode = TREE_FIXED_CST (arg1).mode;
1696 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1697 {
1698 temp = temp.rshift (GET_MODE_FBIT (mode),
1699 HOST_BITS_PER_DOUBLE_INT,
1700 SIGNED_FIXED_POINT_MODE_P (mode));
1701
1702 /* Left shift temp to temp_trunc by fbit. */
1703 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1704 HOST_BITS_PER_DOUBLE_INT,
1705 SIGNED_FIXED_POINT_MODE_P (mode));
1706 }
1707 else
1708 {
1709 temp = double_int_zero;
1710 temp_trunc = double_int_zero;
1711 }
1712
1713 /* If FIXED_CST is negative, we need to round the value toward 0.
1714 We do this by adding 1 to TEMP when the fractional bits are not zero. */
1715 if (SIGNED_FIXED_POINT_MODE_P (mode)
1716 && temp_trunc.is_negative ()
1717 && TREE_FIXED_CST (arg1).data != temp_trunc)
1718 temp += double_int_one;
1719
1720 /* Given a fixed-point constant, make new constant with new type,
1721 appropriately sign-extended or truncated. */
1722 t = force_fit_type_double (type, temp, -1,
1723 (temp.is_negative ()
1724 && (TYPE_UNSIGNED (type)
1725 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1726 | TREE_OVERFLOW (arg1));
1727
1728 return t;
1729 }
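/* Worked example (editorial): for a signed fixed-point constant -1.5,
   the arithmetic right shift by FBIT rounds toward -infinity, giving
   TEMP == -2 and TEMP_TRUNC == -2.0.  Since -1.5 differs from
   TEMP_TRUNC and the value is negative, 1 is added, so the result is
   -1, i.e. truncation toward zero.  */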
1730
1731 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1732 to another floating point type. */
1733
1734 static tree
1735 fold_convert_const_real_from_real (tree type, const_tree arg1)
1736 {
1737 REAL_VALUE_TYPE value;
1738 tree t;
1739
1740 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1741 t = build_real (type, value);
1742
1743 /* If converting an infinity or NAN to a representation that doesn't
1744 have one, set the overflow bit so that we can produce some kind of
1745 error message at the appropriate point if necessary. It's not the
1746 most user-friendly message, but it's better than nothing. */
1747 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1748 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1749 TREE_OVERFLOW (t) = 1;
1750 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1751 && !MODE_HAS_NANS (TYPE_MODE (type)))
1752 TREE_OVERFLOW (t) = 1;
1753 /* Regular overflow: the conversion produced an infinity in a mode that
1754 can't represent infinities. */
1755 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1756 && REAL_VALUE_ISINF (value)
1757 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1758 TREE_OVERFLOW (t) = 1;
1759 else
1760 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1761 return t;
1762 }
1763
1764 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1765 to a floating point type. */
1766
1767 static tree
1768 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1769 {
1770 REAL_VALUE_TYPE value;
1771 tree t;
1772
1773 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1774 t = build_real (type, value);
1775
1776 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1777 return t;
1778 }
1779
1780 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1781 to another fixed-point type. */
1782
1783 static tree
1784 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1785 {
1786 FIXED_VALUE_TYPE value;
1787 tree t;
1788 bool overflow_p;
1789
1790 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1791 TYPE_SATURATING (type));
1792 t = build_fixed (type, value);
1793
1794 /* Propagate overflow flags. */
1795 if (overflow_p | TREE_OVERFLOW (arg1))
1796 TREE_OVERFLOW (t) = 1;
1797 return t;
1798 }
1799
1800 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1801 to a fixed-point type. */
1802
1803 static tree
1804 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1805 {
1806 FIXED_VALUE_TYPE value;
1807 tree t;
1808 bool overflow_p;
1809
1810 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1811 TREE_INT_CST (arg1),
1812 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1813 TYPE_SATURATING (type));
1814 t = build_fixed (type, value);
1815
1816 /* Propagate overflow flags. */
1817 if (overflow_p | TREE_OVERFLOW (arg1))
1818 TREE_OVERFLOW (t) = 1;
1819 return t;
1820 }
1821
1822 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1823 to a fixed-point type. */
1824
1825 static tree
1826 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1827 {
1828 FIXED_VALUE_TYPE value;
1829 tree t;
1830 bool overflow_p;
1831
1832 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1833 &TREE_REAL_CST (arg1),
1834 TYPE_SATURATING (type));
1835 t = build_fixed (type, value);
1836
1837 /* Propagate overflow flags. */
1838 if (overflow_p | TREE_OVERFLOW (arg1))
1839 TREE_OVERFLOW (t) = 1;
1840 return t;
1841 }
1842
1843 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1844 type TYPE. If no simplification can be done return NULL_TREE. */
1845
1846 static tree
1847 fold_convert_const (enum tree_code code, tree type, tree arg1)
1848 {
1849 if (TREE_TYPE (arg1) == type)
1850 return arg1;
1851
1852 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1853 || TREE_CODE (type) == OFFSET_TYPE)
1854 {
1855 if (TREE_CODE (arg1) == INTEGER_CST)
1856 return fold_convert_const_int_from_int (type, arg1);
1857 else if (TREE_CODE (arg1) == REAL_CST)
1858 return fold_convert_const_int_from_real (code, type, arg1);
1859 else if (TREE_CODE (arg1) == FIXED_CST)
1860 return fold_convert_const_int_from_fixed (type, arg1);
1861 }
1862 else if (TREE_CODE (type) == REAL_TYPE)
1863 {
1864 if (TREE_CODE (arg1) == INTEGER_CST)
1865 return build_real_from_int_cst (type, arg1);
1866 else if (TREE_CODE (arg1) == REAL_CST)
1867 return fold_convert_const_real_from_real (type, arg1);
1868 else if (TREE_CODE (arg1) == FIXED_CST)
1869 return fold_convert_const_real_from_fixed (type, arg1);
1870 }
1871 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1872 {
1873 if (TREE_CODE (arg1) == FIXED_CST)
1874 return fold_convert_const_fixed_from_fixed (type, arg1);
1875 else if (TREE_CODE (arg1) == INTEGER_CST)
1876 return fold_convert_const_fixed_from_int (type, arg1);
1877 else if (TREE_CODE (arg1) == REAL_CST)
1878 return fold_convert_const_fixed_from_real (type, arg1);
1879 }
1880 return NULL_TREE;
1881 }
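
/* Illustrative example (an editorial sketch, not part of the original
   sources): a caller holding constants can fold a conversion outright.
   Assuming the usual global type nodes,

     tree four = build_int_cst (integer_type_node, 4);
     tree f4 = fold_convert_const (FLOAT_EXPR, float_type_node, four);

   leaves F4 as a REAL_CST of value 4.0, while an unhandled
   CODE/TYPE/ARG1 combination yields NULL_TREE and the caller must
   build the conversion expression itself.  */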
1882
1883 /* Construct a vector of zero elements of vector type TYPE. */
1884
1885 static tree
1886 build_zero_vector (tree type)
1887 {
1888 tree t;
1889
1890 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1891 return build_vector_from_val (type, t);
1892 }
1893
1894 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1895
1896 bool
1897 fold_convertible_p (const_tree type, const_tree arg)
1898 {
1899 tree orig = TREE_TYPE (arg);
1900
1901 if (type == orig)
1902 return true;
1903
1904 if (TREE_CODE (arg) == ERROR_MARK
1905 || TREE_CODE (type) == ERROR_MARK
1906 || TREE_CODE (orig) == ERROR_MARK)
1907 return false;
1908
1909 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1910 return true;
1911
1912 switch (TREE_CODE (type))
1913 {
1914 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1915 case POINTER_TYPE: case REFERENCE_TYPE:
1916 case OFFSET_TYPE:
1917 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1918 || TREE_CODE (orig) == OFFSET_TYPE)
1919 return true;
1920 return (TREE_CODE (orig) == VECTOR_TYPE
1921 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1922
1923 case REAL_TYPE:
1924 case FIXED_POINT_TYPE:
1925 case COMPLEX_TYPE:
1926 case VECTOR_TYPE:
1927 case VOID_TYPE:
1928 return TREE_CODE (type) == TREE_CODE (orig);
1929
1930 default:
1931 return false;
1932 }
1933 }
1934
1935 /* Convert expression ARG to type TYPE. Used by the middle-end for
1936 simple conversions in preference to calling the front-end's convert. */
1937
1938 tree
1939 fold_convert_loc (location_t loc, tree type, tree arg)
1940 {
1941 tree orig = TREE_TYPE (arg);
1942 tree tem;
1943
1944 if (type == orig)
1945 return arg;
1946
1947 if (TREE_CODE (arg) == ERROR_MARK
1948 || TREE_CODE (type) == ERROR_MARK
1949 || TREE_CODE (orig) == ERROR_MARK)
1950 return error_mark_node;
1951
1952 switch (TREE_CODE (type))
1953 {
1954 case POINTER_TYPE:
1955 case REFERENCE_TYPE:
1956 /* Handle conversions between pointers to different address spaces. */
1957 if (POINTER_TYPE_P (orig)
1958 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1959 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1960 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1961 /* fall through */
1962
1963 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1964 case OFFSET_TYPE:
1965 if (TREE_CODE (arg) == INTEGER_CST)
1966 {
1967 tem = fold_convert_const (NOP_EXPR, type, arg);
1968 if (tem != NULL_TREE)
1969 return tem;
1970 }
1971 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1972 || TREE_CODE (orig) == OFFSET_TYPE)
1973 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1974 if (TREE_CODE (orig) == COMPLEX_TYPE)
1975 return fold_convert_loc (loc, type,
1976 fold_build1_loc (loc, REALPART_EXPR,
1977 TREE_TYPE (orig), arg));
1978 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1979 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1980 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1981
1982 case REAL_TYPE:
1983 if (TREE_CODE (arg) == INTEGER_CST)
1984 {
1985 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1986 if (tem != NULL_TREE)
1987 return tem;
1988 }
1989 else if (TREE_CODE (arg) == REAL_CST)
1990 {
1991 tem = fold_convert_const (NOP_EXPR, type, arg);
1992 if (tem != NULL_TREE)
1993 return tem;
1994 }
1995 else if (TREE_CODE (arg) == FIXED_CST)
1996 {
1997 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1998 if (tem != NULL_TREE)
1999 return tem;
2000 }
2001
2002 switch (TREE_CODE (orig))
2003 {
2004 case INTEGER_TYPE:
2005 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2006 case POINTER_TYPE: case REFERENCE_TYPE:
2007 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2008
2009 case REAL_TYPE:
2010 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2011
2012 case FIXED_POINT_TYPE:
2013 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2014
2015 case COMPLEX_TYPE:
2016 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2017 return fold_convert_loc (loc, type, tem);
2018
2019 default:
2020 gcc_unreachable ();
2021 }
2022
2023 case FIXED_POINT_TYPE:
2024 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2025 || TREE_CODE (arg) == REAL_CST)
2026 {
2027 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2028 if (tem != NULL_TREE)
2029 goto fold_convert_exit;
2030 }
2031
2032 switch (TREE_CODE (orig))
2033 {
2034 case FIXED_POINT_TYPE:
2035 case INTEGER_TYPE:
2036 case ENUMERAL_TYPE:
2037 case BOOLEAN_TYPE:
2038 case REAL_TYPE:
2039 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2040
2041 case COMPLEX_TYPE:
2042 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2043 return fold_convert_loc (loc, type, tem);
2044
2045 default:
2046 gcc_unreachable ();
2047 }
2048
2049 case COMPLEX_TYPE:
2050 switch (TREE_CODE (orig))
2051 {
2052 case INTEGER_TYPE:
2053 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2054 case POINTER_TYPE: case REFERENCE_TYPE:
2055 case REAL_TYPE:
2056 case FIXED_POINT_TYPE:
2057 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2058 fold_convert_loc (loc, TREE_TYPE (type), arg),
2059 fold_convert_loc (loc, TREE_TYPE (type),
2060 integer_zero_node));
2061 case COMPLEX_TYPE:
2062 {
2063 tree rpart, ipart;
2064
2065 if (TREE_CODE (arg) == COMPLEX_EXPR)
2066 {
2067 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2068 TREE_OPERAND (arg, 0));
2069 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2070 TREE_OPERAND (arg, 1));
2071 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2072 }
2073
2074 arg = save_expr (arg);
2075 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2076 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2077 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2078 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2079 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2080 }
2081
2082 default:
2083 gcc_unreachable ();
2084 }
2085
2086 case VECTOR_TYPE:
2087 if (integer_zerop (arg))
2088 return build_zero_vector (type);
2089 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2090 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2091 || TREE_CODE (orig) == VECTOR_TYPE);
2092 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2093
2094 case VOID_TYPE:
2095 tem = fold_ignored_result (arg);
2096 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2097
2098 default:
2099 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2100 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2101 gcc_unreachable ();
2102 }
2103 fold_convert_exit:
2104 protected_set_expr_location_unshare (tem, loc);
2105 return tem;
2106 }
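
/* Usage sketch (editorial illustration, not from the original
   sources): the middle-end prefers this entry point over the
   front-end's convert, e.g.

     tree widened = fold_convert_loc (loc, long_integer_type_node, arg);

   folds immediately when ARG is constant and otherwise wraps ARG in
   the appropriate NOP_EXPR, FLOAT_EXPR, FIXED_CONVERT_EXPR or
   COMPLEX_EXPR construction for the type pair at hand.  */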
2107 \f
2108 /* Return false if expr can be assumed not to be an lvalue, true
2109 otherwise. */
2110
2111 static bool
2112 maybe_lvalue_p (const_tree x)
2113 {
2114 /* We only need to wrap lvalue tree codes. */
2115 switch (TREE_CODE (x))
2116 {
2117 case VAR_DECL:
2118 case PARM_DECL:
2119 case RESULT_DECL:
2120 case LABEL_DECL:
2121 case FUNCTION_DECL:
2122 case SSA_NAME:
2123
2124 case COMPONENT_REF:
2125 case MEM_REF:
2126 case INDIRECT_REF:
2127 case ARRAY_REF:
2128 case ARRAY_RANGE_REF:
2129 case BIT_FIELD_REF:
2130 case OBJ_TYPE_REF:
2131
2132 case REALPART_EXPR:
2133 case IMAGPART_EXPR:
2134 case PREINCREMENT_EXPR:
2135 case PREDECREMENT_EXPR:
2136 case SAVE_EXPR:
2137 case TRY_CATCH_EXPR:
2138 case WITH_CLEANUP_EXPR:
2139 case COMPOUND_EXPR:
2140 case MODIFY_EXPR:
2141 case TARGET_EXPR:
2142 case COND_EXPR:
2143 case BIND_EXPR:
2144 break;
2145
2146 default:
2147 /* Assume the worst for front-end tree codes. */
2148 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2149 break;
2150 return false;
2151 }
2152
2153 return true;
2154 }
2155
2156 /* Return an expr equal to X but certainly not valid as an lvalue. */
2157
2158 tree
2159 non_lvalue_loc (location_t loc, tree x)
2160 {
2161 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2162 us. */
2163 if (in_gimple_form)
2164 return x;
2165
2166 if (! maybe_lvalue_p (x))
2167 return x;
2168 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2169 }
2170
2171 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2172 Zero means allow extended lvalues. */
2173
2174 int pedantic_lvalues;
2175
2176 /* When pedantic, return an expr equal to X but certainly not valid as a
2177 pedantic lvalue. Otherwise, return X. */
2178
2179 static tree
2180 pedantic_non_lvalue_loc (location_t loc, tree x)
2181 {
2182 if (pedantic_lvalues)
2183 return non_lvalue_loc (loc, x);
2184
2185 return protected_set_expr_location_unshare (x, loc);
2186 }
2187 \f
2188 /* Given a tree comparison code, return the code that is the logical inverse.
2189 It is generally not safe to do this for floating-point comparisons, except
2190 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2191 ERROR_MARK in this case. */
2192
2193 enum tree_code
2194 invert_tree_comparison (enum tree_code code, bool honor_nans)
2195 {
2196 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2197 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2198 return ERROR_MARK;
2199
2200 switch (code)
2201 {
2202 case EQ_EXPR:
2203 return NE_EXPR;
2204 case NE_EXPR:
2205 return EQ_EXPR;
2206 case GT_EXPR:
2207 return honor_nans ? UNLE_EXPR : LE_EXPR;
2208 case GE_EXPR:
2209 return honor_nans ? UNLT_EXPR : LT_EXPR;
2210 case LT_EXPR:
2211 return honor_nans ? UNGE_EXPR : GE_EXPR;
2212 case LE_EXPR:
2213 return honor_nans ? UNGT_EXPR : GT_EXPR;
2214 case LTGT_EXPR:
2215 return UNEQ_EXPR;
2216 case UNEQ_EXPR:
2217 return LTGT_EXPR;
2218 case UNGT_EXPR:
2219 return LE_EXPR;
2220 case UNGE_EXPR:
2221 return LT_EXPR;
2222 case UNLT_EXPR:
2223 return GE_EXPR;
2224 case UNLE_EXPR:
2225 return GT_EXPR;
2226 case ORDERED_EXPR:
2227 return UNORDERED_EXPR;
2228 case UNORDERED_EXPR:
2229 return ORDERED_EXPR;
2230 default:
2231 gcc_unreachable ();
2232 }
2233 }
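
/* Worked example (editorial illustration): with NaNs honored,

     invert_tree_comparison (LT_EXPR, true) == UNGE_EXPR

   because !(x < y) is also true when x and y compare unordered,
   whereas GE_EXPR would be false there; only with

     invert_tree_comparison (LT_EXPR, false) == GE_EXPR

   is the familiar ordered inverse safe.  */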
2234
2235 /* Similar, but return the comparison that results if the operands are
2236 swapped. This is safe for floating-point. */
2237
2238 enum tree_code
2239 swap_tree_comparison (enum tree_code code)
2240 {
2241 switch (code)
2242 {
2243 case EQ_EXPR:
2244 case NE_EXPR:
2245 case ORDERED_EXPR:
2246 case UNORDERED_EXPR:
2247 case LTGT_EXPR:
2248 case UNEQ_EXPR:
2249 return code;
2250 case GT_EXPR:
2251 return LT_EXPR;
2252 case GE_EXPR:
2253 return LE_EXPR;
2254 case LT_EXPR:
2255 return GT_EXPR;
2256 case LE_EXPR:
2257 return GE_EXPR;
2258 case UNGT_EXPR:
2259 return UNLT_EXPR;
2260 case UNGE_EXPR:
2261 return UNLE_EXPR;
2262 case UNLT_EXPR:
2263 return UNGT_EXPR;
2264 case UNLE_EXPR:
2265 return UNGE_EXPR;
2266 default:
2267 gcc_unreachable ();
2268 }
2269 }
2270
2271
2272 /* Convert a comparison tree code from an enum tree_code representation
2273 into a compcode bit-based encoding. This function is the inverse of
2274 compcode_to_comparison. */
2275
2276 static enum comparison_code
2277 comparison_to_compcode (enum tree_code code)
2278 {
2279 switch (code)
2280 {
2281 case LT_EXPR:
2282 return COMPCODE_LT;
2283 case EQ_EXPR:
2284 return COMPCODE_EQ;
2285 case LE_EXPR:
2286 return COMPCODE_LE;
2287 case GT_EXPR:
2288 return COMPCODE_GT;
2289 case NE_EXPR:
2290 return COMPCODE_NE;
2291 case GE_EXPR:
2292 return COMPCODE_GE;
2293 case ORDERED_EXPR:
2294 return COMPCODE_ORD;
2295 case UNORDERED_EXPR:
2296 return COMPCODE_UNORD;
2297 case UNLT_EXPR:
2298 return COMPCODE_UNLT;
2299 case UNEQ_EXPR:
2300 return COMPCODE_UNEQ;
2301 case UNLE_EXPR:
2302 return COMPCODE_UNLE;
2303 case UNGT_EXPR:
2304 return COMPCODE_UNGT;
2305 case LTGT_EXPR:
2306 return COMPCODE_LTGT;
2307 case UNGE_EXPR:
2308 return COMPCODE_UNGE;
2309 default:
2310 gcc_unreachable ();
2311 }
2312 }
2313
2314 /* Convert a compcode bit-based encoding of a comparison operator back
2315 to GCC's enum tree_code representation. This function is the
2316 inverse of comparison_to_compcode. */
2317
2318 static enum tree_code
2319 compcode_to_comparison (enum comparison_code code)
2320 {
2321 switch (code)
2322 {
2323 case COMPCODE_LT:
2324 return LT_EXPR;
2325 case COMPCODE_EQ:
2326 return EQ_EXPR;
2327 case COMPCODE_LE:
2328 return LE_EXPR;
2329 case COMPCODE_GT:
2330 return GT_EXPR;
2331 case COMPCODE_NE:
2332 return NE_EXPR;
2333 case COMPCODE_GE:
2334 return GE_EXPR;
2335 case COMPCODE_ORD:
2336 return ORDERED_EXPR;
2337 case COMPCODE_UNORD:
2338 return UNORDERED_EXPR;
2339 case COMPCODE_UNLT:
2340 return UNLT_EXPR;
2341 case COMPCODE_UNEQ:
2342 return UNEQ_EXPR;
2343 case COMPCODE_UNLE:
2344 return UNLE_EXPR;
2345 case COMPCODE_UNGT:
2346 return UNGT_EXPR;
2347 case COMPCODE_LTGT:
2348 return LTGT_EXPR;
2349 case COMPCODE_UNGE:
2350 return UNGE_EXPR;
2351 default:
2352 gcc_unreachable ();
2353 }
2354 }
2355
2356 /* Return a tree for the comparison which is the combination of
2357 doing the AND or OR (depending on CODE) of the two operations LCODE
2358 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2359 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2360 if this makes the transformation invalid. */
2361
2362 tree
2363 combine_comparisons (location_t loc,
2364 enum tree_code code, enum tree_code lcode,
2365 enum tree_code rcode, tree truth_type,
2366 tree ll_arg, tree lr_arg)
2367 {
2368 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2369 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2370 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2371 int compcode;
2372
2373 switch (code)
2374 {
2375 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2376 compcode = lcompcode & rcompcode;
2377 break;
2378
2379 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2380 compcode = lcompcode | rcompcode;
2381 break;
2382
2383 default:
2384 return NULL_TREE;
2385 }
2386
2387 if (!honor_nans)
2388 {
2389 /* Eliminate unordered comparisons, as well as LTGT and ORD
2390 which are not used unless the mode has NaNs. */
2391 compcode &= ~COMPCODE_UNORD;
2392 if (compcode == COMPCODE_LTGT)
2393 compcode = COMPCODE_NE;
2394 else if (compcode == COMPCODE_ORD)
2395 compcode = COMPCODE_TRUE;
2396 }
2397 else if (flag_trapping_math)
2398 {
2399 /* Check that the original operation and the optimized ones will trap
2400 under the same condition. */
2401 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2402 && (lcompcode != COMPCODE_EQ)
2403 && (lcompcode != COMPCODE_ORD);
2404 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2405 && (rcompcode != COMPCODE_EQ)
2406 && (rcompcode != COMPCODE_ORD);
2407 bool trap = (compcode & COMPCODE_UNORD) == 0
2408 && (compcode != COMPCODE_EQ)
2409 && (compcode != COMPCODE_ORD);
2410
2411 /* In a short-circuited boolean expression the LHS might be
2412 such that the RHS, if evaluated, will never trap. For
2413 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2414 if neither x nor y is NaN. (This is a mixed blessing: for
2415 example, the expression above will never trap, hence
2416 optimizing it to x < y would be invalid). */
2417 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2418 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2419 rtrap = false;
2420
2421 /* If the comparison was short-circuited, and only the RHS
2422 trapped, we may now generate a spurious trap. */
2423 if (rtrap && !ltrap
2424 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2425 return NULL_TREE;
2426
2427 /* If we changed the conditions that cause a trap, we lose. */
2428 if ((ltrap || rtrap) != trap)
2429 return NULL_TREE;
2430 }
2431
2432 if (compcode == COMPCODE_TRUE)
2433 return constant_boolean_node (true, truth_type);
2434 else if (compcode == COMPCODE_FALSE)
2435 return constant_boolean_node (false, truth_type);
2436 else
2437 {
2438 enum tree_code tcode;
2439
2440 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2441 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2442 }
2443 }
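
/* Worked example (editorial illustration): the compcode encoding turns
   the combination into a single bit operation.  For

     (x < y) || (x == y)

   lcompcode is COMPCODE_LT, rcompcode is COMPCODE_EQ, and the
   TRUTH_ORIF_EXPR case computes COMPCODE_LT | COMPCODE_EQ, which is
   COMPCODE_LE, so the whole expression folds to x <= y (subject to the
   NaN and trapping checks above).  */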
2444 \f
2445 /* Return nonzero if two operands (typically of the same tree node)
2446 are necessarily equal. If either argument has side-effects this
2447 function returns zero. FLAGS modifies behavior as follows:
2448
2449 If OEP_ONLY_CONST is set, only return nonzero for constants.
2450 This function tests whether the operands are indistinguishable;
2451 it does not test whether they are equal using C's == operation.
2452 The distinction is important for IEEE floating point, because
2453 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2454 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2455
2456 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2457 even though it may hold multiple values during a function.
2458 This is because a GCC tree node guarantees that nothing else is
2459 executed between the evaluation of its "operands" (which may often
2460 be evaluated in arbitrary order). Hence if the operands themselves
2461 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2462 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2463 unset means assuming isochronic (or instantaneous) tree equivalence.
2464 Unless comparing arbitrary expression trees, such as from different
2465 statements, this flag can usually be left unset.
2466
2467 If OEP_PURE_SAME is set, then pure functions with identical arguments
2468 are considered the same. It is used when the caller has other ways
2469 to ensure that global memory is unchanged in between. */
2470
2471 int
2472 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2473 {
2474 /* If either is ERROR_MARK, they aren't equal. */
2475 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2476 || TREE_TYPE (arg0) == error_mark_node
2477 || TREE_TYPE (arg1) == error_mark_node)
2478 return 0;
2479
2480 /* Similarly, if either does not have a type (like a released SSA name),
2481 they aren't equal. */
2482 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2483 return 0;
2484
2485 /* Check equality of integer constants before bailing out due to
2486 precision differences. */
2487 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2488 return tree_int_cst_equal (arg0, arg1);
2489
2490 /* If both types don't have the same signedness, then we can't consider
2491 them equal. We must check this before the STRIP_NOPS calls
2492 because they may change the signedness of the arguments. As pointers
2493 strictly don't have a signedness, require either two pointers or
2494 two non-pointers as well. */
2495 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2496 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2497 return 0;
2498
2499 /* We cannot consider pointers to different address space equal. */
2500 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2501 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2502 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2503 return 0;
2504
2505 /* If both types don't have the same precision, then it is not safe
2506 to strip NOPs. */
2507 if (element_precision (TREE_TYPE (arg0))
2508 != element_precision (TREE_TYPE (arg1)))
2509 return 0;
2510
2511 STRIP_NOPS (arg0);
2512 STRIP_NOPS (arg1);
2513
2514 /* In case both args are comparisons but with different comparison
2515 code, try to swap the comparison operands of one arg to produce
2516 a match and compare that variant. */
2517 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2518 && COMPARISON_CLASS_P (arg0)
2519 && COMPARISON_CLASS_P (arg1))
2520 {
2521 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2522
2523 if (TREE_CODE (arg0) == swap_code)
2524 return operand_equal_p (TREE_OPERAND (arg0, 0),
2525 TREE_OPERAND (arg1, 1), flags)
2526 && operand_equal_p (TREE_OPERAND (arg0, 1),
2527 TREE_OPERAND (arg1, 0), flags);
2528 }
2529
2530 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2531 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2532 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2533 return 0;
2534
2535 /* This is needed for conversions and for COMPONENT_REF.
2536 Might as well play it safe and always test this. */
2537 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2538 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2539 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2540 return 0;
2541
2542 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2543 We don't care about side effects in that case because the SAVE_EXPR
2544 takes care of that for us. In all other cases, two expressions are
2545 equal if they have no side effects. If we have two identical
2546 expressions with side effects that should be treated the same due
2547 to the only side effects being identical SAVE_EXPR's, that will
2548 be detected in the recursive calls below.
2549 If we are taking an invariant address of two identical objects
2550 they are necessarily equal as well. */
2551 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2552 && (TREE_CODE (arg0) == SAVE_EXPR
2553 || (flags & OEP_CONSTANT_ADDRESS_OF)
2554 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2555 return 1;
2556
2557 /* Next handle constant cases, those for which we can return 1 even
2558 if ONLY_CONST is set. */
2559 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2560 switch (TREE_CODE (arg0))
2561 {
2562 case INTEGER_CST:
2563 return tree_int_cst_equal (arg0, arg1);
2564
2565 case FIXED_CST:
2566 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2567 TREE_FIXED_CST (arg1));
2568
2569 case REAL_CST:
2570 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2571 TREE_REAL_CST (arg1)))
2572 return 1;
2573
2574
2575 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2576 {
2577 /* If we do not distinguish between signed and unsigned zero,
2578 consider them equal. */
2579 if (real_zerop (arg0) && real_zerop (arg1))
2580 return 1;
2581 }
2582 return 0;
2583
2584 case VECTOR_CST:
2585 {
2586 unsigned i;
2587
2588 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2589 return 0;
2590
2591 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2592 {
2593 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2594 VECTOR_CST_ELT (arg1, i), flags))
2595 return 0;
2596 }
2597 return 1;
2598 }
2599
2600 case COMPLEX_CST:
2601 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2602 flags)
2603 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2604 flags));
2605
2606 case STRING_CST:
2607 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2608 && ! memcmp (TREE_STRING_POINTER (arg0),
2609 TREE_STRING_POINTER (arg1),
2610 TREE_STRING_LENGTH (arg0)));
2611
2612 case ADDR_EXPR:
2613 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2614 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2615 ? OEP_CONSTANT_ADDRESS_OF : 0);
2616 default:
2617 break;
2618 }
2619
2620 if (flags & OEP_ONLY_CONST)
2621 return 0;
2622
2623 /* Define macros to test an operand from arg0 and arg1 for equality and a
2624 variant that allows null and views null as being different from any
2625 non-null value. In the latter case, if either is null, then both
2626 must be; otherwise, do the normal comparison. */
2627 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2628 TREE_OPERAND (arg1, N), flags)
2629
2630 #define OP_SAME_WITH_NULL(N) \
2631 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2632 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2633
2634 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2635 {
2636 case tcc_unary:
2637 /* Two conversions are equal only if signedness and modes match. */
2638 switch (TREE_CODE (arg0))
2639 {
2640 CASE_CONVERT:
2641 case FIX_TRUNC_EXPR:
2642 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2643 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2644 return 0;
2645 break;
2646 default:
2647 break;
2648 }
2649
2650 return OP_SAME (0);
2651
2652
2653 case tcc_comparison:
2654 case tcc_binary:
2655 if (OP_SAME (0) && OP_SAME (1))
2656 return 1;
2657
2658 /* For commutative ops, allow the other order. */
2659 return (commutative_tree_code (TREE_CODE (arg0))
2660 && operand_equal_p (TREE_OPERAND (arg0, 0),
2661 TREE_OPERAND (arg1, 1), flags)
2662 && operand_equal_p (TREE_OPERAND (arg0, 1),
2663 TREE_OPERAND (arg1, 0), flags));
2664
2665 case tcc_reference:
2666 /* If either of the pointer (or reference) expressions we are
2667 dereferencing contain a side effect, these cannot be equal,
2668 but their addresses can be. */
2669 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2670 && (TREE_SIDE_EFFECTS (arg0)
2671 || TREE_SIDE_EFFECTS (arg1)))
2672 return 0;
2673
2674 switch (TREE_CODE (arg0))
2675 {
2676 case INDIRECT_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 return OP_SAME (0);
2679
2680 case REALPART_EXPR:
2681 case IMAGPART_EXPR:
2682 return OP_SAME (0);
2683
2684 case TARGET_MEM_REF:
2685 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2686 /* Require equal extra operands and then fall through to MEM_REF
2687 handling of the two common operands. */
2688 if (!OP_SAME_WITH_NULL (2)
2689 || !OP_SAME_WITH_NULL (3)
2690 || !OP_SAME_WITH_NULL (4))
2691 return 0;
2692 /* Fallthru. */
2693 case MEM_REF:
2694 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2695 /* Require equal access sizes, and similar pointer types.
2696 We can have incomplete types for array references of
2697 variable-sized arrays from the Fortran frontend
2698 though. Also verify the types are compatible. */
2699 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2700 || (TYPE_SIZE (TREE_TYPE (arg0))
2701 && TYPE_SIZE (TREE_TYPE (arg1))
2702 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2703 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2704 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2705 && alias_ptr_types_compatible_p
2706 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2707 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2708 && OP_SAME (0) && OP_SAME (1));
2709
2710 case ARRAY_REF:
2711 case ARRAY_RANGE_REF:
2712 /* Operands 2 and 3 may be null.
2713 Compare the array index by value first if it is constant, as we
2714 may have different types but the same value here. */
2715 if (!OP_SAME (0))
2716 return 0;
2717 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2718 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2719 TREE_OPERAND (arg1, 1))
2720 || OP_SAME (1))
2721 && OP_SAME_WITH_NULL (2)
2722 && OP_SAME_WITH_NULL (3));
2723
2724 case COMPONENT_REF:
2725 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2726 may be NULL when we're called to compare MEM_EXPRs. */
2727 if (!OP_SAME_WITH_NULL (0)
2728 || !OP_SAME (1))
2729 return 0;
2730 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2731 return OP_SAME_WITH_NULL (2);
2732
2733 case BIT_FIELD_REF:
2734 if (!OP_SAME (0))
2735 return 0;
2736 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2737 return OP_SAME (1) && OP_SAME (2);
2738
2739 default:
2740 return 0;
2741 }
2742
2743 case tcc_expression:
2744 switch (TREE_CODE (arg0))
2745 {
2746 case ADDR_EXPR:
2747 case TRUTH_NOT_EXPR:
2748 return OP_SAME (0);
2749
2750 case TRUTH_ANDIF_EXPR:
2751 case TRUTH_ORIF_EXPR:
2752 return OP_SAME (0) && OP_SAME (1);
2753
2754 case FMA_EXPR:
2755 case WIDEN_MULT_PLUS_EXPR:
2756 case WIDEN_MULT_MINUS_EXPR:
2757 if (!OP_SAME (2))
2758 return 0;
2759 /* The multiplication operands are commutative. */
2760 /* FALLTHRU */
2761
2762 case TRUTH_AND_EXPR:
2763 case TRUTH_OR_EXPR:
2764 case TRUTH_XOR_EXPR:
2765 if (OP_SAME (0) && OP_SAME (1))
2766 return 1;
2767
2768 /* Otherwise take into account this is a commutative operation. */
2769 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2770 TREE_OPERAND (arg1, 1), flags)
2771 && operand_equal_p (TREE_OPERAND (arg0, 1),
2772 TREE_OPERAND (arg1, 0), flags));
2773
2774 case COND_EXPR:
2775 case VEC_COND_EXPR:
2776 case DOT_PROD_EXPR:
2777 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2778
2779 default:
2780 return 0;
2781 }
2782
2783 case tcc_vl_exp:
2784 switch (TREE_CODE (arg0))
2785 {
2786 case CALL_EXPR:
2787 /* If the CALL_EXPRs call different functions, then they
2788 clearly cannot be equal. */
2789 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2790 flags))
2791 return 0;
2792
2793 {
2794 unsigned int cef = call_expr_flags (arg0);
2795 if (flags & OEP_PURE_SAME)
2796 cef &= ECF_CONST | ECF_PURE;
2797 else
2798 cef &= ECF_CONST;
2799 if (!cef)
2800 return 0;
2801 }
2802
2803 /* Now see if all the arguments are the same. */
2804 {
2805 const_call_expr_arg_iterator iter0, iter1;
2806 const_tree a0, a1;
2807 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2808 a1 = first_const_call_expr_arg (arg1, &iter1);
2809 a0 && a1;
2810 a0 = next_const_call_expr_arg (&iter0),
2811 a1 = next_const_call_expr_arg (&iter1))
2812 if (! operand_equal_p (a0, a1, flags))
2813 return 0;
2814
2815 /* If we get here and both argument lists are exhausted
2816 then the CALL_EXPRs are equal. */
2817 return ! (a0 || a1);
2818 }
2819 default:
2820 return 0;
2821 }
2822
2823 case tcc_declaration:
2824 /* Consider __builtin_sqrt equal to sqrt. */
2825 return (TREE_CODE (arg0) == FUNCTION_DECL
2826 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2827 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2828 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2829
2830 default:
2831 return 0;
2832 }
2833
2834 #undef OP_SAME
2835 #undef OP_SAME_WITH_NULL
2836 }
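
/* Illustrative examples (editorial, not from the original sources):
   for trees A and B naming the same variables,

     operand_equal_p (A + B, B + A, 0) returns 1 (commutative match),

   while for REAL_CST operands -0.0 and 0.0 the result is 0 whenever
   signed zeros are honored, reflecting the IEEE caveat above: the two
   constants are distinguishable even though -0.0 == 0.0.  */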
2837 \f
2838 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2839 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2840
2841 When in doubt, return 0. */
2842
2843 static int
2844 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2845 {
2846 int unsignedp1, unsignedpo;
2847 tree primarg0, primarg1, primother;
2848 unsigned int correct_width;
2849
2850 if (operand_equal_p (arg0, arg1, 0))
2851 return 1;
2852
2853 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2854 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2855 return 0;
2856
2857 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2858 and see if the inner values are the same. This removes any
2859 signedness comparison, which doesn't matter here. */
2860 primarg0 = arg0, primarg1 = arg1;
2861 STRIP_NOPS (primarg0);
2862 STRIP_NOPS (primarg1);
2863 if (operand_equal_p (primarg0, primarg1, 0))
2864 return 1;
2865
2866 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2867 actual comparison operand, ARG0.
2868
2869 First throw away any conversions to wider types
2870 already present in the operands. */
2871
2872 primarg1 = get_narrower (arg1, &unsignedp1);
2873 primother = get_narrower (other, &unsignedpo);
2874
2875 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2876 if (unsignedp1 == unsignedpo
2877 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2878 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2879 {
2880 tree type = TREE_TYPE (arg0);
2881
2882 /* Make sure shorter operand is extended the right way
2883 to match the longer operand. */
2884 primarg1 = fold_convert (signed_or_unsigned_type_for
2885 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2886
2887 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2888 return 1;
2889 }
2890
2891 return 0;
2892 }
2893 \f
2894 /* See if ARG is an expression that is either a comparison or is performing
2895 arithmetic on comparisons. The comparisons must only be comparing
2896 two different values, which will be stored in *CVAL1 and *CVAL2; if
2897 they are nonzero it means that some operands have already been found.
2898 No variables may be used anywhere else in the expression except in the
2899 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2900 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2901
2902 If this is true, return 1. Otherwise, return zero. */
2903
2904 static int
2905 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2906 {
2907 enum tree_code code = TREE_CODE (arg);
2908 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2909
2910 /* We can handle some of the tcc_expression cases here. */
2911 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2912 tclass = tcc_unary;
2913 else if (tclass == tcc_expression
2914 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2915 || code == COMPOUND_EXPR))
2916 tclass = tcc_binary;
2917
2918 else if (tclass == tcc_expression && code == SAVE_EXPR
2919 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2920 {
2921 /* If we've already found a CVAL1 or CVAL2, this expression is
2922 too complex to handle. */
2923 if (*cval1 || *cval2)
2924 return 0;
2925
2926 tclass = tcc_unary;
2927 *save_p = 1;
2928 }
2929
2930 switch (tclass)
2931 {
2932 case tcc_unary:
2933 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2934
2935 case tcc_binary:
2936 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2937 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2938 cval1, cval2, save_p));
2939
2940 case tcc_constant:
2941 return 1;
2942
2943 case tcc_expression:
2944 if (code == COND_EXPR)
2945 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2946 cval1, cval2, save_p)
2947 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2948 cval1, cval2, save_p)
2949 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2950 cval1, cval2, save_p));
2951 return 0;
2952
2953 case tcc_comparison:
2954 /* First see if we can handle the first operand, then the second. For
2955 the second operand, we know *CVAL1 can't be zero. Each side of
2956 the comparison must be one of the two values; test for the
2957 case where this isn't true by failing if the two operands
2958 are the same. */
2959
2960 if (operand_equal_p (TREE_OPERAND (arg, 0),
2961 TREE_OPERAND (arg, 1), 0))
2962 return 0;
2963
2964 if (*cval1 == 0)
2965 *cval1 = TREE_OPERAND (arg, 0);
2966 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2967 ;
2968 else if (*cval2 == 0)
2969 *cval2 = TREE_OPERAND (arg, 0);
2970 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2971 ;
2972 else
2973 return 0;
2974
2975 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2976 ;
2977 else if (*cval2 == 0)
2978 *cval2 = TREE_OPERAND (arg, 1);
2979 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2980 ;
2981 else
2982 return 0;
2983
2984 return 1;
2985
2986 default:
2987 return 0;
2988 }
2989 }
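
/* Worked example (editorial illustration): for

     (a < b) | (a == b)

   the comparison walk sets *CVAL1 = a and *CVAL2 = b and the function
   returns 1; an input such as (a < b) | (a == c) fails because a third
   value C appears in a comparison.  */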
2990 \f
2991 /* ARG is a tree that is known to contain just arithmetic operations and
2992 comparisons. Evaluate the operations in the tree substituting NEW0 for
2993 any occurrence of OLD0 as an operand of a comparison and likewise for
2994 NEW1 and OLD1. */
2995
2996 static tree
2997 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2998 tree old1, tree new1)
2999 {
3000 tree type = TREE_TYPE (arg);
3001 enum tree_code code = TREE_CODE (arg);
3002 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3003
3004 /* We can handle some of the tcc_expression cases here. */
3005 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3006 tclass = tcc_unary;
3007 else if (tclass == tcc_expression
3008 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3009 tclass = tcc_binary;
3010
3011 switch (tclass)
3012 {
3013 case tcc_unary:
3014 return fold_build1_loc (loc, code, type,
3015 eval_subst (loc, TREE_OPERAND (arg, 0),
3016 old0, new0, old1, new1));
3017
3018 case tcc_binary:
3019 return fold_build2_loc (loc, code, type,
3020 eval_subst (loc, TREE_OPERAND (arg, 0),
3021 old0, new0, old1, new1),
3022 eval_subst (loc, TREE_OPERAND (arg, 1),
3023 old0, new0, old1, new1));
3024
3025 case tcc_expression:
3026 switch (code)
3027 {
3028 case SAVE_EXPR:
3029 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3030 old1, new1);
3031
3032 case COMPOUND_EXPR:
3033 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3034 old1, new1);
3035
3036 case COND_EXPR:
3037 return fold_build3_loc (loc, code, type,
3038 eval_subst (loc, TREE_OPERAND (arg, 0),
3039 old0, new0, old1, new1),
3040 eval_subst (loc, TREE_OPERAND (arg, 1),
3041 old0, new0, old1, new1),
3042 eval_subst (loc, TREE_OPERAND (arg, 2),
3043 old0, new0, old1, new1));
3044 default:
3045 break;
3046 }
3047 /* Fall through - ??? */
3048
3049 case tcc_comparison:
3050 {
3051 tree arg0 = TREE_OPERAND (arg, 0);
3052 tree arg1 = TREE_OPERAND (arg, 1);
3053
3054 /* We need to check both for exact equality and tree equality. The
3055 former will be true if the operand has a side-effect. In that
3056 case, we know the operand occurred exactly once. */
3057
3058 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3059 arg0 = new0;
3060 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3061 arg0 = new1;
3062
3063 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3064 arg1 = new0;
3065 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3066 arg1 = new1;
3067
3068 return fold_build2_loc (loc, code, type, arg0, arg1);
3069 }
3070
3071 default:
3072 return arg;
3073 }
3074 }
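
/* Worked example (editorial illustration): continuing the
   twoval_comparison_p case above, fold can evaluate a two-valued
   expression at chosen operands, e.g.

     eval_subst (loc, (a < b) | (a == b), a, x, b, y)

   rebuilds the tree as (x < y) | (x == y), which then folds further
   when X and Y are constants.  */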
3075 \f
3076 /* Return a tree for the case when the result of an expression is RESULT
3077 converted to TYPE and OMITTED was previously an operand of the expression
3078 but is now not needed (e.g., we folded OMITTED * 0).
3079
3080 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3081 the conversion of RESULT to TYPE. */
3082
3083 tree
3084 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3085 {
3086 tree t = fold_convert_loc (loc, type, result);
3087
3088 /* If the resulting operand is an empty statement, just return the omitted
3089 statement cast to void. */
3090 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3091 return build1_loc (loc, NOP_EXPR, void_type_node,
3092 fold_ignored_result (omitted));
3093
3094 if (TREE_SIDE_EFFECTS (omitted))
3095 return build2_loc (loc, COMPOUND_EXPR, type,
3096 fold_ignored_result (omitted), t);
3097
3098 return non_lvalue_loc (loc, t);
3099 }
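
/* Illustrative example (editorial sketch): folding f () * 0 cannot
   simply produce 0 when the call has side effects, so a caller uses

     omit_one_operand_loc (loc, type, integer_zero_node, call);

   yielding the COMPOUND_EXPR (f (), 0): the call is still evaluated,
   but the result is the constant.  */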
3100
3101 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3102
3103 static tree
3104 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3105 tree omitted)
3106 {
3107 tree t = fold_convert_loc (loc, type, result);
3108
3109 /* If the resulting operand is an empty statement, just return the omitted
3110 statement cast to void. */
3111 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3112 return build1_loc (loc, NOP_EXPR, void_type_node,
3113 fold_ignored_result (omitted));
3114
3115 if (TREE_SIDE_EFFECTS (omitted))
3116 return build2_loc (loc, COMPOUND_EXPR, type,
3117 fold_ignored_result (omitted), t);
3118
3119 return pedantic_non_lvalue_loc (loc, t);
3120 }
3121
3122 /* Return a tree for the case when the result of an expression is RESULT
3123 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3124 of the expression but are now not needed.
3125
3126 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3127 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3128 evaluated before OMITTED2. Otherwise, if neither has side effects,
3129 just do the conversion of RESULT to TYPE. */
3130
3131 tree
3132 omit_two_operands_loc (location_t loc, tree type, tree result,
3133 tree omitted1, tree omitted2)
3134 {
3135 tree t = fold_convert_loc (loc, type, result);
3136
3137 if (TREE_SIDE_EFFECTS (omitted2))
3138 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3139 if (TREE_SIDE_EFFECTS (omitted1))
3140 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3141
3142 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3143 }
3144
3145 \f
3146 /* Return a simplified tree node for the truth-negation of ARG. This
3147 never alters ARG itself. We assume that ARG is an operation that
3148 returns a truth value (0 or 1).
3149
3150 FIXME: one would think we would fold the result, but it causes
3151 problems with the dominator optimizer. */
3152
3153 static tree
3154 fold_truth_not_expr (location_t loc, tree arg)
3155 {
3156 tree type = TREE_TYPE (arg);
3157 enum tree_code code = TREE_CODE (arg);
3158 location_t loc1, loc2;
3159
3160 /* If this is a comparison, we can simply invert it, except for
3161 floating-point non-equality comparisons, in which case we just
3162 enclose a TRUTH_NOT_EXPR around what we have. */
3163
3164 if (TREE_CODE_CLASS (code) == tcc_comparison)
3165 {
3166 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3167 if (FLOAT_TYPE_P (op_type)
3168 && flag_trapping_math
3169 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3170 && code != NE_EXPR && code != EQ_EXPR)
3171 return NULL_TREE;
3172
3173 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3174 if (code == ERROR_MARK)
3175 return NULL_TREE;
3176
3177 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3178 TREE_OPERAND (arg, 1));
3179 }
3180
3181 switch (code)
3182 {
3183 case INTEGER_CST:
3184 return constant_boolean_node (integer_zerop (arg), type);
3185
3186 case TRUTH_AND_EXPR:
3187 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3188 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3189 return build2_loc (loc, TRUTH_OR_EXPR, type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3191 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192
3193 case TRUTH_OR_EXPR:
3194 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3195 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3196 return build2_loc (loc, TRUTH_AND_EXPR, type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3198 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3199
3200 case TRUTH_XOR_EXPR:
3201 /* Here we can invert either operand. We invert the first operand
3202 unless the second operand is a TRUTH_NOT_EXPR in which case our
3203 result is the XOR of the first operand with the inside of the
3204 negation of the second operand. */
3205
3206 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3207 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3208 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3209 else
3210 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3211 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3212 TREE_OPERAND (arg, 1));
3213
3214 case TRUTH_ANDIF_EXPR:
3215 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3216 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3217 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3218 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3219 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3220
3221 case TRUTH_ORIF_EXPR:
3222 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3223 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3224 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3226 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3227
3228 case TRUTH_NOT_EXPR:
3229 return TREE_OPERAND (arg, 0);
3230
3231 case COND_EXPR:
3232 {
3233 tree arg1 = TREE_OPERAND (arg, 1);
3234 tree arg2 = TREE_OPERAND (arg, 2);
3235
3236 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3237 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3238
3239 /* A COND_EXPR may have a throw as one operand, which
3240 then has void type. Just leave void operands
3241 as they are. */
3242 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3243 VOID_TYPE_P (TREE_TYPE (arg1))
3244 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3245 VOID_TYPE_P (TREE_TYPE (arg2))
3246 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3247 }
3248
3249 case COMPOUND_EXPR:
3250 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3251 return build2_loc (loc, COMPOUND_EXPR, type,
3252 TREE_OPERAND (arg, 0),
3253 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3254
3255 case NON_LVALUE_EXPR:
3256 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3257 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3258
3259 CASE_CONVERT:
3260 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3261 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3262
3263 /* ... fall through ... */
3264
3265 case FLOAT_EXPR:
3266 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3267 return build1_loc (loc, TREE_CODE (arg), type,
3268 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3269
3270 case BIT_AND_EXPR:
3271 if (!integer_onep (TREE_OPERAND (arg, 1)))
3272 return NULL_TREE;
3273 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3274
3275 case SAVE_EXPR:
3276 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3277
3278 case CLEANUP_POINT_EXPR:
3279 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3280 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3281 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3282
3283 default:
3284 return NULL_TREE;
3285 }
3286 }
3287
3288 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3289 assume that ARG is an operation that returns a truth value (0 or 1
3290 for scalars, 0 or -1 for vectors). Return the folded expression if
3291 folding is successful. Otherwise, return NULL_TREE. */
3292
3293 static tree
3294 fold_invert_truthvalue (location_t loc, tree arg)
3295 {
3296 tree type = TREE_TYPE (arg);
3297 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3298 ? BIT_NOT_EXPR
3299 : TRUTH_NOT_EXPR,
3300 type, arg);
3301 }
3302
3303 /* Return a simplified tree node for the truth-negation of ARG. This
3304 never alters ARG itself. We assume that ARG is an operation that
3305 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3306
3307 tree
3308 invert_truthvalue_loc (location_t loc, tree arg)
3309 {
3310 if (TREE_CODE (arg) == ERROR_MARK)
3311 return arg;
3312
3313 tree type = TREE_TYPE (arg);
3314 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3315 ? BIT_NOT_EXPR
3316 : TRUTH_NOT_EXPR,
3317 type, arg);
3318 }
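
/* Illustrative examples (editorial, not from the original sources):
   inversion applies De Morgan's laws where fold_truth_not_expr can,

     !(a && b)  ->  !a || !b
     !(x < y)   ->  x >= y   (or x unge y when NaNs are honored)

   and for vector truth values it builds BIT_NOT_EXPR rather than
   TRUTH_NOT_EXPR, matching the 0 / -1 encoding noted above.  */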
3319
3320 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3321 operands are another bit-wise operation with a common input. If so,
3322 distribute the bit operations to save an operation and possibly two if
3323 constants are involved. For example, convert
3324 (A | B) & (A | C) into A | (B & C)
3325 Further simplification will occur if B and C are constants.
3326
3327 If this optimization cannot be done, 0 will be returned. */
3328
3329 static tree
3330 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3331 tree arg0, tree arg1)
3332 {
3333 tree common;
3334 tree left, right;
3335
3336 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3337 || TREE_CODE (arg0) == code
3338 || (TREE_CODE (arg0) != BIT_AND_EXPR
3339 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3340 return 0;
3341
3342 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3343 {
3344 common = TREE_OPERAND (arg0, 0);
3345 left = TREE_OPERAND (arg0, 1);
3346 right = TREE_OPERAND (arg1, 1);
3347 }
3348 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3349 {
3350 common = TREE_OPERAND (arg0, 0);
3351 left = TREE_OPERAND (arg0, 1);
3352 right = TREE_OPERAND (arg1, 0);
3353 }
3354 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3355 {
3356 common = TREE_OPERAND (arg0, 1);
3357 left = TREE_OPERAND (arg0, 0);
3358 right = TREE_OPERAND (arg1, 1);
3359 }
3360 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3361 {
3362 common = TREE_OPERAND (arg0, 1);
3363 left = TREE_OPERAND (arg0, 0);
3364 right = TREE_OPERAND (arg1, 0);
3365 }
3366 else
3367 return 0;
3368
3369 common = fold_convert_loc (loc, type, common);
3370 left = fold_convert_loc (loc, type, left);
3371 right = fold_convert_loc (loc, type, right);
3372 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3373 fold_build2_loc (loc, code, type, left, right));
3374 }
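
/* Worked example (editorial illustration): with constants involved the
   distribution saves two operations, e.g.

     (x | 4) & (x | 1)  ->  x | (4 & 1)  ->  x | 0  ->  x

   once the inner fold_build2_loc call folds the constant operands.  */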
3375
3376 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3377 with code CODE. This optimization is unsafe: it reassociates real divisions and may change rounding. */
3378 static tree
3379 distribute_real_division (location_t loc, enum tree_code code, tree type,
3380 tree arg0, tree arg1)
3381 {
3382 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3383 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3384
3385 /* (A / C) +- (B / C) -> (A +- B) / C. */
3386 if (mul0 == mul1
3387 && operand_equal_p (TREE_OPERAND (arg0, 1),
3388 TREE_OPERAND (arg1, 1), 0))
3389 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3390 fold_build2_loc (loc, code, type,
3391 TREE_OPERAND (arg0, 0),
3392 TREE_OPERAND (arg1, 0)),
3393 TREE_OPERAND (arg0, 1));
3394
3395 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3396 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3397 TREE_OPERAND (arg1, 0), 0)
3398 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3399 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3400 {
3401 REAL_VALUE_TYPE r0, r1;
3402 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3403 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3404 if (!mul0)
3405 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3406 if (!mul1)
3407 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3408 real_arithmetic (&r0, code, &r0, &r1);
3409 return fold_build2_loc (loc, MULT_EXPR, type,
3410 TREE_OPERAND (arg0, 0),
3411 build_real (type, r0));
3412 }
3413
3414 return NULL_TREE;
3415 }
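
/* Worked example (editorial illustration of why this is unsafe):

     x / 4.0 + x / 4.0  ->  (x + x) / 4.0
     x / 4.0 + x / 8.0  ->  x * (0.25 + 0.125)  ->  x * 0.375

   The second rewrite evaluates 1/C1 + 1/C2 in compile-time
   REAL_VALUE_TYPE arithmetic, which can round differently from the two
   divisions executed at run time.  */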
3416 \f
3417 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3418 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3419
3420 static tree
3421 make_bit_field_ref (location_t loc, tree inner, tree type,
3422 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3423 {
3424 tree result, bftype;
3425
3426 if (bitpos == 0)
3427 {
3428 tree size = TYPE_SIZE (TREE_TYPE (inner));
3429 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3430 || POINTER_TYPE_P (TREE_TYPE (inner)))
3431 && tree_fits_shwi_p (size)
3432 && tree_to_shwi (size) == bitsize)
3433 return fold_convert_loc (loc, type, inner);
3434 }
3435
3436 bftype = type;
3437 if (TYPE_PRECISION (bftype) != bitsize
3438 || TYPE_UNSIGNED (bftype) == !unsignedp)
3439 bftype = build_nonstandard_integer_type (bitsize, 0);
3440
3441 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3442 size_int (bitsize), bitsize_int (bitpos));
3443
3444 if (bftype != type)
3445 result = fold_convert_loc (loc, type, result);
3446
3447 return result;
3448 }
3449
3450 /* Optimize a bit-field compare.
3451
3452 There are two cases: First is a compare against a constant and the
3453 second is a comparison of two items where the fields are at the same
3454 bit position relative to the start of a chunk (byte, halfword, word)
3455 large enough to contain it. In these cases we can avoid the shift
3456 implicit in bitfield extractions.
3457
3458 For constants, we emit a compare of the shifted constant with the
3459 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3460 compared. For two fields at the same position, we do the ANDs with the
3461 similar mask and compare the result of the ANDs.
3462
3463 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3464 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3465 are the left and right operands of the comparison, respectively.
3466
3467 If the optimization described above can be done, we return the resulting
3468 tree. Otherwise we return zero. */
3469
3470 static tree
3471 optimize_bit_field_compare (location_t loc, enum tree_code code,
3472 tree compare_type, tree lhs, tree rhs)
3473 {
3474 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3475 tree type = TREE_TYPE (lhs);
3476 tree signed_type, unsigned_type;
3477 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3478 enum machine_mode lmode, rmode, nmode;
3479 int lunsignedp, runsignedp;
3480 int lvolatilep = 0, rvolatilep = 0;
3481 tree linner, rinner = NULL_TREE;
3482 tree mask;
3483 tree offset;
3484
3485 /* Get all the information about the extractions being done. If the bit size
3486 is the same as the size of the underlying object, we aren't doing an
3487 extraction at all and so can do nothing. We also don't want to
3488 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3489 then will no longer be able to replace it. */
3490 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3491 &lunsignedp, &lvolatilep);
3492 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3493 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3494 return 0;
3495
3496 if (!const_p)
3497 {
3498 /* If this is not a constant, we can only do something if bit positions,
3499 sizes, and signedness are the same. */
3500 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3501 &runsignedp, &rvolatilep);
3502
3503 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3504 || lunsignedp != runsignedp || offset != 0
3505 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3506 return 0;
3507 }
3508
3509 /* See if we can find a mode to refer to this field. We should be able to,
3510 but fail if we can't. */
3511 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3512 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3513 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3514 TYPE_ALIGN (TREE_TYPE (rinner))),
3515 word_mode, false);
3516 if (nmode == VOIDmode)
3517 return 0;
3518
3519 /* Set signed and unsigned types of the precision of this mode for the
3520 shifts below. */
3521 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3522 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3523
3524 /* Compute the bit position and size for the new reference and our offset
3525 within it. If the new reference is the same size as the original, we
3526 won't optimize anything, so return zero. */
3527 nbitsize = GET_MODE_BITSIZE (nmode);
3528 nbitpos = lbitpos & ~ (nbitsize - 1);
3529 lbitpos -= nbitpos;
3530 if (nbitsize == lbitsize)
3531 return 0;
3532
3533 if (BYTES_BIG_ENDIAN)
3534 lbitpos = nbitsize - lbitsize - lbitpos;
3535
3536 /* Make the mask to be used against the extracted field. */
3537 mask = build_int_cst_type (unsigned_type, -1);
3538 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3539 mask = const_binop (RSHIFT_EXPR, mask,
3540 size_int (nbitsize - lbitsize - lbitpos));
3541
3542 if (! const_p)
3543 /* If not comparing with constant, just rework the comparison
3544 and return. */
3545 return fold_build2_loc (loc, code, compare_type,
3546 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3547 make_bit_field_ref (loc, linner,
3548 unsigned_type,
3549 nbitsize, nbitpos,
3550 1),
3551 mask),
3552 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3553 make_bit_field_ref (loc, rinner,
3554 unsigned_type,
3555 nbitsize, nbitpos,
3556 1),
3557 mask));
3558
3559 /* Otherwise, we are handling the constant case. See if the constant is too
3560 big for the field. Warn and return a tree for 0 (false) if so. We do
3561 this not only for its own sake, but to avoid having to test for this
3562 error case below. If we didn't, we might generate wrong code.
3563
3564 For unsigned fields, the constant shifted right by the field length should
3565 be all zero. For signed fields, the high-order bits should agree with
3566 the sign bit. */
3567
3568 if (lunsignedp)
3569 {
3570 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3571 fold_convert_loc (loc,
3572 unsigned_type, rhs),
3573 size_int (lbitsize))))
3574 {
3575 warning (0, "comparison is always %d due to width of bit-field",
3576 code == NE_EXPR);
3577 return constant_boolean_node (code == NE_EXPR, compare_type);
3578 }
3579 }
3580 else
3581 {
3582 tree tem = const_binop (RSHIFT_EXPR,
3583 fold_convert_loc (loc, signed_type, rhs),
3584 size_int (lbitsize - 1));
3585 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3586 {
3587 warning (0, "comparison is always %d due to width of bit-field",
3588 code == NE_EXPR);
3589 return constant_boolean_node (code == NE_EXPR, compare_type);
3590 }
3591 }
3592
3593 /* Single-bit compares should always be against zero. */
3594 if (lbitsize == 1 && ! integer_zerop (rhs))
3595 {
3596 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3597 rhs = build_int_cst (type, 0);
3598 }
3599
3600 /* Make a new bitfield reference, shift the constant over the
3601 appropriate number of bits and mask it with the computed mask
3602 (in case this was a signed field). */
3603 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3604
3605 rhs = const_binop (BIT_AND_EXPR,
3606 const_binop (LSHIFT_EXPR,
3607 fold_convert_loc (loc, unsigned_type, rhs),
3608 size_int (lbitpos)),
3609 mask);
3610
3611 lhs = build2_loc (loc, code, compare_type,
3612 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3613 return lhs;
3614 }
3615 \f
3616 /* Subroutine for fold_truth_andor_1: decode a field reference.
3617
3618 If EXP is a component reference, we return the innermost reference.
3619
3620 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3621 set to the starting bit number.
3622
3623 If the innermost field can be completely contained in a mode-sized
3624 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3625
3626 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3627 otherwise it is not changed.
3628
3629 *PUNSIGNEDP is set to the signedness of the field.
3630
3631 *PMASK is set to the mask used. This is either contained in a
3632 BIT_AND_EXPR or derived from the width of the field.
3633
3634 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3635
3636 Return 0 if this is not a component reference or is one that we can't
3637 do anything with. */
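/* As an illustration (hypothetical declaration, not from this file):
   for "struct S { unsigned f : 5; } s;" the expression "s.f & 12"
   decodes to the innermost reference "s" with *PBITSIZE == 5,
   *PAND_MASK == 12 and *PMASK == 12 (the 5-bit field mask 0x1f
   ANDed with 12).  */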
3638
3639 static tree
3640 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3641 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3642 int *punsignedp, int *pvolatilep,
3643 tree *pmask, tree *pand_mask)
3644 {
3645 tree outer_type = 0;
3646 tree and_mask = 0;
3647 tree mask, inner, offset;
3648 tree unsigned_type;
3649 unsigned int precision;
3650
3651 /* All the optimizations using this function assume integer fields.
3652 There are problems with FP fields since the type_for_size call
3653 below can fail for, e.g., XFmode. */
3654 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3655 return 0;
3656
3657 /* We are interested in the bare arrangement of bits, so strip everything
3658 that doesn't affect the machine mode. However, record the type of the
3659 outermost expression if it may matter below. */
3660 if (CONVERT_EXPR_P (exp)
3661 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3662 outer_type = TREE_TYPE (exp);
3663 STRIP_NOPS (exp);
3664
3665 if (TREE_CODE (exp) == BIT_AND_EXPR)
3666 {
3667 and_mask = TREE_OPERAND (exp, 1);
3668 exp = TREE_OPERAND (exp, 0);
3669 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3670 if (TREE_CODE (and_mask) != INTEGER_CST)
3671 return 0;
3672 }
3673
3674 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3675 punsignedp, pvolatilep);
3676 if ((inner == exp && and_mask == 0)
3677 || *pbitsize < 0 || offset != 0
3678 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3679 return 0;
3680
3681 /* If the number of bits in the reference is the same as the bitsize of
3682 the outer type, then the outer type gives the signedness. Otherwise
3683 (in case of a small bitfield) the signedness is unchanged. */
3684 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3685 *punsignedp = TYPE_UNSIGNED (outer_type);
3686
3687 /* Compute the mask to access the bitfield. */
3688 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3689 precision = TYPE_PRECISION (unsigned_type);
3690
3691 mask = build_int_cst_type (unsigned_type, -1);
3692
3693 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3694 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3695
3696 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3697 if (and_mask != 0)
3698 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3699 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3700
3701 *pmask = mask;
3702 *pand_mask = and_mask;
3703 return inner;
3704 }
3705
3706 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3707 bit positions. */
3708
3709 static int
3710 all_ones_mask_p (const_tree mask, int size)
3711 {
3712 tree type = TREE_TYPE (mask);
3713 unsigned int precision = TYPE_PRECISION (type);
3714 tree tmask;
3715
3716 tmask = build_int_cst_type (signed_type_for (type), -1);
3717
3718 return
3719 tree_int_cst_equal (mask,
3720 const_binop (RSHIFT_EXPR,
3721 const_binop (LSHIFT_EXPR, tmask,
3722 size_int (precision - size)),
3723 size_int (precision - size)));
3724 }
3725
3726 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3727 represents the sign bit of EXP's type. If EXP represents a sign
3728 or zero extension, also test VAL against the unextended type.
3729 The return value is the (sub)expression whose sign bit is VAL,
3730 or NULL_TREE otherwise. */
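/* For example (illustrative): if EXP has a 32-bit integral type, only
   VAL == 0x80000000 matches; for "(int) c" with C of a signed 8-bit
   type, VAL == 0x80 is also accepted, tested against the narrower
   type.  */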
3731
3732 static tree
3733 sign_bit_p (tree exp, const_tree val)
3734 {
3735 unsigned HOST_WIDE_INT mask_lo, lo;
3736 HOST_WIDE_INT mask_hi, hi;
3737 int width;
3738 tree t;
3739
3740 /* Tree EXP must have an integral type. */
3741 t = TREE_TYPE (exp);
3742 if (! INTEGRAL_TYPE_P (t))
3743 return NULL_TREE;
3744
3745 /* Tree VAL must be an integer constant. */
3746 if (TREE_CODE (val) != INTEGER_CST
3747 || TREE_OVERFLOW (val))
3748 return NULL_TREE;
3749
3750 width = TYPE_PRECISION (t);
3751 if (width > HOST_BITS_PER_WIDE_INT)
3752 {
3753 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3754 lo = 0;
3755
3756 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3757 mask_lo = -1;
3758 }
3759 else
3760 {
3761 hi = 0;
3762 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3763
3764 mask_hi = 0;
3765 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3766 }
3767
3768 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3769 treat VAL as if it were unsigned. */
3770 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3771 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3772 return exp;
3773
3774 /* Handle extension from a narrower type. */
3775 if (TREE_CODE (exp) == NOP_EXPR
3776 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3777 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3778
3779 return NULL_TREE;
3780 }
3781
3782 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3783 to be evaluated unconditionally. */
3784
3785 static int
3786 simple_operand_p (const_tree exp)
3787 {
3788 /* Strip any conversions that don't change the machine mode. */
3789 STRIP_NOPS (exp);
3790
3791 return (CONSTANT_CLASS_P (exp)
3792 || TREE_CODE (exp) == SSA_NAME
3793 || (DECL_P (exp)
3794 && ! TREE_ADDRESSABLE (exp)
3795 && ! TREE_THIS_VOLATILE (exp)
3796 && ! DECL_NONLOCAL (exp)
3797 /* Don't regard global variables as simple. They may be
3798 allocated in ways unknown to the compiler (shared memory,
3799 #pragma weak, etc). */
3800 && ! TREE_PUBLIC (exp)
3801 && ! DECL_EXTERNAL (exp)
3802 /* Weakrefs are not safe to be read, since they can be NULL.
3803 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3804 have DECL_WEAK flag set. */
3805 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3806 /* Loading a static variable is unduly expensive, but global
3807 registers aren't expensive. */
3808 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3809 }
3810
3811 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3812 to be evaluated unconditionally.
3813 In addition to simple_operand_p, we assume that comparisons, conversions,
3814 and logic-not operations are simple, if their operands are simple, too. */
3815
3816 static bool
3817 simple_operand_p_2 (tree exp)
3818 {
3819 enum tree_code code;
3820
3821 if (TREE_SIDE_EFFECTS (exp)
3822 || tree_could_trap_p (exp))
3823 return false;
3824
3825 while (CONVERT_EXPR_P (exp))
3826 exp = TREE_OPERAND (exp, 0);
3827
3828 code = TREE_CODE (exp);
3829
3830 if (TREE_CODE_CLASS (code) == tcc_comparison)
3831 return (simple_operand_p (TREE_OPERAND (exp, 0))
3832 && simple_operand_p (TREE_OPERAND (exp, 1)));
3833
3834 if (code == TRUTH_NOT_EXPR)
3835 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3836
3837 return simple_operand_p (exp);
3838 }
3839
3840 \f
3841 /* The following functions are subroutines to fold_range_test and allow it to
3842 try to change a logical combination of comparisons into a range test.
3843
3844 For example, both
3845 X == 2 || X == 3 || X == 4 || X == 5
3846 and
3847 X >= 2 && X <= 5
3848 are converted to
3849 (unsigned) (X - 2) <= 3
3850
3851 We describe each set of comparisons as being either inside or outside
3852 a range, using a variable named like IN_P, and then describe the
3853 range with a lower and upper bound. If one of the bounds is omitted,
3854 it represents either the highest or lowest value of the type.
3855
3856 In the comments below, we represent a range by two numbers in brackets
3857 preceded by a "+" to designate being inside that range, or a "-" to
3858 designate being outside that range, so the condition can be inverted by
3859 flipping the prefix. An omitted bound is represented by a "-". For
3860 example, "- [-, 10]" means being outside the range starting at the lowest
3861 possible value and ending at 10, in other words, being greater than 10.
3862 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3863 always false.
3864
3865 We set up things so that the missing bounds are handled in a consistent
3866 manner so neither a missing bound nor "true" and "false" need to be
3867 handled using a special case. */
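/* A concrete sketch of the underlying trick, assuming 32-bit unsigned
   arithmetic (illustration only):

     X >= 2 && X <= 5          the range + [2, 5]
     (unsigned) (X - 2) <= 3   an equivalent single test

   The subtraction wraps values below 2 around to very large unsigned
   numbers, which then fail the <= test.  */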
3868
3869 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3870 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3871 and UPPER1_P are nonzero if the respective argument is an upper bound
3872 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3873 must be specified for a comparison. ARG1 will be converted to ARG0's
3874 type if both are specified. */
3875
3876 static tree
3877 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3878 tree arg1, int upper1_p)
3879 {
3880 tree tem;
3881 int result;
3882 int sgn0, sgn1;
3883
3884 /* If neither arg represents infinity, do the normal operation.
3885 Else, if not a comparison, return infinity. Else handle the special
3886 comparison rules. Note that most of the cases below won't occur, but
3887 are handled for consistency. */
3888
3889 if (arg0 != 0 && arg1 != 0)
3890 {
3891 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3892 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3893 STRIP_NOPS (tem);
3894 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3895 }
3896
3897 if (TREE_CODE_CLASS (code) != tcc_comparison)
3898 return 0;
3899
3900 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3901 for neither. In real mathematics we could not assume two open-ended
3902 ranges are the same. But this is computer arithmetic, where numbers
3903 are finite, so we may replace any unbounded bound with a value Z
3904 greater than any representable number. This permits
3905 us to treat unbounded ranges as equal. */
3906 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3907 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3908 switch (code)
3909 {
3910 case EQ_EXPR:
3911 result = sgn0 == sgn1;
3912 break;
3913 case NE_EXPR:
3914 result = sgn0 != sgn1;
3915 break;
3916 case LT_EXPR:
3917 result = sgn0 < sgn1;
3918 break;
3919 case LE_EXPR:
3920 result = sgn0 <= sgn1;
3921 break;
3922 case GT_EXPR:
3923 result = sgn0 > sgn1;
3924 break;
3925 case GE_EXPR:
3926 result = sgn0 >= sgn1;
3927 break;
3928 default:
3929 gcc_unreachable ();
3930 }
3931
3932 return constant_boolean_node (result, type);
3933 }
3934 \f
3935 /* Helper routine for make_range. Perform one step for it, return
3936 the new expression if the loop should continue or NULL_TREE if it should
3937 stop. */
3938
3939 tree
3940 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3941 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3942 bool *strict_overflow_p)
3943 {
3944 tree arg0_type = TREE_TYPE (arg0);
3945 tree n_low, n_high, low = *p_low, high = *p_high;
3946 int in_p = *p_in_p, n_in_p;
3947
3948 switch (code)
3949 {
3950 case TRUTH_NOT_EXPR:
3951 /* We can only do something if the range is testing for zero. */
3952 if (low == NULL_TREE || high == NULL_TREE
3953 || ! integer_zerop (low) || ! integer_zerop (high))
3954 return NULL_TREE;
3955 *p_in_p = ! in_p;
3956 return arg0;
3957
3958 case EQ_EXPR: case NE_EXPR:
3959 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3960 /* We can only do something if the range is testing for zero
3961 and if the second operand is an integer constant. Note that
3962 saying something is "in" the range we make is done by
3963 complementing IN_P, since it is set in the initial case of
3964 being not equal to zero; "out" is leaving it alone. */
3965 if (low == NULL_TREE || high == NULL_TREE
3966 || ! integer_zerop (low) || ! integer_zerop (high)
3967 || TREE_CODE (arg1) != INTEGER_CST)
3968 return NULL_TREE;
3969
3970 switch (code)
3971 {
3972 case NE_EXPR: /* - [c, c] */
3973 low = high = arg1;
3974 break;
3975 case EQ_EXPR: /* + [c, c] */
3976 in_p = ! in_p, low = high = arg1;
3977 break;
3978 case GT_EXPR: /* - [-, c] */
3979 low = 0, high = arg1;
3980 break;
3981 case GE_EXPR: /* + [c, -] */
3982 in_p = ! in_p, low = arg1, high = 0;
3983 break;
3984 case LT_EXPR: /* - [c, -] */
3985 low = arg1, high = 0;
3986 break;
3987 case LE_EXPR: /* + [-, c] */
3988 in_p = ! in_p, low = 0, high = arg1;
3989 break;
3990 default:
3991 gcc_unreachable ();
3992 }
3993
3994 /* If this is an unsigned comparison, we also know that EXP is
3995 greater than or equal to zero. We base the range tests we make
3996 on that fact, so we record it here so we can parse existing
3997 range tests. We test arg0_type since often the return type
3998 of, e.g. EQ_EXPR, is boolean. */
3999 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4000 {
4001 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4002 in_p, low, high, 1,
4003 build_int_cst (arg0_type, 0),
4004 NULL_TREE))
4005 return NULL_TREE;
4006
4007 in_p = n_in_p, low = n_low, high = n_high;
4008
4009 /* If the high bound is missing, but we have a nonzero low
4010 bound, reverse the range so it goes from zero to the low bound
4011 minus 1. */
4012 if (high == 0 && low && ! integer_zerop (low))
4013 {
4014 in_p = ! in_p;
4015 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4016 integer_one_node, 0);
4017 low = build_int_cst (arg0_type, 0);
4018 }
4019 }
4020
4021 *p_low = low;
4022 *p_high = high;
4023 *p_in_p = in_p;
4024 return arg0;
4025
4026 case NEGATE_EXPR:
4027 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4028 low and high are non-NULL, then normalize will DTRT. */
4029 if (!TYPE_UNSIGNED (arg0_type)
4030 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4031 {
4032 if (low == NULL_TREE)
4033 low = TYPE_MIN_VALUE (arg0_type);
4034 if (high == NULL_TREE)
4035 high = TYPE_MAX_VALUE (arg0_type);
4036 }
4037
4038 /* (-x) IN [a,b] -> x in [-b, -a] */
4039 n_low = range_binop (MINUS_EXPR, exp_type,
4040 build_int_cst (exp_type, 0),
4041 0, high, 1);
4042 n_high = range_binop (MINUS_EXPR, exp_type,
4043 build_int_cst (exp_type, 0),
4044 0, low, 0);
4045 if (n_high != 0 && TREE_OVERFLOW (n_high))
4046 return NULL_TREE;
4047 goto normalize;
4048
4049 case BIT_NOT_EXPR:
4050 /* ~ X -> -X - 1 */
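/* This is the two's-complement identity, e.g. ~5 == -6; the rewritten
   MINUS_EXPR is then handled by the MINUS_EXPR and NEGATE_EXPR cases
   on later iterations of make_range.  */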
4051 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4052 build_int_cst (exp_type, 1));
4053
4054 case PLUS_EXPR:
4055 case MINUS_EXPR:
4056 if (TREE_CODE (arg1) != INTEGER_CST)
4057 return NULL_TREE;
4058
4059 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4060 move a constant to the other side. */
4061 if (!TYPE_UNSIGNED (arg0_type)
4062 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4063 return NULL_TREE;
4064
4065 /* If EXP is signed, any overflow in the computation is undefined,
4066 so we don't worry about it so long as our computations on
4067 the bounds don't overflow. For unsigned, overflow is defined
4068 and this is exactly the right thing. */
4069 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4070 arg0_type, low, 0, arg1, 0);
4071 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4072 arg0_type, high, 1, arg1, 0);
4073 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4074 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4075 return NULL_TREE;
4076
4077 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4078 *strict_overflow_p = true;
4079
4080 normalize:
4081 /* Check for an unsigned range which has wrapped around the maximum
4082 value thus making n_high < n_low, and normalize it. */
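/* E.g. (8-bit unsigned, illustration): the wrapped range + [250, 5]
   is rewritten below as - [6, 249], the complement of its
   non-wrapping part.  */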
4083 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4084 {
4085 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4086 integer_one_node, 0);
4087 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4088 integer_one_node, 0);
4089
4090 /* If the range is of the form +/- [ x+1, x ], we won't
4091 be able to normalize it. But then, it represents the
4092 whole range or the empty set, so make it
4093 +/- [ -, - ]. */
4094 if (tree_int_cst_equal (n_low, low)
4095 && tree_int_cst_equal (n_high, high))
4096 low = high = 0;
4097 else
4098 in_p = ! in_p;
4099 }
4100 else
4101 low = n_low, high = n_high;
4102
4103 *p_low = low;
4104 *p_high = high;
4105 *p_in_p = in_p;
4106 return arg0;
4107
4108 CASE_CONVERT:
4109 case NON_LVALUE_EXPR:
4110 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4111 return NULL_TREE;
4112
4113 if (! INTEGRAL_TYPE_P (arg0_type)
4114 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4115 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4116 return NULL_TREE;
4117
4118 n_low = low, n_high = high;
4119
4120 if (n_low != 0)
4121 n_low = fold_convert_loc (loc, arg0_type, n_low);
4122
4123 if (n_high != 0)
4124 n_high = fold_convert_loc (loc, arg0_type, n_high);
4125
4126 /* If we're converting arg0 from an unsigned type to exp's
4127 signed type, we will be doing the comparison as unsigned.
4128 The tests above have already verified that LOW and HIGH
4129 are both positive.
4130
4131 So we have to ensure that we will handle large unsigned
4132 values the same way that the current signed bounds treat
4133 negative values. */
4134
4135 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4136 {
4137 tree high_positive;
4138 tree equiv_type;
4139 /* For fixed-point modes, we need to pass the saturating flag
4140 as the 2nd parameter. */
4141 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4142 equiv_type
4143 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4144 TYPE_SATURATING (arg0_type));
4145 else
4146 equiv_type
4147 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4148
4149 /* A range without an upper bound is, naturally, unbounded.
4150 Since convert would have cropped a very large value, use
4151 the max value for the destination type. */
4152 high_positive
4153 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4154 : TYPE_MAX_VALUE (arg0_type);
4155
4156 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4157 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4158 fold_convert_loc (loc, arg0_type,
4159 high_positive),
4160 build_int_cst (arg0_type, 1));
4161
4162 /* If the low bound is specified, "and" the range with the
4163 range for which the original unsigned value will be
4164 positive. */
4165 if (low != 0)
4166 {
4167 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4168 1, fold_convert_loc (loc, arg0_type,
4169 integer_zero_node),
4170 high_positive))
4171 return NULL_TREE;
4172
4173 in_p = (n_in_p == in_p);
4174 }
4175 else
4176 {
4177 /* Otherwise, "or" the range with the range of the input
4178 that will be interpreted as negative. */
4179 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4180 1, fold_convert_loc (loc, arg0_type,
4181 integer_zero_node),
4182 high_positive))
4183 return NULL_TREE;
4184
4185 in_p = (in_p != n_in_p);
4186 }
4187 }
4188
4189 *p_low = n_low;
4190 *p_high = n_high;
4191 *p_in_p = in_p;
4192 return arg0;
4193
4194 default:
4195 return NULL_TREE;
4196 }
4197 }
4198
4199 /* Given EXP, a logical expression, set the range it is testing into
4200 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4201 actually being tested. *PLOW and *PHIGH will be made of the same
4202 type as the returned expression. If EXP is not a comparison, we
4203 will most likely not be returning a useful value and range. Set
4204 *STRICT_OVERFLOW_P to true if the return value is only valid
4205 because signed overflow is undefined; otherwise, do not change
4206 *STRICT_OVERFLOW_P. */
4207
4208 tree
4209 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4210 bool *strict_overflow_p)
4211 {
4212 enum tree_code code;
4213 tree arg0, arg1 = NULL_TREE;
4214 tree exp_type, nexp;
4215 int in_p;
4216 tree low, high;
4217 location_t loc = EXPR_LOCATION (exp);
4218
4219 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4220 and see if we can refine the range. Some of the cases below may not
4221 happen, but it doesn't seem worth worrying about this. We keep
4222 looping as long as make_range_step can refine the range;
4223 once nothing further can be refined, we stop. */
4224
4225 in_p = 0;
4226 low = high = build_int_cst (TREE_TYPE (exp), 0);
4227
4228 while (1)
4229 {
4230 code = TREE_CODE (exp);
4231 exp_type = TREE_TYPE (exp);
4232 arg0 = NULL_TREE;
4233
4234 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4235 {
4236 if (TREE_OPERAND_LENGTH (exp) > 0)
4237 arg0 = TREE_OPERAND (exp, 0);
4238 if (TREE_CODE_CLASS (code) == tcc_binary
4239 || TREE_CODE_CLASS (code) == tcc_comparison
4240 || (TREE_CODE_CLASS (code) == tcc_expression
4241 && TREE_OPERAND_LENGTH (exp) > 1))
4242 arg1 = TREE_OPERAND (exp, 1);
4243 }
4244 if (arg0 == NULL_TREE)
4245 break;
4246
4247 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4248 &high, &in_p, strict_overflow_p);
4249 if (nexp == NULL_TREE)
4250 break;
4251 exp = nexp;
4252 }
4253
4254 /* If EXP is a constant, we can evaluate whether this is true or false. */
4255 if (TREE_CODE (exp) == INTEGER_CST)
4256 {
4257 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4258 exp, 0, low, 0))
4259 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4260 exp, 1, high, 1)));
4261 low = high = 0;
4262 exp = 0;
4263 }
4264
4265 *pin_p = in_p, *plow = low, *phigh = high;
4266 return exp;
4267 }
4268 \f
4269 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4270 type, TYPE, return an expression to test if EXP is in (or out of, depending
4271 on IN_P) the range. Return 0 if the test couldn't be created. */
4272
4273 tree
4274 build_range_check (location_t loc, tree type, tree exp, int in_p,
4275 tree low, tree high)
4276 {
4277 tree etype = TREE_TYPE (exp), value;
4278
4279 #ifdef HAVE_canonicalize_funcptr_for_compare
4280 /* Disable this optimization for function pointer expressions
4281 on targets that require function pointer canonicalization. */
4282 if (HAVE_canonicalize_funcptr_for_compare
4283 && TREE_CODE (etype) == POINTER_TYPE
4284 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4285 return NULL_TREE;
4286 #endif
4287
4288 if (! in_p)
4289 {
4290 value = build_range_check (loc, type, exp, 1, low, high);
4291 if (value != 0)
4292 return invert_truthvalue_loc (loc, value);
4293
4294 return 0;
4295 }
4296
4297 if (low == 0 && high == 0)
4298 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4299
4300 if (low == 0)
4301 return fold_build2_loc (loc, LE_EXPR, type, exp,
4302 fold_convert_loc (loc, etype, high));
4303
4304 if (high == 0)
4305 return fold_build2_loc (loc, GE_EXPR, type, exp,
4306 fold_convert_loc (loc, etype, low));
4307
4308 if (operand_equal_p (low, high, 0))
4309 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4310 fold_convert_loc (loc, etype, low));
4311
4312 if (integer_zerop (low))
4313 {
4314 if (! TYPE_UNSIGNED (etype))
4315 {
4316 etype = unsigned_type_for (etype);
4317 high = fold_convert_loc (loc, etype, high);
4318 exp = fold_convert_loc (loc, etype, exp);
4319 }
4320 return build_range_check (loc, type, exp, 1, 0, high);
4321 }
4322
4323 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4324 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4325 {
4326 unsigned HOST_WIDE_INT lo;
4327 HOST_WIDE_INT hi;
4328 int prec;
4329
4330 prec = TYPE_PRECISION (etype);
4331 if (prec <= HOST_BITS_PER_WIDE_INT)
4332 {
4333 hi = 0;
4334 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4335 }
4336 else
4337 {
4338 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4339 lo = HOST_WIDE_INT_M1U;
4340 }
4341
4342 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4343 {
4344 if (TYPE_UNSIGNED (etype))
4345 {
4346 tree signed_etype = signed_type_for (etype);
4347 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4348 etype
4349 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4350 else
4351 etype = signed_etype;
4352 exp = fold_convert_loc (loc, etype, exp);
4353 }
4354 return fold_build2_loc (loc, GT_EXPR, type, exp,
4355 build_int_cst (etype, 0));
4356 }
4357 }
4358
4359 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4360 This requires wrap-around arithmetic for the type of the expression.
4361 First make sure that arithmetic in this type is valid, then make sure
4362 that it wraps around. */
4363 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4364 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4365 TYPE_UNSIGNED (etype));
4366
4367 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4368 {
4369 tree utype, minv, maxv;
4370
4371 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4372 for the type in question, as we rely on this here. */
4373 utype = unsigned_type_for (etype);
4374 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4375 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4376 integer_one_node, 1);
4377 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4378
4379 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4380 minv, 1, maxv, 1)))
4381 etype = utype;
4382 else
4383 return 0;
4384 }
4385
4386 high = fold_convert_loc (loc, etype, high);
4387 low = fold_convert_loc (loc, etype, low);
4388 exp = fold_convert_loc (loc, etype, exp);
4389
4390 value = const_binop (MINUS_EXPR, high, low);
4391
4393 if (POINTER_TYPE_P (etype))
4394 {
4395 if (value != 0 && !TREE_OVERFLOW (value))
4396 {
4397 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4398 return build_range_check (loc, type,
4399 fold_build_pointer_plus_loc (loc, exp, low),
4400 1, build_int_cst (etype, 0), value);
4401 }
4402 return 0;
4403 }
4404
4405 if (value != 0 && !TREE_OVERFLOW (value))
4406 return build_range_check (loc, type,
4407 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4408 1, build_int_cst (etype, 0), value);
4409
4410 return 0;
4411 }
4412 \f
4413 /* Return the predecessor of VAL in its type, handling the infinite case. */
4414
4415 static tree
4416 range_predecessor (tree val)
4417 {
4418 tree type = TREE_TYPE (val);
4419
4420 if (INTEGRAL_TYPE_P (type)
4421 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4422 return 0;
4423 else
4424 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4425 }
4426
4427 /* Return the successor of VAL in its type, handling the infinite case. */
4428
4429 static tree
4430 range_successor (tree val)
4431 {
4432 tree type = TREE_TYPE (val);
4433
4434 if (INTEGRAL_TYPE_P (type)
4435 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4436 return 0;
4437 else
4438 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4439 }
4440
4441 /* Given two ranges, see if we can merge them into one. Return 1 if we
4442 can, 0 if we can't. Set the output range into the specified parameters. */
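/* Illustrative examples, combining the tests as fold_range_test does:
     + [2, 5] and + [4, 9] merge to + [4, 5] (the intersection);
     + [2, 5] and - [4, 9] merge to + [2, 3];
     - [2, 5] and - [6, 9] merge to - [2, 9] (adjacent exclusions).  */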
4443
4444 bool
4445 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4446 tree high0, int in1_p, tree low1, tree high1)
4447 {
4448 int no_overlap;
4449 int subset;
4450 int temp;
4451 tree tem;
4452 int in_p;
4453 tree low, high;
4454 int lowequal = ((low0 == 0 && low1 == 0)
4455 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4456 low0, 0, low1, 0)));
4457 int highequal = ((high0 == 0 && high1 == 0)
4458 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4459 high0, 1, high1, 1)));
4460
4461 /* Make range 0 be the range that starts first, or ends last if they
4462 start at the same value. Swap them if that is not already so. */
4463 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4464 low0, 0, low1, 0))
4465 || (lowequal
4466 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4467 high1, 1, high0, 1))))
4468 {
4469 temp = in0_p, in0_p = in1_p, in1_p = temp;
4470 tem = low0, low0 = low1, low1 = tem;
4471 tem = high0, high0 = high1, high1 = tem;
4472 }
4473
4474 /* Now flag two cases, whether the ranges are disjoint or whether the
4475 second range is totally subsumed in the first. Note that the tests
4476 below are simplified by the ones above. */
4477 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4478 high0, 1, low1, 0));
4479 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4480 high1, 1, high0, 1));
4481
4482 /* We now have four cases, depending on whether we are including or
4483 excluding the two ranges. */
4484 if (in0_p && in1_p)
4485 {
4486 /* If they don't overlap, the result is false. If the second range
4487 is a subset it is the result. Otherwise, the range is from the start
4488 of the second to the end of the first. */
4489 if (no_overlap)
4490 in_p = 0, low = high = 0;
4491 else if (subset)
4492 in_p = 1, low = low1, high = high1;
4493 else
4494 in_p = 1, low = low1, high = high0;
4495 }
4496
4497 else if (in0_p && ! in1_p)
4498 {
4499 /* If they don't overlap, the result is the first range. If they are
4500 equal, the result is false. If the second range is a subset of the
4501 first, and the ranges begin at the same place, we go from just after
4502 the end of the second range to the end of the first. If the second
4503 range is not a subset of the first, or if it is a subset and both
4504 ranges end at the same place, the range starts at the start of the
4505 first range and ends just before the second range.
4506 Otherwise, we can't describe this as a single range. */
4507 if (no_overlap)
4508 in_p = 1, low = low0, high = high0;
4509 else if (lowequal && highequal)
4510 in_p = 0, low = high = 0;
4511 else if (subset && lowequal)
4512 {
4513 low = range_successor (high1);
4514 high = high0;
4515 in_p = 1;
4516 if (low == 0)
4517 {
4518 /* We are in the weird situation where high0 > high1 but
4519 high1 has no successor. Punt. */
4520 return 0;
4521 }
4522 }
4523 else if (! subset || highequal)
4524 {
4525 low = low0;
4526 high = range_predecessor (low1);
4527 in_p = 1;
4528 if (high == 0)
4529 {
4530 /* low0 < low1 but low1 has no predecessor. Punt. */
4531 return 0;
4532 }
4533 }
4534 else
4535 return 0;
4536 }
4537
4538 else if (! in0_p && in1_p)
4539 {
4540 /* If they don't overlap, the result is the second range. If the second
4541 is a subset of the first, the result is false. Otherwise,
4542 the range starts just after the first range and ends at the
4543 end of the second. */
4544 if (no_overlap)
4545 in_p = 1, low = low1, high = high1;
4546 else if (subset || highequal)
4547 in_p = 0, low = high = 0;
4548 else
4549 {
4550 low = range_successor (high0);
4551 high = high1;
4552 in_p = 1;
4553 if (low == 0)
4554 {
4555 /* high1 > high0 but high0 has no successor. Punt. */
4556 return 0;
4557 }
4558 }
4559 }
4560
4561 else
4562 {
4563 /* The case where we are excluding both ranges. Here the complex case
4564 is if they don't overlap. In that case, the only time we have a
4565 range is if they are adjacent. If the second is a subset of the
4566 first, the result is the first. Otherwise, the range to exclude
4567 starts at the beginning of the first range and ends at the end of the
4568 second. */
4569 if (no_overlap)
4570 {
4571 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4572 range_successor (high0),
4573 1, low1, 0)))
4574 in_p = 0, low = low0, high = high1;
4575 else
4576 {
4577 /* Canonicalize - [min, x] into - [-, x]. */
4578 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4579 switch (TREE_CODE (TREE_TYPE (low0)))
4580 {
4581 case ENUMERAL_TYPE:
4582 if (TYPE_PRECISION (TREE_TYPE (low0))
4583 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4584 break;
4585 /* FALLTHROUGH */
4586 case INTEGER_TYPE:
4587 if (tree_int_cst_equal (low0,
4588 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4589 low0 = 0;
4590 break;
4591 case POINTER_TYPE:
4592 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4593 && integer_zerop (low0))
4594 low0 = 0;
4595 break;
4596 default:
4597 break;
4598 }
4599
4600 /* Canonicalize - [x, max] into - [x, -]. */
4601 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4602 switch (TREE_CODE (TREE_TYPE (high1)))
4603 {
4604 case ENUMERAL_TYPE:
4605 if (TYPE_PRECISION (TREE_TYPE (high1))
4606 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4607 break;
4608 /* FALLTHROUGH */
4609 case INTEGER_TYPE:
4610 if (tree_int_cst_equal (high1,
4611 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4612 high1 = 0;
4613 break;
4614 case POINTER_TYPE:
4615 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4616 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4617 high1, 1,
4618 integer_one_node, 1)))
4619 high1 = 0;
4620 break;
4621 default:
4622 break;
4623 }
4624
4625 /* The ranges might be also adjacent between the maximum and
4626 minimum values of the given type. For
4627 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4628 return + [x + 1, y - 1]. */
4629 if (low0 == 0 && high1 == 0)
4630 {
4631 low = range_successor (high0);
4632 high = range_predecessor (low1);
4633 if (low == 0 || high == 0)
4634 return 0;
4635
4636 in_p = 1;
4637 }
4638 else
4639 return 0;
4640 }
4641 }
4642 else if (subset)
4643 in_p = 0, low = low0, high = high0;
4644 else
4645 in_p = 0, low = low0, high = high1;
4646 }
4647
4648 *pin_p = in_p, *plow = low, *phigh = high;
4649 return 1;
4650 }
4651 \f
4652
4653 /* Subroutine of fold, looking inside expressions of the form
4654 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4655 of the COND_EXPR. This function is being used also to optimize
4656 A op B ? C : A, by reversing the comparison first.
4657
4658 Return a folded expression whose code is not a COND_EXPR
4659 anymore, or NULL_TREE if no folding opportunity is found. */
4660
4661 static tree
4662 fold_cond_expr_with_comparison (location_t loc, tree type,
4663 tree arg0, tree arg1, tree arg2)
4664 {
4665 enum tree_code comp_code = TREE_CODE (arg0);
4666 tree arg00 = TREE_OPERAND (arg0, 0);
4667 tree arg01 = TREE_OPERAND (arg0, 1);
4668 tree arg1_type = TREE_TYPE (arg1);
4669 tree tem;
4670
4671 STRIP_NOPS (arg1);
4672 STRIP_NOPS (arg2);
4673
4674 /* If we have A op 0 ? A : -A, consider applying the following
4675 transformations:
4676
4677 A == 0? A : -A same as -A
4678 A != 0? A : -A same as A
4679 A >= 0? A : -A same as abs (A)
4680 A > 0? A : -A same as abs (A)
4681 A <= 0? A : -A same as -abs (A)
4682 A < 0? A : -A same as -abs (A)
4683
4684 None of these transformations work for modes with signed
4685 zeros. If A is +/-0, the first two transformations will
4686 change the sign of the result (from +0 to -0, or vice
4687 versa). The last four will fix the sign of the result,
4688 even though the original expressions could be positive or
4689 negative, depending on the sign of A.
4690
4691 Note that all these transformations are correct if A is
4692 NaN, since the two alternatives (A and -A) are also NaNs. */
4693 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4694 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4695 ? real_zerop (arg01)
4696 : integer_zerop (arg01))
4697 && ((TREE_CODE (arg2) == NEGATE_EXPR
4698 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4699 /* In the case that A is of the form X-Y, '-A' (arg2) may
4700 have already been folded to Y-X, check for that. */
4701 || (TREE_CODE (arg1) == MINUS_EXPR
4702 && TREE_CODE (arg2) == MINUS_EXPR
4703 && operand_equal_p (TREE_OPERAND (arg1, 0),
4704 TREE_OPERAND (arg2, 1), 0)
4705 && operand_equal_p (TREE_OPERAND (arg1, 1),
4706 TREE_OPERAND (arg2, 0), 0))))
4707 switch (comp_code)
4708 {
4709 case EQ_EXPR:
4710 case UNEQ_EXPR:
4711 tem = fold_convert_loc (loc, arg1_type, arg1);
4712 return pedantic_non_lvalue_loc (loc,
4713 fold_convert_loc (loc, type,
4714 negate_expr (tem)));
4715 case NE_EXPR:
4716 case LTGT_EXPR:
4717 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4718 case UNGE_EXPR:
4719 case UNGT_EXPR:
4720 if (flag_trapping_math)
4721 break;
4722 /* Fall through. */
4723 case GE_EXPR:
4724 case GT_EXPR:
4725 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4726 arg1 = fold_convert_loc (loc, signed_type_for
4727 (TREE_TYPE (arg1)), arg1);
4728 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4729 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4730 case UNLE_EXPR:
4731 case UNLT_EXPR:
4732 if (flag_trapping_math)
4733 break;
4734 case LE_EXPR:
4735 case LT_EXPR:
4736 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4737 arg1 = fold_convert_loc (loc, signed_type_for
4738 (TREE_TYPE (arg1)), arg1);
4739 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4740 return negate_expr (fold_convert_loc (loc, type, tem));
4741 default:
4742 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4743 break;
4744 }
4745
4746 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4747 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4748 both transformations are correct when A is NaN: A != 0
4749 is then true, and A == 0 is false. */
4750
4751 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4752 && integer_zerop (arg01) && integer_zerop (arg2))
4753 {
4754 if (comp_code == NE_EXPR)
4755 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4756 else if (comp_code == EQ_EXPR)
4757 return build_zero_cst (type);
4758 }
4759
4760 /* Try some transformations of A op B ? A : B.
4761
4762 A == B? A : B same as B
4763 A != B? A : B same as A
4764 A >= B? A : B same as max (A, B)
4765 A > B? A : B same as max (B, A)
4766 A <= B? A : B same as min (A, B)
4767 A < B? A : B same as min (B, A)
4768
4769 As above, these transformations don't work in the presence
4770 of signed zeros. For example, if A and B are zeros of
4771 opposite sign, the first two transformations will change
4772 the sign of the result. In the last four, the original
4773 expressions give different results for (A=+0, B=-0) and
4774 (A=-0, B=+0), but the transformed expressions do not.
4775
4776 The first two transformations are correct if either A or B
4777 is a NaN. In the first transformation, the condition will
4778 be false, and B will indeed be chosen. In the case of the
4779 second transformation, the condition A != B will be true,
4780 and A will be chosen.
4781
4782 The conversions to max() and min() are not correct if B is
4783 a number and A is not. The conditions in the original
4784 expressions will be false, so all four give B. The min()
4785 and max() versions would give a NaN instead. */
4786 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4787 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4788 /* Avoid these transformations if the COND_EXPR may be used
4789 as an lvalue in the C++ front-end. PR c++/19199. */
4790 && (in_gimple_form
4791 || VECTOR_TYPE_P (type)
4792 || (strcmp (lang_hooks.name, "GNU C++") != 0
4793 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4794 || ! maybe_lvalue_p (arg1)
4795 || ! maybe_lvalue_p (arg2)))
4796 {
4797 tree comp_op0 = arg00;
4798 tree comp_op1 = arg01;
4799 tree comp_type = TREE_TYPE (comp_op0);
4800
4801 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4802 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4803 {
4804 comp_type = type;
4805 comp_op0 = arg1;
4806 comp_op1 = arg2;
4807 }
4808
4809 switch (comp_code)
4810 {
4811 case EQ_EXPR:
4812 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4813 case NE_EXPR:
4814 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4815 case LE_EXPR:
4816 case LT_EXPR:
4817 case UNLE_EXPR:
4818 case UNLT_EXPR:
4819 /* In C++ a ?: expression can be an lvalue, so put the
4820 operand which will be used if they are equal first
4821 so that we can convert this back to the
4822 corresponding COND_EXPR. */
4823 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4824 {
4825 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4826 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4827 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4828 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4829 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4830 comp_op1, comp_op0);
4831 return pedantic_non_lvalue_loc (loc,
4832 fold_convert_loc (loc, type, tem));
4833 }
4834 break;
4835 case GE_EXPR:
4836 case GT_EXPR:
4837 case UNGE_EXPR:
4838 case UNGT_EXPR:
4839 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4840 {
4841 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4842 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4843 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4844 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4845 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4846 comp_op1, comp_op0);
4847 return pedantic_non_lvalue_loc (loc,
4848 fold_convert_loc (loc, type, tem));
4849 }
4850 break;
4851 case UNEQ_EXPR:
4852 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4853 return pedantic_non_lvalue_loc (loc,
4854 fold_convert_loc (loc, type, arg2));
4855 break;
4856 case LTGT_EXPR:
4857 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4858 return pedantic_non_lvalue_loc (loc,
4859 fold_convert_loc (loc, type, arg1));
4860 break;
4861 default:
4862 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4863 break;
4864 }
4865 }
4866
4867 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4868 we might still be able to simplify this. For example,
4869 if C1 is one less or one more than C2, this might have started
4870 out as a MIN or MAX and been transformed by this function.
4871 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
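/* For example (illustrative): "A < 48 ? A : 47" arrives here with
   C1 == 48 == C2 + 1, and the LT_EXPR case below rebuilds it as
   MIN_EXPR (A, 47).  */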
4872
4873 if (INTEGRAL_TYPE_P (type)
4874 && TREE_CODE (arg01) == INTEGER_CST
4875 && TREE_CODE (arg2) == INTEGER_CST)
4876 switch (comp_code)
4877 {
4878 case EQ_EXPR:
4879 if (TREE_CODE (arg1) == INTEGER_CST)
4880 break;
4881 /* We can replace A with C1 in this case. */
4882 arg1 = fold_convert_loc (loc, type, arg01);
4883 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4884
4885 case LT_EXPR:
4886 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4887 MIN_EXPR, to preserve the signedness of the comparison. */
4888 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4889 OEP_ONLY_CONST)
4890 && operand_equal_p (arg01,
4891 const_binop (PLUS_EXPR, arg2,
4892 build_int_cst (type, 1)),
4893 OEP_ONLY_CONST))
4894 {
4895 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4896 fold_convert_loc (loc, TREE_TYPE (arg00),
4897 arg2));
4898 return pedantic_non_lvalue_loc (loc,
4899 fold_convert_loc (loc, type, tem));
4900 }
4901 break;
4902
4903 case LE_EXPR:
4904 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4905 as above. */
4906 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4907 OEP_ONLY_CONST)
4908 && operand_equal_p (arg01,
4909 const_binop (MINUS_EXPR, arg2,
4910 build_int_cst (type, 1)),
4911 OEP_ONLY_CONST))
4912 {
4913 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4914 fold_convert_loc (loc, TREE_TYPE (arg00),
4915 arg2));
4916 return pedantic_non_lvalue_loc (loc,
4917 fold_convert_loc (loc, type, tem));
4918 }
4919 break;
4920
4921 case GT_EXPR:
4922 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4923 MAX_EXPR, to preserve the signedness of the comparison. */
4924 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4925 OEP_ONLY_CONST)
4926 && operand_equal_p (arg01,
4927 const_binop (MINUS_EXPR, arg2,
4928 build_int_cst (type, 1)),
4929 OEP_ONLY_CONST))
4930 {
4931 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4932 fold_convert_loc (loc, TREE_TYPE (arg00),
4933 arg2));
4934 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4935 }
4936 break;
4937
4938 case GE_EXPR:
4939 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4940 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4941 OEP_ONLY_CONST)
4942 && operand_equal_p (arg01,
4943 const_binop (PLUS_EXPR, arg2,
4944 build_int_cst (type, 1)),
4945 OEP_ONLY_CONST))
4946 {
4947 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4948 fold_convert_loc (loc, TREE_TYPE (arg00),
4949 arg2));
4950 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4951 }
4952 break;
4953 case NE_EXPR:
4954 break;
4955 default:
4956 gcc_unreachable ();
4957 }
4958
4959 return NULL_TREE;
4960 }
4961
4962
4963 \f
4964 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4965 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4966 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4967 false) >= 2)
4968 #endif
4969
4970 /* EXP is some logical combination of boolean tests. See if we can
4971 merge it into some range test. Return the new tree if so. */
4972
4973 static tree
4974 fold_range_test (location_t loc, enum tree_code code, tree type,
4975 tree op0, tree op1)
4976 {
4977 int or_op = (code == TRUTH_ORIF_EXPR
4978 || code == TRUTH_OR_EXPR);
4979 int in0_p, in1_p, in_p;
4980 tree low0, low1, low, high0, high1, high;
4981 bool strict_overflow_p = false;
4982 tree tem, lhs, rhs;
4983 const char * const warnmsg = G_("assuming signed overflow does not occur "
4984 "when simplifying range test");
4985
4986 if (!INTEGRAL_TYPE_P (type))
4987 return 0;
4988
4989 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4990 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4991
4992 /* If this is an OR operation, invert both sides; we will invert
4993 again at the end. */
4994 if (or_op)
4995 in0_p = ! in0_p, in1_p = ! in1_p;
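/* E.g. "X < 2 || X > 5" is handled as the inverse of
   "X >= 2 && X <= 5", i.e. of + [2, 5]; the merged result is
   inverted again on return (illustration only).  */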
4996
4997 /* If both expressions are the same, if we can merge the ranges, and we
4998 can build the range test, return it or it inverted. If one of the
4999 ranges is always true or always false, consider it to be the same
5000 expression as the other. */
5001 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5002 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5003 in1_p, low1, high1)
5004 && 0 != (tem = (build_range_check (loc, type,
5005 lhs != 0 ? lhs
5006 : rhs != 0 ? rhs : integer_zero_node,
5007 in_p, low, high))))
5008 {
5009 if (strict_overflow_p)
5010 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5011 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5012 }
5013
5014 /* On machines where branches are expensive, if this is a
5015 short-circuited branch and the underlying object on both sides
5016 is the same, make a non-short-circuit operation. */
5017 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5018 && lhs != 0 && rhs != 0
5019 && (code == TRUTH_ANDIF_EXPR
5020 || code == TRUTH_ORIF_EXPR)
5021 && operand_equal_p (lhs, rhs, 0))
5022 {
5023 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5024 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5025 which cases we can't do this. */
5026 if (simple_operand_p (lhs))
5027 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5028 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5029 type, op0, op1);
5030
5031 else if (!lang_hooks.decls.global_bindings_p ()
5032 && !CONTAINS_PLACEHOLDER_P (lhs))
5033 {
5034 tree common = save_expr (lhs);
5035
5036 if (0 != (lhs = build_range_check (loc, type, common,
5037 or_op ? ! in0_p : in0_p,
5038 low0, high0))
5039 && (0 != (rhs = build_range_check (loc, type, common,
5040 or_op ? ! in1_p : in1_p,
5041 low1, high1))))
5042 {
5043 if (strict_overflow_p)
5044 fold_overflow_warning (warnmsg,
5045 WARN_STRICT_OVERFLOW_COMPARISON);
5046 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5047 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5048 type, lhs, rhs);
5049 }
5050 }
5051 }
5052
5053 return 0;
5054 }
5055 \f
5056 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5057 P-bit value. Arrange things so the extra bits will be set to zero if and
5058 only if C is sign-extended to its full width. If MASK is nonzero,
5059 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5060
5061 static tree
5062 unextend (tree c, int p, int unsignedp, tree mask)
5063 {
5064 tree type = TREE_TYPE (c);
5065 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5066 tree temp;
5067
5068 if (p == modesize || unsignedp)
5069 return c;
5070
5071 /* We work by getting just the sign bit into the low-order bit, then
5072 into the high-order bit, then sign-extend. We then XOR that value
5073 with C. */
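/* Worked example (illustrative): P == 4, MODESIZE == 32 and C == 12,
   i.e. -4 as a signed 4-bit value. TEMP becomes 1, then 0x80000000,
   then 0xfffffff0 after the arithmetic right shift; XORed with 12
   this gives 0xfffffffc, the sign-extension of C.  */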
5074 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5075 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5076
5077 /* We must use a signed type in order to get an arithmetic right shift.
5078 However, we must also avoid introducing accidental overflows, so that
5079 a subsequent call to integer_zerop will work. Hence we must
5080 do the type conversion here. At this point, the constant is either
5081 zero or one, and the conversion to a signed type can never overflow.
5082 We could get an overflow if this conversion is done anywhere else. */
5083 if (TYPE_UNSIGNED (type))
5084 temp = fold_convert (signed_type_for (type), temp);
5085
5086 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5087 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5088 if (mask != 0)
5089 temp = const_binop (BIT_AND_EXPR, temp,
5090 fold_convert (TREE_TYPE (c), mask));
5091 /* If necessary, convert the type back to match the type of C. */
5092 if (TYPE_UNSIGNED (type))
5093 temp = fold_convert (type, temp);
5094
5095 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5096 }
5097 \f
5098 /* For an expression that has the form
5099 (A && B) || ~B
5100 or
5101 (A || B) && ~B,
5102 we can drop one of the inner expressions and simplify to
5103 A || ~B
5104 or
5105 A && ~B
5106 LOC is the location of the resulting expression. OP is the inner
5107 logical operation; the left-hand side in the examples above, while CMPOP
5108 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5109 removing a condition that guards another, as in
5110 (A != NULL && A->...) || A == NULL
5111 which we must not transform. If RHS_ONLY is true, only eliminate the
5112 right-most operand of the inner logical operation. */
5113
5114 static tree
5115 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5116 bool rhs_only)
5117 {
5118 tree type = TREE_TYPE (cmpop);
5119 enum tree_code code = TREE_CODE (cmpop);
5120 enum tree_code truthop_code = TREE_CODE (op);
5121 tree lhs = TREE_OPERAND (op, 0);
5122 tree rhs = TREE_OPERAND (op, 1);
5123 tree orig_lhs = lhs, orig_rhs = rhs;
5124 enum tree_code rhs_code = TREE_CODE (rhs);
5125 enum tree_code lhs_code = TREE_CODE (lhs);
5126 enum tree_code inv_code;
5127
5128 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5129 return NULL_TREE;
5130
5131 if (TREE_CODE_CLASS (code) != tcc_comparison)
5132 return NULL_TREE;
5133
5134 if (rhs_code == truthop_code)
5135 {
5136 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5137 if (newrhs != NULL_TREE)
5138 {
5139 rhs = newrhs;
5140 rhs_code = TREE_CODE (rhs);
5141 }
5142 }
5143 if (lhs_code == truthop_code && !rhs_only)
5144 {
5145 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5146 if (newlhs != NULL_TREE)
5147 {
5148 lhs = newlhs;
5149 lhs_code = TREE_CODE (lhs);
5150 }
5151 }
5152
5153 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5154 if (inv_code == rhs_code
5155 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5156 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5157 return lhs;
5158 if (!rhs_only && inv_code == lhs_code
5159 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5160 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5161 return rhs;
5162 if (rhs != orig_rhs || lhs != orig_lhs)
5163 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5164 lhs, rhs);
5165 return NULL_TREE;
5166 }
5167
5168 /* Find ways of folding logical expressions of LHS and RHS:
5169 Try to merge two comparisons to the same innermost item.
5170 Look for range tests like "ch >= '0' && ch <= '9'".
5171 Look for combinations of simple terms on machines with expensive branches
5172 and evaluate the RHS unconditionally.
5173
5174 For example, if we have p->a == 2 && p->b == 4 and we can make an
5175 object large enough to span both A and B, we can do this with a comparison
5176 against the object ANDed with a mask.
5177
5178 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5179 operations to do this with one comparison.
5180
5181 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5182 function and the one above.
5183
5184 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5185 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5186
5187 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5188 two operands.
5189
5190 We return the simplified tree or 0 if no optimization is possible. */
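/* A sketch of the intended effect, with a layout assumed purely for
   illustration:

     struct S { unsigned a : 4; unsigned b : 4; } *p;

   If both fields land in one byte, "p->a == 2 && p->b == 4" can fold
   to a single masked compare, roughly
   "(*(unsigned char *) p & 0xff) == (2 | 4 << 4)" on a little-endian
   target.  */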
5191
5192 static tree
5193 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5194 tree lhs, tree rhs)
5195 {
5196 /* If this is the "or" of two comparisons, we can do something if
5197 the comparisons are NE_EXPR. If this is the "and", we can do something
5198 if the comparisons are EQ_EXPR. I.e.,
5199 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5200
5201 WANTED_CODE is this operation code. For single bit fields, we can
5202 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5203 comparison for one-bit fields. */
5204
5205 enum tree_code wanted_code;
5206 enum tree_code lcode, rcode;
5207 tree ll_arg, lr_arg, rl_arg, rr_arg;
5208 tree ll_inner, lr_inner, rl_inner, rr_inner;
5209 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5210 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5211 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5212 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5213 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5214 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5215 enum machine_mode lnmode, rnmode;
5216 tree ll_mask, lr_mask, rl_mask, rr_mask;
5217 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5218 tree l_const, r_const;
5219 tree lntype, rntype, result;
5220 HOST_WIDE_INT first_bit, end_bit;
5221 int volatilep;
5222
5223 /* Start by getting the comparison codes. Fail if anything is volatile.
5224 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5225 it were surrounded with a NE_EXPR. */
5226
5227 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5228 return 0;
5229
5230 lcode = TREE_CODE (lhs);
5231 rcode = TREE_CODE (rhs);
5232
5233 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5234 {
5235 lhs = build2 (NE_EXPR, truth_type, lhs,
5236 build_int_cst (TREE_TYPE (lhs), 0));
5237 lcode = NE_EXPR;
5238 }
5239
5240 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5241 {
5242 rhs = build2 (NE_EXPR, truth_type, rhs,
5243 build_int_cst (TREE_TYPE (rhs), 0));
5244 rcode = NE_EXPR;
5245 }
5246
5247 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5248 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5249 return 0;
5250
5251 ll_arg = TREE_OPERAND (lhs, 0);
5252 lr_arg = TREE_OPERAND (lhs, 1);
5253 rl_arg = TREE_OPERAND (rhs, 0);
5254 rr_arg = TREE_OPERAND (rhs, 1);
5255
5256 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5257 if (simple_operand_p (ll_arg)
5258 && simple_operand_p (lr_arg))
5259 {
5260 if (operand_equal_p (ll_arg, rl_arg, 0)
5261 && operand_equal_p (lr_arg, rr_arg, 0))
5262 {
5263 result = combine_comparisons (loc, code, lcode, rcode,
5264 truth_type, ll_arg, lr_arg);
5265 if (result)
5266 return result;
5267 }
5268 else if (operand_equal_p (ll_arg, rr_arg, 0)
5269 && operand_equal_p (lr_arg, rl_arg, 0))
5270 {
5271 result = combine_comparisons (loc, code, lcode,
5272 swap_tree_comparison (rcode),
5273 truth_type, ll_arg, lr_arg);
5274 if (result)
5275 return result;
5276 }
5277 }
5278
5279 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5280 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5281
5282 /* If the RHS can be evaluated unconditionally and its operands are
5283 simple, it wins to evaluate the RHS unconditionally on machines
5284 with expensive branches. In this case, this isn't a comparison
5285 that can be merged. */
5286
5287 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5288 false) >= 2
5289 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5290 && simple_operand_p (rl_arg)
5291 && simple_operand_p (rr_arg))
5292 {
5293 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5294 if (code == TRUTH_OR_EXPR
5295 && lcode == NE_EXPR && integer_zerop (lr_arg)
5296 && rcode == NE_EXPR && integer_zerop (rr_arg)
5297 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5298 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5299 return build2_loc (loc, NE_EXPR, truth_type,
5300 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5301 ll_arg, rl_arg),
5302 build_int_cst (TREE_TYPE (ll_arg), 0));
5303
5304 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5305 if (code == TRUTH_AND_EXPR
5306 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5307 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5308 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5309 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5310 return build2_loc (loc, EQ_EXPR, truth_type,
5311 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5312 ll_arg, rl_arg),
5313 build_int_cst (TREE_TYPE (ll_arg), 0));
5314 }
5315
5316 /* See if the comparisons can be merged. Then get all the parameters for
5317 each side. */
5318
5319 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5320 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5321 return 0;
5322
5323 volatilep = 0;
5324 ll_inner = decode_field_reference (loc, ll_arg,
5325 &ll_bitsize, &ll_bitpos, &ll_mode,
5326 &ll_unsignedp, &volatilep, &ll_mask,
5327 &ll_and_mask);
5328 lr_inner = decode_field_reference (loc, lr_arg,
5329 &lr_bitsize, &lr_bitpos, &lr_mode,
5330 &lr_unsignedp, &volatilep, &lr_mask,
5331 &lr_and_mask);
5332 rl_inner = decode_field_reference (loc, rl_arg,
5333 &rl_bitsize, &rl_bitpos, &rl_mode,
5334 &rl_unsignedp, &volatilep, &rl_mask,
5335 &rl_and_mask);
5336 rr_inner = decode_field_reference (loc, rr_arg,
5337 &rr_bitsize, &rr_bitpos, &rr_mode,
5338 &rr_unsignedp, &volatilep, &rr_mask,
5339 &rr_and_mask);
5340
5341 /* The inner operation on the lhs of each comparison must be the
5342 same if we are to be able to do anything.
5343 Then see if we have constants. If not, the same must be true for
5344 the rhs's. */
5345 if (volatilep || ll_inner == 0 || rl_inner == 0
5346 || ! operand_equal_p (ll_inner, rl_inner, 0))
5347 return 0;
5348
5349 if (TREE_CODE (lr_arg) == INTEGER_CST
5350 && TREE_CODE (rr_arg) == INTEGER_CST)
5351 l_const = lr_arg, r_const = rr_arg;
5352 else if (lr_inner == 0 || rr_inner == 0
5353 || ! operand_equal_p (lr_inner, rr_inner, 0))
5354 return 0;
5355 else
5356 l_const = r_const = 0;
5357
5358 /* If either comparison code is not correct for our logical operation,
5359 fail. However, we can convert a one-bit comparison against zero into
5360 the opposite comparison against that bit being set in the field. */
5361
5362 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5363 if (lcode != wanted_code)
5364 {
5365 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5366 {
5367 /* Make the left operand unsigned, since we are only interested
5368 in the value of one bit. Otherwise we are doing the wrong
5369 thing below. */
5370 ll_unsignedp = 1;
5371 l_const = ll_mask;
5372 }
5373 else
5374 return 0;
5375 }
5376
5377 /* This is analogous to the code for l_const above. */
5378 if (rcode != wanted_code)
5379 {
5380 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5381 {
5382 rl_unsignedp = 1;
5383 r_const = rl_mask;
5384 }
5385 else
5386 return 0;
5387 }
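  /* For instance (sketch): when merging "x.bit != 0 && x.other == k",
     where x.bit is a one-bit field, lcode is NE_EXPR while wanted_code
     is EQ_EXPR; the conversion above rewrites the left test as
     "x.bit == mask" so that both halves use the wanted code.  */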
5388
5389 /* See if we can find a mode that contains both fields being compared on
5390 the left. If we can't, fail. Otherwise, update all constants and masks
5391 to be relative to a field of that size. */
5392 first_bit = MIN (ll_bitpos, rl_bitpos);
5393 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5394 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5395 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5396 volatilep);
5397 if (lnmode == VOIDmode)
5398 return 0;
5399
5400 lnbitsize = GET_MODE_BITSIZE (lnmode);
5401 lnbitpos = first_bit & ~ (lnbitsize - 1);
5402 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5403 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5404
5405 if (BYTES_BIG_ENDIAN)
5406 {
5407 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5408 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5409 }
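  /* As a numeric sketch of this correction: with lnbitsize == 32, a
     field of size 8 whose little-endian position xll_bitpos is 0 lands
     at big-endian bit 32 - 0 - 8 == 24.  */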
5410
5411 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5412 size_int (xll_bitpos));
5413 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5414 size_int (xrl_bitpos));
5415
5416 if (l_const)
5417 {
5418 l_const = fold_convert_loc (loc, lntype, l_const);
5419 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5420 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5421 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5422 fold_build1_loc (loc, BIT_NOT_EXPR,
5423 lntype, ll_mask))))
5424 {
5425 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5426
5427 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5428 }
5429 }
5430 if (r_const)
5431 {
5432 r_const = fold_convert_loc (loc, lntype, r_const);
5433 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5434 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5435 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5436 fold_build1_loc (loc, BIT_NOT_EXPR,
5437 lntype, rl_mask))))
5438 {
5439 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5440
5441 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5442 }
5443 }
5444
5445 /* If the right sides are not constant, do the same for them. Also,
5446 disallow this optimization if a size or signedness mismatch occurs
5447 between the left and right sides. */
5448 if (l_const == 0)
5449 {
5450 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5451 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5452 /* Make sure the two fields on the right
5453 correspond to the left without being swapped. */
5454 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5455 return 0;
5456
5457 first_bit = MIN (lr_bitpos, rr_bitpos);
5458 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5459 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5460 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5461 volatilep);
5462 if (rnmode == VOIDmode)
5463 return 0;
5464
5465 rnbitsize = GET_MODE_BITSIZE (rnmode);
5466 rnbitpos = first_bit & ~ (rnbitsize - 1);
5467 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5468 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5469
5470 if (BYTES_BIG_ENDIAN)
5471 {
5472 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5473 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5474 }
5475
5476 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5477 rntype, lr_mask),
5478 size_int (xlr_bitpos));
5479 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5480 rntype, rr_mask),
5481 size_int (xrr_bitpos));
5482
5483 /* Make a mask that corresponds to both fields being compared.
5484 Do this for both items being compared. If the operands are the
5485 same size and the bits being compared are in the same position
5486 then we can do this by masking both and comparing the masked
5487 results. */
5488 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5489 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5490 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5491 {
5492 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5493 ll_unsignedp || rl_unsignedp);
5494 if (! all_ones_mask_p (ll_mask, lnbitsize))
5495 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5496
5497 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5498 lr_unsignedp || rr_unsignedp);
5499 if (! all_ones_mask_p (lr_mask, rnbitsize))
5500 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5501
5502 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5503 }
5504
5505 /* There is still another way we can do something: If both pairs of
5506 fields being compared are adjacent, we may be able to make a wider
5507 field containing them both.
5508
5509 Note that we still must mask the lhs/rhs expressions. Furthermore,
5510 the mask must be shifted to account for the shift done by
5511 make_bit_field_ref. */
5512 if ((ll_bitsize + ll_bitpos == rl_bitpos
5513 && lr_bitsize + lr_bitpos == rr_bitpos)
5514 || (ll_bitpos == rl_bitpos + rl_bitsize
5515 && lr_bitpos == rr_bitpos + rr_bitsize))
5516 {
5517 tree type;
5518
5519 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5520 ll_bitsize + rl_bitsize,
5521 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5522 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5523 lr_bitsize + rr_bitsize,
5524 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5525
5526 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5527 size_int (MIN (xll_bitpos, xrl_bitpos)));
5528 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5529 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5530
5531 /* Convert to the smaller type before masking out unwanted bits. */
5532 type = lntype;
5533 if (lntype != rntype)
5534 {
5535 if (lnbitsize > rnbitsize)
5536 {
5537 lhs = fold_convert_loc (loc, rntype, lhs);
5538 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5539 type = rntype;
5540 }
5541 else if (lnbitsize < rnbitsize)
5542 {
5543 rhs = fold_convert_loc (loc, lntype, rhs);
5544 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5545 type = lntype;
5546 }
5547 }
5548
5549 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5550 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5551
5552 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5553 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5554
5555 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5556 }
5557
5558 return 0;
5559 }
5560
5561 /* Handle the case of comparisons with constants. If there is something in
5562 common between the masks, those bits of the constants must be the same.
5563 If not, the condition is always false. Test for this to avoid generating
5564 incorrect code below. */
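  /* E.g. (hypothetical masks): for "(x & 3) == 1 && (x & 5) == 4" the
     masks share bit 0, where the constants require 1 and 0 at the same
     time, so the conjunction folds to constant false below.  */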
5565 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5566 if (! integer_zerop (result)
5567 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5568 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5569 {
5570 if (wanted_code == NE_EXPR)
5571 {
5572 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5573 return constant_boolean_node (true, truth_type);
5574 }
5575 else
5576 {
5577 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5578 return constant_boolean_node (false, truth_type);
5579 }
5580 }
5581
5582 /* Construct the expression we will return. First get the component
5583 reference we will make. Unless the mask is all ones the width of
5584 that field, perform the mask operation. Then compare with the
5585 merged constant. */
5586 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5587 ll_unsignedp || rl_unsignedp);
5588
5589 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5590 if (! all_ones_mask_p (ll_mask, lnbitsize))
5591 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5592
5593 return build2_loc (loc, wanted_code, truth_type, result,
5594 const_binop (BIT_IOR_EXPR, l_const, r_const));
5595 }
5596 \f
5597 /* Optimize the comparison OP0 CODE OP1, where OP0 is a MIN_EXPR or
5598 MAX_EXPR and OP1 is a constant.  TYPE is the type of the result.  */
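/* For example (illustrative): MAX (x, 4) == 3 folds to constant false
   since the MAX can never be less than 4, while MIN (x, 4) == 3 folds
   to x == 3, since the MIN yields 3 only when x itself is 3.  */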
5599
5600 static tree
5601 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5602 tree op0, tree op1)
5603 {
5604 tree arg0 = op0;
5605 enum tree_code op_code;
5606 tree comp_const;
5607 tree minmax_const;
5608 int consts_equal, consts_lt;
5609 tree inner;
5610
5611 STRIP_SIGN_NOPS (arg0);
5612
5613 op_code = TREE_CODE (arg0);
5614 minmax_const = TREE_OPERAND (arg0, 1);
5615 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5616 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5617 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5618 inner = TREE_OPERAND (arg0, 0);
5619
5620 /* If something does not permit us to optimize, return NULL_TREE. */
5621 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5622 || TREE_CODE (comp_const) != INTEGER_CST
5623 || TREE_OVERFLOW (comp_const)
5624 || TREE_CODE (minmax_const) != INTEGER_CST
5625 || TREE_OVERFLOW (minmax_const))
5626 return NULL_TREE;
5627
5628 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5629 and GT_EXPR, doing the rest with recursive calls using logical
5630 simplifications. */
5631 switch (code)
5632 {
5633 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5634 {
5635 tree tem
5636 = optimize_minmax_comparison (loc,
5637 invert_tree_comparison (code, false),
5638 type, op0, op1);
5639 if (tem)
5640 return invert_truthvalue_loc (loc, tem);
5641 return NULL_TREE;
5642 }
5643
5644 case GE_EXPR:
5645 return
5646 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5647 optimize_minmax_comparison
5648 (loc, EQ_EXPR, type, arg0, comp_const),
5649 optimize_minmax_comparison
5650 (loc, GT_EXPR, type, arg0, comp_const));
5651
5652 case EQ_EXPR:
5653 if (op_code == MAX_EXPR && consts_equal)
5654 /* MAX (X, 0) == 0 -> X <= 0 */
5655 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5656
5657 else if (op_code == MAX_EXPR && consts_lt)
5658 /* MAX (X, 0) == 5 -> X == 5 */
5659 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5660
5661 else if (op_code == MAX_EXPR)
5662 /* MAX (X, 0) == -1 -> false */
5663 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5664
5665 else if (consts_equal)
5666 /* MIN (X, 0) == 0 -> X >= 0 */
5667 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5668
5669 else if (consts_lt)
5670 /* MIN (X, 0) == 5 -> false */
5671 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5672
5673 else
5674 /* MIN (X, 0) == -1 -> X == -1 */
5675 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5676
5677 case GT_EXPR:
5678 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5679 /* MAX (X, 0) > 0 -> X > 0
5680 MAX (X, 0) > 5 -> X > 5 */
5681 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5682
5683 else if (op_code == MAX_EXPR)
5684 /* MAX (X, 0) > -1 -> true */
5685 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5686
5687 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5688 /* MIN (X, 0) > 0 -> false
5689 MIN (X, 0) > 5 -> false */
5690 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5691
5692 else
5693 /* MIN (X, 0) > -1 -> X > -1 */
5694 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5695
5696 default:
5697 return NULL_TREE;
5698 }
5699 }
5700 \f
5701 /* T is an integer expression that is being multiplied by, divided by, or
5702 reduced modulo a constant C (CODE says which operation and what kind of
5703 divide or modulus). See if we can eliminate that operation by folding it with
5704 other operations already in T. WIDE_TYPE, if non-null, is a type that
5705 should be used for the computation if wider than our type.
5706
5707 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5708 (X * 2) + (Y * 4). We must, however, be assured that either the original
5709 expression would not overflow or that overflow is undefined for the type
5710 in the language in question.
5711
5712 If we return a non-null expression, it is an equivalent form of the
5713 original computation, but need not be in the original type.
5714
5715 We set *STRICT_OVERFLOW_P to true if the return value depends on
5716 signed overflow being undefined. Otherwise we do not change
5717 *STRICT_OVERFLOW_P. */
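/* For instance (a sketch, not an exhaustive description): with signed
   x, (x * 8 + 16) / 4 may be rewritten as x * 2 + 4 only because
   signed overflow is undefined; if x * 8 could wrap, the two forms
   would differ, which is why *STRICT_OVERFLOW_P is set for such
   rewrites.  */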
5718
5719 static tree
5720 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5721 bool *strict_overflow_p)
5722 {
5723 /* To avoid exponential search depth, refuse to allow recursion past
5724 three levels. Beyond that (1) it's highly unlikely that we'll find
5725 something interesting and (2) we've probably processed it before
5726 when we built the inner expression. */
5727
5728 static int depth;
5729 tree ret;
5730
5731 if (depth > 3)
5732 return NULL;
5733
5734 depth++;
5735 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5736 depth--;
5737
5738 return ret;
5739 }
5740
5741 static tree
5742 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5743 bool *strict_overflow_p)
5744 {
5745 tree type = TREE_TYPE (t);
5746 enum tree_code tcode = TREE_CODE (t);
5747 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5748 > GET_MODE_SIZE (TYPE_MODE (type)))
5749 ? wide_type : type);
5750 tree t1, t2;
5751 int same_p = tcode == code;
5752 tree op0 = NULL_TREE, op1 = NULL_TREE;
5753 bool sub_strict_overflow_p;
5754
5755 /* Don't deal with constants of zero here; they confuse the code below. */
5756 if (integer_zerop (c))
5757 return NULL_TREE;
5758
5759 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5760 op0 = TREE_OPERAND (t, 0);
5761
5762 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5763 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5764
5765 /* Note that we need not handle conditional operations here since fold
5766 already handles those cases. So just do arithmetic here. */
5767 switch (tcode)
5768 {
5769 case INTEGER_CST:
5770 /* For a constant, we can always simplify if we are a multiply
5771 or (for divide and modulus) if it is a multiple of our constant. */
5772 if (code == MULT_EXPR
5773 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5774 return const_binop (code, fold_convert (ctype, t),
5775 fold_convert (ctype, c));
5776 break;
5777
5778 CASE_CONVERT: case NON_LVALUE_EXPR:
5779 /* If op0 is an expression ... */
5780 if ((COMPARISON_CLASS_P (op0)
5781 || UNARY_CLASS_P (op0)
5782 || BINARY_CLASS_P (op0)
5783 || VL_EXP_CLASS_P (op0)
5784 || EXPRESSION_CLASS_P (op0))
5785 /* ... and has wrapping overflow, and its type is smaller
5786 than ctype, then we cannot pass through as widening. */
5787 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5788 && (TYPE_PRECISION (ctype)
5789 > TYPE_PRECISION (TREE_TYPE (op0))))
5790 /* ... or this is a truncation (t is narrower than op0),
5791 then we cannot pass through this narrowing. */
5792 || (TYPE_PRECISION (type)
5793 < TYPE_PRECISION (TREE_TYPE (op0)))
5794 /* ... or signedness changes for division or modulus,
5795 then we cannot pass through this conversion. */
5796 || (code != MULT_EXPR
5797 && (TYPE_UNSIGNED (ctype)
5798 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5799 /* ... or has undefined overflow while the type it is
5800 converted to has not, in which case we cannot do the operation
5801 in the inner type as that would introduce undefined overflow. */
5802 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5803 && !TYPE_OVERFLOW_UNDEFINED (type))))
5804 break;
5805
5806 /* Pass the constant down and see if we can make a simplification. If
5807 we can, replace this expression with the inner simplification for
5808 possible later conversion to our or some other type. */
5809 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5810 && TREE_CODE (t2) == INTEGER_CST
5811 && !TREE_OVERFLOW (t2)
5812 && (0 != (t1 = extract_muldiv (op0, t2, code,
5813 code == MULT_EXPR
5814 ? ctype : NULL_TREE,
5815 strict_overflow_p))))
5816 return t1;
5817 break;
5818
5819 case ABS_EXPR:
5820 /* If widening the type changes it from signed to unsigned, then we
5821 must avoid building ABS_EXPR itself as unsigned. */
5822 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5823 {
5824 tree cstype = (*signed_type_for) (ctype);
5825 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5826 != 0)
5827 {
5828 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5829 return fold_convert (ctype, t1);
5830 }
5831 break;
5832 }
5833 /* If the constant is negative, we cannot simplify this. */
5834 if (tree_int_cst_sgn (c) == -1)
5835 break;
5836 /* FALLTHROUGH */
5837 case NEGATE_EXPR:
5838 /* For division and modulus, type can't be unsigned, as e.g.
5839 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5840 For signed types, even with wrapping overflow, this is fine. */
5841 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5842 break;
5843 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5844 != 0)
5845 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5846 break;
5847
5848 case MIN_EXPR: case MAX_EXPR:
5849 /* If widening the type changes the signedness, then we can't perform
5850 this optimization as that changes the result. */
5851 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5852 break;
5853
5854 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5855 sub_strict_overflow_p = false;
5856 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5857 &sub_strict_overflow_p)) != 0
5858 && (t2 = extract_muldiv (op1, c, code, wide_type,
5859 &sub_strict_overflow_p)) != 0)
5860 {
5861 if (tree_int_cst_sgn (c) < 0)
5862 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5863 if (sub_strict_overflow_p)
5864 *strict_overflow_p = true;
5865 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5866 fold_convert (ctype, t2));
5867 }
5868 break;
5869
5870 case LSHIFT_EXPR: case RSHIFT_EXPR:
5871 /* If the second operand is constant, this is a multiplication
5872 or floor division by a power of two, so we can treat it that
5873 way unless the multiplier or divisor overflows. Signed
5874 left-shift overflow is implementation-defined rather than
5875 undefined in C90, so do not convert signed left shift into
5876 multiplication. */
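      /* For example (sketch): with unsigned x, x << 3 is recast below
         as x * 8 and x >> 3 as x / 8 (floor division), so that the
         recursive call sees an ordinary MULT_EXPR or FLOOR_DIV_EXPR.  */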
5877 if (TREE_CODE (op1) == INTEGER_CST
5878 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5879 /* const_binop may not detect overflow correctly,
5880 so check for it explicitly here. */
5881 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5882 && TREE_INT_CST_HIGH (op1) == 0
5883 && 0 != (t1 = fold_convert (ctype,
5884 const_binop (LSHIFT_EXPR,
5885 size_one_node,
5886 op1)))
5887 && !TREE_OVERFLOW (t1))
5888 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5889 ? MULT_EXPR : FLOOR_DIV_EXPR,
5890 ctype,
5891 fold_convert (ctype, op0),
5892 t1),
5893 c, code, wide_type, strict_overflow_p);
5894 break;
5895
5896 case PLUS_EXPR: case MINUS_EXPR:
5897 /* See if we can eliminate the operation on both sides. If we can, we
5898 can return a new PLUS or MINUS. If we can't, the only remaining
5899 cases where we can do anything are if the second operand is a
5900 constant. */
5901 sub_strict_overflow_p = false;
5902 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5903 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5904 if (t1 != 0 && t2 != 0
5905 && (code == MULT_EXPR
5906 /* If not multiplication, we can only do this if both operands
5907 are divisible by c. */
5908 || (multiple_of_p (ctype, op0, c)
5909 && multiple_of_p (ctype, op1, c))))
5910 {
5911 if (sub_strict_overflow_p)
5912 *strict_overflow_p = true;
5913 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5914 fold_convert (ctype, t2));
5915 }
5916
5917 /* If this was a subtraction, negate OP1 and set it to be an addition.
5918 This simplifies the logic below. */
5919 if (tcode == MINUS_EXPR)
5920 {
5921 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5922 /* If OP1 was not easily negatable, the constant may be OP0. */
5923 if (TREE_CODE (op0) == INTEGER_CST)
5924 {
5925 tree tem = op0;
5926 op0 = op1;
5927 op1 = tem;
5928 tem = t1;
5929 t1 = t2;
5930 t2 = tem;
5931 }
5932 }
5933
5934 if (TREE_CODE (op1) != INTEGER_CST)
5935 break;
5936
5937 /* If either OP1 or C is negative, this optimization is not safe for
5938 some of the division and remainder types while for others we need
5939 to change the code. */
5940 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5941 {
5942 if (code == CEIL_DIV_EXPR)
5943 code = FLOOR_DIV_EXPR;
5944 else if (code == FLOOR_DIV_EXPR)
5945 code = CEIL_DIV_EXPR;
5946 else if (code != MULT_EXPR
5947 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5948 break;
5949 }
5950
5951 /* If it's a multiply or a division/modulus operation of a multiple
5952 of our constant, do the operation and verify it doesn't overflow. */
5953 if (code == MULT_EXPR
5954 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5955 {
5956 op1 = const_binop (code, fold_convert (ctype, op1),
5957 fold_convert (ctype, c));
5958 /* We allow the constant to overflow with wrapping semantics. */
5959 if (op1 == 0
5960 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5961 break;
5962 }
5963 else
5964 break;
5965
5966 /* If we have an unsigned type, we cannot widen the operation since it
5967 will change the result if the original computation overflowed. */
5968 if (TYPE_UNSIGNED (ctype) && ctype != type)
5969 break;
5970
5971 /* If we were able to eliminate our operation from the first side,
5972 apply our operation to the second side and reform the PLUS. */
5973 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5974 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5975
5976 /* The last case is if we are a multiply. In that case, we can
5977 apply the distributive law to commute the multiply and addition
5978 if the multiplication of the constants doesn't overflow
5979 and overflow is defined. With undefined overflow
5980 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5981 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5982 return fold_build2 (tcode, ctype,
5983 fold_build2 (code, ctype,
5984 fold_convert (ctype, op0),
5985 fold_convert (ctype, c)),
5986 op1);
5987
5988 break;
5989
5990 case MULT_EXPR:
5991 /* We have a special case here if we are doing something like
5992 (C * 8) % 4 since we know that's zero. */
5993 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5994 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5995 /* If the multiplication can overflow we cannot optimize this. */
5996 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5997 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5998 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5999 {
6000 *strict_overflow_p = true;
6001 return omit_one_operand (type, integer_zero_node, op0);
6002 }
6003
6004 /* ... fall through ... */
6005
6006 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6007 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6008 /* If we can extract our operation from the LHS, do so and return a
6009 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6010 do something only if the second operand is a constant. */
6011 if (same_p
6012 && (t1 = extract_muldiv (op0, c, code, wide_type,
6013 strict_overflow_p)) != 0)
6014 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6015 fold_convert (ctype, op1));
6016 else if (tcode == MULT_EXPR && code == MULT_EXPR
6017 && (t1 = extract_muldiv (op1, c, code, wide_type,
6018 strict_overflow_p)) != 0)
6019 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6020 fold_convert (ctype, t1));
6021 else if (TREE_CODE (op1) != INTEGER_CST)
6022 return 0;
6023
6024 /* If these are the same operation types, we can associate them
6025 assuming no overflow. */
6026 if (tcode == code)
6027 {
6028 double_int mul;
6029 bool overflow_p;
6030 unsigned prec = TYPE_PRECISION (ctype);
6031 bool uns = TYPE_UNSIGNED (ctype);
6032 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6033 double_int dic = tree_to_double_int (c).ext (prec, uns);
6034 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6035 overflow_p = ((!uns && overflow_p)
6036 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6037 if (!double_int_fits_to_tree_p (ctype, mul)
6038 && ((uns && tcode != MULT_EXPR) || !uns))
6039 overflow_p = 1;
6040 if (!overflow_p)
6041 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6042 double_int_to_tree (ctype, mul));
6043 }
6044
6045 /* If these operations "cancel" each other, we have the main
6046 optimizations of this pass, which occur when either constant is a
6047 multiple of the other, in which case we replace this with either an
5948 operation of CODE or TCODE.
6049
6050 If we have an unsigned type, we cannot do this since it will change
6051 the result if the original computation overflowed. */
6052 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6053 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6054 || (tcode == MULT_EXPR
6055 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6056 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6057 && code != MULT_EXPR)))
6058 {
6059 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6060 {
6061 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6062 *strict_overflow_p = true;
6063 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6064 fold_convert (ctype,
6065 const_binop (TRUNC_DIV_EXPR,
6066 op1, c)));
6067 }
6068 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6069 {
6070 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6071 *strict_overflow_p = true;
6072 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6073 fold_convert (ctype,
6074 const_binop (TRUNC_DIV_EXPR,
6075 c, op1)));
6076 }
6077 }
6078 break;
6079
6080 default:
6081 break;
6082 }
6083
6084 return 0;
6085 }
6086 \f
6087 /* Return a node which has the indicated constant VALUE (either 0 or
6088 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6089 and is of the indicated TYPE. */
6090
6091 tree
6092 constant_boolean_node (bool value, tree type)
6093 {
6094 if (type == integer_type_node)
6095 return value ? integer_one_node : integer_zero_node;
6096 else if (type == boolean_type_node)
6097 return value ? boolean_true_node : boolean_false_node;
6098 else if (TREE_CODE (type) == VECTOR_TYPE)
6099 return build_vector_from_val (type,
6100 build_int_cst (TREE_TYPE (type),
6101 value ? -1 : 0));
6102 else
6103 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6104 }
6105
6106
6107 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6108 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6109 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6110 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6111 COND is the first argument to CODE; otherwise (as in the example
6112 given here), it is the second argument. TYPE is the type of the
6113 original expression. Return NULL_TREE if no simplification is
6114 possible. */
6115
6116 static tree
6117 fold_binary_op_with_conditional_arg (location_t loc,
6118 enum tree_code code,
6119 tree type, tree op0, tree op1,
6120 tree cond, tree arg, int cond_first_p)
6121 {
6122 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6123 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6124 tree test, true_value, false_value;
6125 tree lhs = NULL_TREE;
6126 tree rhs = NULL_TREE;
6127 enum tree_code cond_code = COND_EXPR;
6128
6129 if (TREE_CODE (cond) == COND_EXPR
6130 || TREE_CODE (cond) == VEC_COND_EXPR)
6131 {
6132 test = TREE_OPERAND (cond, 0);
6133 true_value = TREE_OPERAND (cond, 1);
6134 false_value = TREE_OPERAND (cond, 2);
6135 /* If this operand is a throw expression (and therefore has void
6136 type), it does not make sense to try to perform a logical or
6137 arithmetic operation involving it. */
6138 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6139 lhs = true_value;
6140 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6141 rhs = false_value;
6142 }
6143 else
6144 {
6145 tree testtype = TREE_TYPE (cond);
6146 test = cond;
6147 true_value = constant_boolean_node (true, testtype);
6148 false_value = constant_boolean_node (false, testtype);
6149 }
6150
6151 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6152 cond_code = VEC_COND_EXPR;
6153
6154 /* This transformation is only worthwhile if we don't have to wrap ARG
6155 in a SAVE_EXPR and the operation can be simplified without recursing
6156 on at least one of the branches once it is pushed inside the COND_EXPR. */
6157 if (!TREE_CONSTANT (arg)
6158 && (TREE_SIDE_EFFECTS (arg)
6159 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6160 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6161 return NULL_TREE;
6162
6163 arg = fold_convert_loc (loc, arg_type, arg);
6164 if (lhs == 0)
6165 {
6166 true_value = fold_convert_loc (loc, cond_type, true_value);
6167 if (cond_first_p)
6168 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6169 else
6170 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6171 }
6172 if (rhs == 0)
6173 {
6174 false_value = fold_convert_loc (loc, cond_type, false_value);
6175 if (cond_first_p)
6176 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6177 else
6178 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6179 }
6180
6181 /* Check that we have simplified at least one of the branches. */
6182 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6183 return NULL_TREE;
6184
6185 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6186 }
6187
6188 \f
6189 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6190
6191 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6192 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6193 ADDEND is the same as X.
6194
6195 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6196 and finite. The problematic cases are when X is zero, and its mode
6197 has signed zeros. In the case of rounding towards -infinity,
6198 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6199 modes, X + 0 is not the same as X because -0 + 0 is 0. */
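/* For instance (illustrative): under the default rounding mode,
   (-0.0) + 0.0 yields +0.0, so X + 0.0 cannot be folded to X when X
   may be -0.0 and signed zeros are honored; and under rounding
   towards -infinity, (+0.0) - 0.0 yields -0.0, so X - 0.0 cannot be
   folded either in that case.  */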
6200
6201 bool
6202 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6203 {
6204 if (!real_zerop (addend))
6205 return false;
6206
6207 /* Don't allow the fold with -fsignaling-nans. */
6208 if (HONOR_SNANS (TYPE_MODE (type)))
6209 return false;
6210
6211 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6212 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6213 return true;
6214
6215 /* In a vector or complex, we would need to check the sign of all zeros. */
6216 if (TREE_CODE (addend) != REAL_CST)
6217 return false;
6218
6219 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6220 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6221 negate = !negate;
6222
6223 /* The mode has signed zeros, and we have to honor their sign.
6224 In this situation, there is only one case we can return true for.
6225 X - 0 is the same as X unless rounding towards -infinity is
6226 supported. */
6227 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6228 }
6229
6230 /* Subroutine of fold() that checks comparisons of built-in math
6231 functions against real constants.
6232
6233 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6234 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6235 is the type of the result and ARG0 and ARG1 are the operands of the
6236 comparison. ARG1 must be a TREE_REAL_CST.
6237
6238 The function returns the constant folded tree if a simplification
6239 can be made, and NULL_TREE otherwise. */
6240
6241 static tree
6242 fold_mathfn_compare (location_t loc,
6243 enum built_in_function fcode, enum tree_code code,
6244 tree type, tree arg0, tree arg1)
6245 {
6246 REAL_VALUE_TYPE c;
6247
6248 if (BUILTIN_SQRT_P (fcode))
6249 {
6250 tree arg = CALL_EXPR_ARG (arg0, 0);
6251 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6252
6253 c = TREE_REAL_CST (arg1);
6254 if (REAL_VALUE_NEGATIVE (c))
6255 {
6256 /* sqrt(x) == y, sqrt(x) < y and sqrt(x) <= y are always false, if y is negative. */
6257 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6258 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6259
6260 /* sqrt(x) > y is always true, if y is negative and we
6261 don't care about NaNs, i.e. negative values of x. */
6262 if (code == NE_EXPR || !HONOR_NANS (mode))
6263 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6264
6265 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6266 return fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg), dconst0));
6268 }
6269 else if (code == GT_EXPR || code == GE_EXPR)
6270 {
6271 REAL_VALUE_TYPE c2;
6272
6273 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6274 real_convert (&c2, mode, &c2);
6275
6276 if (REAL_VALUE_ISINF (c2))
6277 {
6278 /* sqrt(x) > y is x == +Inf, when y is very large. */
6279 if (HONOR_INFINITIES (mode))
6280 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6281 build_real (TREE_TYPE (arg), c2));
6282
6283 /* sqrt(x) > y is always false, when y is very large
6284 and we don't care about infinities. */
6285 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6286 }
6287
6288 /* sqrt(x) > c is the same as x > c*c. */
6289 return fold_build2_loc (loc, code, type, arg,
6290 build_real (TREE_TYPE (arg), c2));
6291 }
6292 else if (code == LT_EXPR || code == LE_EXPR)
6293 {
6294 REAL_VALUE_TYPE c2;
6295
6296 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6297 real_convert (&c2, mode, &c2);
6298
6299 if (REAL_VALUE_ISINF (c2))
6300 {
6301 /* sqrt(x) < y is always true, when y is a very large
6302 value and we don't care about NaNs or Infinities. */
6303 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6304 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6305
6306 /* sqrt(x) < y is x != +Inf when y is very large and we
6307 don't care about NaNs. */
6308 if (! HONOR_NANS (mode))
6309 return fold_build2_loc (loc, NE_EXPR, type, arg,
6310 build_real (TREE_TYPE (arg), c2));
6311
6312 /* sqrt(x) < y is x >= 0 when y is very large and we
6313 don't care about Infinities. */
6314 if (! HONOR_INFINITIES (mode))
6315 return fold_build2_loc (loc, GE_EXPR, type, arg,
6316 build_real (TREE_TYPE (arg), dconst0));
6317
6318 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6319 arg = save_expr (arg);
6320 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6321 fold_build2_loc (loc, GE_EXPR, type, arg,
6322 build_real (TREE_TYPE (arg),
6323 dconst0)),
6324 fold_build2_loc (loc, NE_EXPR, type, arg,
6325 build_real (TREE_TYPE (arg),
6326 c2)));
6327 }
6328
6329 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6330 if (! HONOR_NANS (mode))
6331 return fold_build2_loc (loc, code, type, arg,
6332 build_real (TREE_TYPE (arg), c2));
6333
6334 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6335 arg = save_expr (arg);
6336 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6337 fold_build2_loc (loc, GE_EXPR, type, arg,
6338 build_real (TREE_TYPE (arg),
6339 dconst0)),
6340 fold_build2_loc (loc, code, type, arg,
6341 build_real (TREE_TYPE (arg),
6342 c2)));
6343 }
6344 }
6345
6346 return NULL_TREE;
6347 }
6348
6349 /* Subroutine of fold() that optimizes comparisons against Infinities,
6350 either +Inf or -Inf.
6351
6352 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6353 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6354 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6355
6356 The function returns the constant folded tree if a simplification
6357 can be made, and NULL_TREE otherwise. */
6358
6359 static tree
6360 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6361 tree arg0, tree arg1)
6362 {
6363 enum machine_mode mode;
6364 REAL_VALUE_TYPE max;
6365 tree temp;
6366 bool neg;
6367
6368 mode = TYPE_MODE (TREE_TYPE (arg0));
6369
6370 /* For negative infinity swap the sense of the comparison. */
6371 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6372 if (neg)
6373 code = swap_tree_comparison (code);
6374
6375 switch (code)
6376 {
6377 case GT_EXPR:
6378 /* x > +Inf is always false, if we ignore sNaNs. */
6379 if (HONOR_SNANS (mode))
6380 return NULL_TREE;
6381 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6382
6383 case LE_EXPR:
6384 /* x <= +Inf is always true, if we don't care about NaNs. */
6385 if (! HONOR_NANS (mode))
6386 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6387
6388 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6389 arg0 = save_expr (arg0);
6390 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6391
6392 case EQ_EXPR:
6393 case GE_EXPR:
6394 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6395 real_maxval (&max, neg, mode);
6396 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6397 arg0, build_real (TREE_TYPE (arg0), max));
6398
6399 case LT_EXPR:
6400 /* x < +Inf is always equal to x <= DBL_MAX. */
6401 real_maxval (&max, neg, mode);
6402 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6403 arg0, build_real (TREE_TYPE (arg0), max));
6404
6405 case NE_EXPR:
6406 /* x != +Inf is always equal to !(x > DBL_MAX). */
6407 real_maxval (&max, neg, mode);
6408 if (! HONOR_NANS (mode))
6409 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6410 arg0, build_real (TREE_TYPE (arg0), max));
6411
6412 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6413 arg0, build_real (TREE_TYPE (arg0), max));
6414 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6415
6416 default:
6417 break;
6418 }
6419
6420 return NULL_TREE;
6421 }
6422
6423 /* Subroutine of fold() that optimizes comparisons of a division by
6424 a nonzero integer constant against an integer constant, i.e.
6425 X/C1 op C2.
6426
6427 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6428 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6429 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6430
6431 The function returns the constant folded tree if a simplification
6432 can be made, and NULL_TREE otherwise. */
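/* For example (sketch): with unsigned X, X / 4 == 3 holds exactly for
   X in [12, 15], so the comparison is rewritten as the range check
   12 <= X && X <= 15 via build_range_check.  */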
6433
6434 static tree
6435 fold_div_compare (location_t loc,
6436 enum tree_code code, tree type, tree arg0, tree arg1)
6437 {
6438 tree prod, tmp, hi, lo;
6439 tree arg00 = TREE_OPERAND (arg0, 0);
6440 tree arg01 = TREE_OPERAND (arg0, 1);
6441 double_int val;
6442 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6443 bool neg_overflow;
6444 bool overflow;
6445
6446 /* We have to do this the hard way to detect unsigned overflow.
6447 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6448 val = TREE_INT_CST (arg01)
6449 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6450 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6451 neg_overflow = false;
6452
6453 if (unsigned_p)
6454 {
6455 tmp = int_const_binop (MINUS_EXPR, arg01,
6456 build_int_cst (TREE_TYPE (arg01), 1));
6457 lo = prod;
6458
6459 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6460 val = TREE_INT_CST (prod)
6461 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6462 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6463 -1, overflow | TREE_OVERFLOW (prod));
6464 }
6465 else if (tree_int_cst_sgn (arg01) >= 0)
6466 {
6467 tmp = int_const_binop (MINUS_EXPR, arg01,
6468 build_int_cst (TREE_TYPE (arg01), 1));
6469 switch (tree_int_cst_sgn (arg1))
6470 {
6471 case -1:
6472 neg_overflow = true;
6473 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6474 hi = prod;
6475 break;
6476
6477 case 0:
6478 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6479 hi = tmp;
6480 break;
6481
6482 case 1:
6483 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6484 lo = prod;
6485 break;
6486
6487 default:
6488 gcc_unreachable ();
6489 }
6490 }
6491 else
6492 {
6493 /* A negative divisor reverses the relational operators. */
6494 code = swap_tree_comparison (code);
6495
6496 tmp = int_const_binop (PLUS_EXPR, arg01,
6497 build_int_cst (TREE_TYPE (arg01), 1));
6498 switch (tree_int_cst_sgn (arg1))
6499 {
6500 case -1:
6501 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6502 lo = prod;
6503 break;
6504
6505 case 0:
6506 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6507 lo = tmp;
6508 break;
6509
6510 case 1:
6511 neg_overflow = true;
6512 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6513 hi = prod;
6514 break;
6515
6516 default:
6517 gcc_unreachable ();
6518 }
6519 }
6520
6521 switch (code)
6522 {
6523 case EQ_EXPR:
6524 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6525 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6526 if (TREE_OVERFLOW (hi))
6527 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6528 if (TREE_OVERFLOW (lo))
6529 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6530 return build_range_check (loc, type, arg00, 1, lo, hi);
6531
6532 case NE_EXPR:
6533 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6534 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6535 if (TREE_OVERFLOW (hi))
6536 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6537 if (TREE_OVERFLOW (lo))
6538 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6539 return build_range_check (loc, type, arg00, 0, lo, hi);
6540
6541 case LT_EXPR:
6542 if (TREE_OVERFLOW (lo))
6543 {
6544 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6545 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 }
6547 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6548
6549 case LE_EXPR:
6550 if (TREE_OVERFLOW (hi))
6551 {
6552 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6553 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 }
6555 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6556
6557 case GT_EXPR:
6558 if (TREE_OVERFLOW (hi))
6559 {
6560 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6561 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 }
6563 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6564
6565 case GE_EXPR:
6566 if (TREE_OVERFLOW (lo))
6567 {
6568 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6569 return omit_one_operand_loc (loc, type, tmp, arg00);
6570 }
6571 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6572
6573 default:
6574 break;
6575 }
6576
6577 return NULL_TREE;
6578 }
6579
6580
6581 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6582 equality/inequality test, then return a simplified form of the test
6583 using a sign test. Otherwise return NULL. TYPE is the desired
6584 result type. */
6585
6586 static tree
6587 fold_single_bit_test_into_sign_test (location_t loc,
6588 enum tree_code code, tree arg0, tree arg1,
6589 tree result_type)
6590 {
6591 /* If this is testing a single bit, we can optimize the test. */
6592 if ((code == NE_EXPR || code == EQ_EXPR)
6593 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6594 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6595 {
6596 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6597 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6598 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6599
6600 if (arg00 != NULL_TREE
6601 /* This is only a win if casting to a signed type is cheap,
6602 i.e. when arg00's type is not a partial mode. */
6603 && TYPE_PRECISION (TREE_TYPE (arg00))
6604 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6605 {
6606 tree stype = signed_type_for (TREE_TYPE (arg00));
6607 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6608 result_type,
6609 fold_convert_loc (loc, stype, arg00),
6610 build_int_cst (stype, 0));
6611 }
6612 }
6613
6614 return NULL_TREE;
6615 }
6616
6617 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6618 equality/inequality test, then return a simplified form of
6619 the test using shifts and logical operations. Otherwise return
6620 NULL. TYPE is the desired result type. */
6621
6622 tree
6623 fold_single_bit_test (location_t loc, enum tree_code code,
6624 tree arg0, tree arg1, tree result_type)
6625 {
6626 /* If this is testing a single bit, we can optimize the test. */
6627 if ((code == NE_EXPR || code == EQ_EXPR)
6628 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6629 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6630 {
6631 tree inner = TREE_OPERAND (arg0, 0);
6632 tree type = TREE_TYPE (arg0);
6633 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6634 enum machine_mode operand_mode = TYPE_MODE (type);
6635 int ops_unsigned;
6636 tree signed_type, unsigned_type, intermediate_type;
6637 tree tem, one;
6638
6639 /* First, see if we can fold the single bit test into a sign-bit
6640 test. */
6641 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6642 result_type);
6643 if (tem)
6644 return tem;
6645
6646 /* Otherwise we have (A & C) != 0 where C is a single bit,
6647 convert that into ((A >> C2) & 1), where C2 = log2(C).
6648 Similarly for (A & C) == 0. */
6649
6650 /* If INNER is a right shift of a constant and it plus BITNUM does
6651 not overflow, adjust BITNUM and INNER. */
6652 if (TREE_CODE (inner) == RSHIFT_EXPR
6653 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6654 && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
6655 && bitnum < TYPE_PRECISION (type)
6656 && (tree_to_uhwi (TREE_OPERAND (inner, 1))
6657 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6658 {
6659 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6660 inner = TREE_OPERAND (inner, 0);
6661 }
6662
6663 /* If we are going to be able to omit the AND below, we must do our
6664 operations as unsigned. If we must use the AND, we have a choice.
6665 Normally unsigned is faster, but for some machines signed is. */
6666 #ifdef LOAD_EXTEND_OP
6667 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6668 && !flag_syntax_only) ? 0 : 1;
6669 #else
6670 ops_unsigned = 1;
6671 #endif
6672
6673 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6674 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6675 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6676 inner = fold_convert_loc (loc, intermediate_type, inner);
6677
6678 if (bitnum != 0)
6679 inner = build2 (RSHIFT_EXPR, intermediate_type,
6680 inner, size_int (bitnum));
6681
6682 one = build_int_cst (intermediate_type, 1);
6683
6684 if (code == EQ_EXPR)
6685 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6686
6687 /* Put the AND last so it can combine with more things. */
6688 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6689
6690 /* Make sure to return the proper type. */
6691 inner = fold_convert_loc (loc, result_type, inner);
6692
6693 return inner;
6694 }
6695 return NULL_TREE;
6696 }
6697
6698 /* Check whether we are allowed to reorder operands arg0 and arg1,
6699 such that the evaluation of arg1 occurs before arg0. */
6700
6701 static bool
6702 reorder_operands_p (const_tree arg0, const_tree arg1)
6703 {
6704 if (! flag_evaluation_order)
6705 return true;
6706 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6707 return true;
6708 return ! TREE_SIDE_EFFECTS (arg0)
6709 && ! TREE_SIDE_EFFECTS (arg1);
6710 }
6711
6712 /* Test whether it is preferable to swap two operands, ARG0 and
6713 ARG1, for example because ARG0 is an integer constant and ARG1
6714 isn't. If REORDER is true, only recommend swapping if we can
6715 evaluate the operands in reverse order. */
6716
6717 bool
6718 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6719 {
6720 STRIP_SIGN_NOPS (arg0);
6721 STRIP_SIGN_NOPS (arg1);
6722
6723 if (TREE_CODE (arg1) == INTEGER_CST)
6724 return 0;
6725 if (TREE_CODE (arg0) == INTEGER_CST)
6726 return 1;
6727
6728 if (TREE_CODE (arg1) == REAL_CST)
6729 return 0;
6730 if (TREE_CODE (arg0) == REAL_CST)
6731 return 1;
6732
6733 if (TREE_CODE (arg1) == FIXED_CST)
6734 return 0;
6735 if (TREE_CODE (arg0) == FIXED_CST)
6736 return 1;
6737
6738 if (TREE_CODE (arg1) == COMPLEX_CST)
6739 return 0;
6740 if (TREE_CODE (arg0) == COMPLEX_CST)
6741 return 1;
6742
6743 if (TREE_CONSTANT (arg1))
6744 return 0;
6745 if (TREE_CONSTANT (arg0))
6746 return 1;
6747
6748 if (optimize_function_for_size_p (cfun))
6749 return 0;
6750
6751 if (reorder && flag_evaluation_order
6752 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6753 return 0;
6754
6755 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6756 for commutative and comparison operators. Ensuring a canonical
6757 form allows the optimizers to find additional redundancies without
6758 having to explicitly check for both orderings. */
6759 if (TREE_CODE (arg0) == SSA_NAME
6760 && TREE_CODE (arg1) == SSA_NAME
6761 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6762 return 1;
6763
6764 /* Put SSA_NAMEs last. */
6765 if (TREE_CODE (arg1) == SSA_NAME)
6766 return 0;
6767 if (TREE_CODE (arg0) == SSA_NAME)
6768 return 1;
6769
6770 /* Put variables last. */
6771 if (DECL_P (arg1))
6772 return 0;
6773 if (DECL_P (arg0))
6774 return 1;
6775
6776 return 0;
6777 }
6778
6779 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6780 ARG0 is extended to a wider type. */
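/* For example (illustrative): if SC has type signed char, the test
   (int) SC == 1000 can never hold because 1000 is outside
   [-128, 127]; it folds to constant zero, keeping SC for any side
   effects.  */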
6781
6782 static tree
6783 fold_widened_comparison (location_t loc, enum tree_code code,
6784 tree type, tree arg0, tree arg1)
6785 {
6786 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6787 tree arg1_unw;
6788 tree shorter_type, outer_type;
6789 tree min, max;
6790 bool above, below;
6791
6792 if (arg0_unw == arg0)
6793 return NULL_TREE;
6794 shorter_type = TREE_TYPE (arg0_unw);
6795
6796 #ifdef HAVE_canonicalize_funcptr_for_compare
6797 /* Disable this optimization if we're casting a function pointer
6798 type on targets that require function pointer canonicalization. */
6799 if (HAVE_canonicalize_funcptr_for_compare
6800 && TREE_CODE (shorter_type) == POINTER_TYPE
6801 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6802 return NULL_TREE;
6803 #endif
6804
6805 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6806 return NULL_TREE;
6807
6808 arg1_unw = get_unwidened (arg1, NULL_TREE);
6809
6810 /* If possible, express the comparison in the shorter mode. */
6811 if ((code == EQ_EXPR || code == NE_EXPR
6812 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6813 && (TREE_TYPE (arg1_unw) == shorter_type
6814 || ((TYPE_PRECISION (shorter_type)
6815 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6816 && (TYPE_UNSIGNED (shorter_type)
6817 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6818 || (TREE_CODE (arg1_unw) == INTEGER_CST
6819 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6820 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6821 && int_fits_type_p (arg1_unw, shorter_type))))
6822 return fold_build2_loc (loc, code, type, arg0_unw,
6823 fold_convert_loc (loc, shorter_type, arg1_unw));
6824
6825 if (TREE_CODE (arg1_unw) != INTEGER_CST
6826 || TREE_CODE (shorter_type) != INTEGER_TYPE
6827 || !int_fits_type_p (arg1_unw, shorter_type))
6828 return NULL_TREE;
6829
6830 /* If we are comparing with an integer that does not fit into the range
6831 of the shorter type, the result is known. */
6832 outer_type = TREE_TYPE (arg1_unw);
6833 min = lower_bound_in_type (outer_type, shorter_type);
6834 max = upper_bound_in_type (outer_type, shorter_type);
6835
6836 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6837 max, arg1_unw));
6838 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6839 arg1_unw, min));
6840
6841 switch (code)
6842 {
6843 case EQ_EXPR:
6844 if (above || below)
6845 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6846 break;
6847
6848 case NE_EXPR:
6849 if (above || below)
6850 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6851 break;
6852
6853 case LT_EXPR:
6854 case LE_EXPR:
6855 if (above)
6856 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6857 else if (below)
5858 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
5859 break;

6860 case GT_EXPR:
6861 case GE_EXPR:
6862 if (above)
6863 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6864 else if (below)
5865 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
5866 break;

6867 default:
6868 break;
6869 }
6870
6871 return NULL_TREE;
6872 }
6873
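/* Worked example for fold_widened_comparison (an illustrative sketch,
   assuming a 16-bit short and a 32-bit int):

     short s;
     ... (int) s == 100000 ...

   100000 does not fit in short, so ARG1 lies above the range of the
   shorter type; the EQ_EXPR case folds the whole comparison to 0 (and
   (int) s < 100000 to 1), preserving any side effects of ARG0 via
   omit_one_operand_loc.  */
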
6874 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where the
6875 conversion on ARG0 changes only the signedness. */
6876
6877 static tree
6878 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6879 tree arg0, tree arg1)
6880 {
6881 tree arg0_inner;
6882 tree inner_type, outer_type;
6883
6884 if (!CONVERT_EXPR_P (arg0))
6885 return NULL_TREE;
6886
6887 outer_type = TREE_TYPE (arg0);
6888 arg0_inner = TREE_OPERAND (arg0, 0);
6889 inner_type = TREE_TYPE (arg0_inner);
6890
6891 #ifdef HAVE_canonicalize_funcptr_for_compare
6892 /* Disable this optimization if we're casting a function pointer
6893 type on targets that require function pointer canonicalization. */
6894 if (HAVE_canonicalize_funcptr_for_compare
6895 && TREE_CODE (inner_type) == POINTER_TYPE
6896 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6897 return NULL_TREE;
6898 #endif
6899
6900 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6901 return NULL_TREE;
6902
6903 if (TREE_CODE (arg1) != INTEGER_CST
6904 && !(CONVERT_EXPR_P (arg1)
6905 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6906 return NULL_TREE;
6907
6908 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6909 && code != NE_EXPR
6910 && code != EQ_EXPR)
6911 return NULL_TREE;
6912
6913 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6914 return NULL_TREE;
6915
6916 if (TREE_CODE (arg1) == INTEGER_CST)
6917 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6918 0, TREE_OVERFLOW (arg1));
6919 else
6920 arg1 = fold_convert_loc (loc, inner_type, arg1);
6921
6922 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6923 }
6924
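/* Worked example for fold_sign_changed_comparison (an illustrative
   sketch, assuming a 32-bit int):

     int i;
     ... (unsigned int) i == 5u ...

   The inner and outer types have equal precision and differ only in
   signedness, which is fine for EQ_EXPR, so the cast is dropped and
   the constant is refitted into the inner type, yielding i == 5.  */
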
6925 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6926 the step of the array. Reconstructs s and delta in the case of s *
6927 delta being an integer constant (and thus already folded). ADDR is
6928 the address. MULT is the multiplicative expression. If the
6929 function succeeds, the new address expression is returned.
6930 Otherwise NULL_TREE is returned. LOC is the location of the
6931 resulting expression. */
6932
6933 static tree
6934 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6935 {
6936 tree s, delta, step;
6937 tree ref = TREE_OPERAND (addr, 0), pref;
6938 tree ret, pos;
6939 tree itype;
6940 bool mdim = false;
6941
6942 /* Strip the nops that might be added when converting op1 to sizetype. */
6943 STRIP_NOPS (op1);
6944
6945 /* Canonicalize op1 into a possibly non-constant delta
6946 and an INTEGER_CST s. */
6947 if (TREE_CODE (op1) == MULT_EXPR)
6948 {
6949 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6950
6951 STRIP_NOPS (arg0);
6952 STRIP_NOPS (arg1);
6953
6954 if (TREE_CODE (arg0) == INTEGER_CST)
6955 {
6956 s = arg0;
6957 delta = arg1;
6958 }
6959 else if (TREE_CODE (arg1) == INTEGER_CST)
6960 {
6961 s = arg1;
6962 delta = arg0;
6963 }
6964 else
6965 return NULL_TREE;
6966 }
6967 else if (TREE_CODE (op1) == INTEGER_CST)
6968 {
6969 delta = op1;
6970 s = NULL_TREE;
6971 }
6972 else
6973 {
6974 /* Treat op1 as delta * 1. */
6975 delta = op1;
6976 s = integer_one_node;
6977 }
6978
6979 /* Handle &x.array the same as we would handle &x.array[0]. */
6980 if (TREE_CODE (ref) == COMPONENT_REF
6981 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6982 {
6983 tree domain;
6984
6985 /* Remember if this was a multi-dimensional array. */
6986 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6987 mdim = true;
6988
6989 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6990 if (! domain)
6991 goto cont;
6992 itype = TREE_TYPE (domain);
6993
6994 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6995 if (TREE_CODE (step) != INTEGER_CST)
6996 goto cont;
6997
6998 if (s)
6999 {
7000 if (! tree_int_cst_equal (step, s))
7001 goto cont;
7002 }
7003 else
7004 {
7005 /* Check whether delta is a multiple of step. */
7006 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7007 if (! tmp)
7008 goto cont;
7009 delta = tmp;
7010 }
7011
7012 /* Only fold here if we can verify we do not overflow one
7013 dimension of a multi-dimensional array. */
7014 if (mdim)
7015 {
7016 tree tmp;
7017
7018 if (!TYPE_MIN_VALUE (domain)
7019 || !TYPE_MAX_VALUE (domain)
7020 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7021 goto cont;
7022
7023 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7024 fold_convert_loc (loc, itype,
7025 TYPE_MIN_VALUE (domain)),
7026 fold_convert_loc (loc, itype, delta));
7027 if (TREE_CODE (tmp) != INTEGER_CST
7028 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7029 goto cont;
7030 }
7031
7032 /* We found a suitable component reference. */
7033
7034 pref = TREE_OPERAND (addr, 0);
7035 ret = copy_node (pref);
7036 SET_EXPR_LOCATION (ret, loc);
7037
7038 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7039 fold_build2_loc
7040 (loc, PLUS_EXPR, itype,
7041 fold_convert_loc (loc, itype,
7042 TYPE_MIN_VALUE
7043 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7044 fold_convert_loc (loc, itype, delta)),
7045 NULL_TREE, NULL_TREE);
7046 return build_fold_addr_expr_loc (loc, ret);
7047 }
7048
7049 cont:
7050
7051 for (;; ref = TREE_OPERAND (ref, 0))
7052 {
7053 if (TREE_CODE (ref) == ARRAY_REF)
7054 {
7055 tree domain;
7056
7057 /* Remember if this was a multi-dimensional array. */
7058 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7059 mdim = true;
7060
7061 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7062 if (! domain)
7063 continue;
7064 itype = TREE_TYPE (domain);
7065
7066 step = array_ref_element_size (ref);
7067 if (TREE_CODE (step) != INTEGER_CST)
7068 continue;
7069
7070 if (s)
7071 {
7072 if (! tree_int_cst_equal (step, s))
7073 continue;
7074 }
7075 else
7076 {
7077 /* Check whether delta is a multiple of step. */
7078 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7079 if (! tmp)
7080 continue;
7081 delta = tmp;
7082 }
7083
7084 /* Only fold here if we can verify we do not overflow one
7085 dimension of a multi-dimensional array. */
7086 if (mdim)
7087 {
7088 tree tmp;
7089
7090 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7091 || !TYPE_MAX_VALUE (domain)
7092 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7093 continue;
7094
7095 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7096 fold_convert_loc (loc, itype,
7097 TREE_OPERAND (ref, 1)),
7098 fold_convert_loc (loc, itype, delta));
7099 if (!tmp
7100 || TREE_CODE (tmp) != INTEGER_CST
7101 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7102 continue;
7103 }
7104
7105 break;
7106 }
7107 else
7108 mdim = false;
7109
7110 if (!handled_component_p (ref))
7111 return NULL_TREE;
7112 }
7113
7114 /* We found a suitable array reference. So copy everything up to it,
7115 and replace the index. */
7116
7117 pref = TREE_OPERAND (addr, 0);
7118 ret = copy_node (pref);
7119 SET_EXPR_LOCATION (ret, loc);
7120 pos = ret;
7121
7122 while (pref != ref)
7123 {
7124 pref = TREE_OPERAND (pref, 0);
7125 TREE_OPERAND (pos, 0) = copy_node (pref);
7126 pos = TREE_OPERAND (pos, 0);
7127 }
7128
7129 TREE_OPERAND (pos, 1)
7130 = fold_build2_loc (loc, PLUS_EXPR, itype,
7131 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7132 fold_convert_loc (loc, itype, delta));
7133 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7134 }
7135
7136
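/* Worked example for try_move_mult_to_index (an illustrative sketch,
   assuming a 4-byte int):

     int a[10];
     &a[1] p+ i * 4   folds to   &a[1 + i]

   because 4 matches the step of the array.  With a constant offset,
   &a[1] p+ 8 divides the offset by the step (8 / 4 = 2) and folds to
   &a[3].  */
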
7137 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7138 means A >= Y && A != MAX, but in this case we know that
7139 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7140
7141 static tree
7142 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7143 {
7144 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7145
7146 if (TREE_CODE (bound) == LT_EXPR)
7147 a = TREE_OPERAND (bound, 0);
7148 else if (TREE_CODE (bound) == GT_EXPR)
7149 a = TREE_OPERAND (bound, 1);
7150 else
7151 return NULL_TREE;
7152
7153 typea = TREE_TYPE (a);
7154 if (!INTEGRAL_TYPE_P (typea)
7155 && !POINTER_TYPE_P (typea))
7156 return NULL_TREE;
7157
7158 if (TREE_CODE (ineq) == LT_EXPR)
7159 {
7160 a1 = TREE_OPERAND (ineq, 1);
7161 y = TREE_OPERAND (ineq, 0);
7162 }
7163 else if (TREE_CODE (ineq) == GT_EXPR)
7164 {
7165 a1 = TREE_OPERAND (ineq, 0);
7166 y = TREE_OPERAND (ineq, 1);
7167 }
7168 else
7169 return NULL_TREE;
7170
7171 if (TREE_TYPE (a1) != typea)
7172 return NULL_TREE;
7173
7174 if (POINTER_TYPE_P (typea))
7175 {
7176 /* Convert the pointers to integers before taking the difference. */
7177 tree ta = fold_convert_loc (loc, ssizetype, a);
7178 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7179 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7180 }
7181 else
7182 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7183
7184 if (!diff || !integer_onep (diff))
7185 return NULL_TREE;
7186
7187 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7188 }
7189
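/* Worked example for fold_to_nonsharp_ineq_using_bound (an
   illustrative sketch): with BOUND a < n and INEQ a + 1 > y, the
   difference (a + 1) - a folds to 1, so INEQ is rewritten to a >= y
   and the caller can build a < n && a >= y.  */
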
7190 /* Fold a sum or difference of at least one multiplication.
7191 Returns the folded tree or NULL if no simplification could be made. */
7192
7193 static tree
7194 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7195 tree arg0, tree arg1)
7196 {
7197 tree arg00, arg01, arg10, arg11;
7198 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7199
7200 /* (A * C) +- (B * C) -> (A+-B) * C.
7201 (A * C) +- A -> A * (C+-1).
7202 We are most concerned about the case where C is a constant,
7203 but other combinations show up during loop reduction. Since
7204 it is not difficult, try all four possibilities. */
7205
7206 if (TREE_CODE (arg0) == MULT_EXPR)
7207 {
7208 arg00 = TREE_OPERAND (arg0, 0);
7209 arg01 = TREE_OPERAND (arg0, 1);
7210 }
7211 else if (TREE_CODE (arg0) == INTEGER_CST)
7212 {
7213 arg00 = build_one_cst (type);
7214 arg01 = arg0;
7215 }
7216 else
7217 {
7218 /* We cannot generate constant 1 for fract. */
7219 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7220 return NULL_TREE;
7221 arg00 = arg0;
7222 arg01 = build_one_cst (type);
7223 }
7224 if (TREE_CODE (arg1) == MULT_EXPR)
7225 {
7226 arg10 = TREE_OPERAND (arg1, 0);
7227 arg11 = TREE_OPERAND (arg1, 1);
7228 }
7229 else if (TREE_CODE (arg1) == INTEGER_CST)
7230 {
7231 arg10 = build_one_cst (type);
7232 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7233 the purpose of this canonicalization. */
7234 if (TREE_INT_CST_HIGH (arg1) == -1
7235 && negate_expr_p (arg1)
7236 && code == PLUS_EXPR)
7237 {
7238 arg11 = negate_expr (arg1);
7239 code = MINUS_EXPR;
7240 }
7241 else
7242 arg11 = arg1;
7243 }
7244 else
7245 {
7246 /* We cannot generate constant 1 for fract. */
7247 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7248 return NULL_TREE;
7249 arg10 = arg1;
7250 arg11 = build_one_cst (type);
7251 }
7252 same = NULL_TREE;
7253
7254 if (operand_equal_p (arg01, arg11, 0))
7255 same = arg01, alt0 = arg00, alt1 = arg10;
7256 else if (operand_equal_p (arg00, arg10, 0))
7257 same = arg00, alt0 = arg01, alt1 = arg11;
7258 else if (operand_equal_p (arg00, arg11, 0))
7259 same = arg00, alt0 = arg01, alt1 = arg10;
7260 else if (operand_equal_p (arg01, arg10, 0))
7261 same = arg01, alt0 = arg00, alt1 = arg11;
7262
7263 /* No identical multiplicands; see if we can find a common
7264 power-of-two factor in non-power-of-two multiplies. This
7265 can help in multi-dimensional array access. */
7266 else if (tree_fits_shwi_p (arg01)
7267 && tree_fits_shwi_p (arg11))
7268 {
7269 HOST_WIDE_INT int01, int11, tmp;
7270 bool swap = false;
7271 tree maybe_same;
7272 int01 = tree_to_shwi (arg01);
7273 int11 = tree_to_shwi (arg11);
7274
7275 /* Move min of absolute values to int11. */
7276 if (absu_hwi (int01) < absu_hwi (int11))
7277 {
7278 tmp = int01, int01 = int11, int11 = tmp;
7279 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7280 maybe_same = arg01;
7281 swap = true;
7282 }
7283 else
7284 maybe_same = arg11;
7285
7286 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7287 /* The remainder should not be a constant; otherwise we
7288 would fold i * 4 + 2 to (i * 2 + 1) * 2, which would
7289 increase the number of multiplications needed. */
7290 && TREE_CODE (arg10) != INTEGER_CST)
7291 {
7292 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7293 build_int_cst (TREE_TYPE (arg00),
7294 int01 / int11));
7295 alt1 = arg10;
7296 same = maybe_same;
7297 if (swap)
7298 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7299 }
7300 }
7301
7302 if (same)
7303 return fold_build2_loc (loc, MULT_EXPR, type,
7304 fold_build2_loc (loc, code, type,
7305 fold_convert_loc (loc, type, alt0),
7306 fold_convert_loc (loc, type, alt1)),
7307 fold_convert_loc (loc, type, same));
7308
7309 return NULL_TREE;
7310 }
7311
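/* Worked examples for fold_plusminus_mult_expr (illustrative
   sketches):

     a * c + b * c   ->  (a + b) * c      identical multiplicand C
     x * 3 + x       ->  x * 4            X is treated as X * 1
     i * 8 + j * 4   ->  (i * 2 + j) * 4  common power-of-two factor

   The last form is only used when the remaining operand is not a
   constant, so the number of multiplications never increases.  */
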
7312 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7313 specified by EXPR into the buffer PTR of length LEN bytes.
7314 Return the number of bytes placed in the buffer, or zero
7315 upon failure. */
7316
7317 static int
7318 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7319 {
7320 tree type = TREE_TYPE (expr);
7321 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7322 int byte, offset, word, words;
7323 unsigned char value;
7324
7325 if (total_bytes > len)
7326 return 0;
7327 words = total_bytes / UNITS_PER_WORD;
7328
7329 for (byte = 0; byte < total_bytes; byte++)
7330 {
7331 int bitpos = byte * BITS_PER_UNIT;
7332 if (bitpos < HOST_BITS_PER_WIDE_INT)
7333 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7334 else
7335 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7336 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7337
7338 if (total_bytes > UNITS_PER_WORD)
7339 {
7340 word = byte / UNITS_PER_WORD;
7341 if (WORDS_BIG_ENDIAN)
7342 word = (words - 1) - word;
7343 offset = word * UNITS_PER_WORD;
7344 if (BYTES_BIG_ENDIAN)
7345 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7346 else
7347 offset += byte % UNITS_PER_WORD;
7348 }
7349 else
7350 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7351 ptr[offset] = value;
7352 }
7353 return total_bytes;
7354 }
7355
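/* Worked example for native_encode_int (an illustrative sketch,
   assuming a little-endian target with 8-bit units and 4-byte
   words): the 32-bit INTEGER_CST 0x01020304 is encoded as

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   and the function returns 4.  */
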
7356
7357 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7361
7362 static int
7363 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7364 {
7365 tree type = TREE_TYPE (expr);
7366 enum machine_mode mode = TYPE_MODE (type);
7367 int total_bytes = GET_MODE_SIZE (mode);
7368 FIXED_VALUE_TYPE value;
7369 tree i_value, i_type;
7370
7371 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7372 return 0;
7373
7374 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7375
7376 if (NULL_TREE == i_type
7377 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7378 return 0;
7379
7380 value = TREE_FIXED_CST (expr);
7381 i_value = double_int_to_tree (i_type, value.data);
7382
7383 return native_encode_int (i_value, ptr, len);
7384 }
7385
7386
7387 /* Subroutine of native_encode_expr. Encode the REAL_CST
7388 specified by EXPR into the buffer PTR of length LEN bytes.
7389 Return the number of bytes placed in the buffer, or zero
7390 upon failure. */
7391
7392 static int
7393 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7394 {
7395 tree type = TREE_TYPE (expr);
7396 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7397 int byte, offset, word, words, bitpos;
7398 unsigned char value;
7399
7400 /* There are always 32 bits in each long, no matter the size of
7401 the host's long. We handle floating point representations with
7402 up to 192 bits. */
7403 long tmp[6];
7404
7405 if (total_bytes > len)
7406 return 0;
7407 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7408
7409 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7410
7411 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7412 bitpos += BITS_PER_UNIT)
7413 {
7414 byte = (bitpos / BITS_PER_UNIT) & 3;
7415 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7416
7417 if (UNITS_PER_WORD < 4)
7418 {
7419 word = byte / UNITS_PER_WORD;
7420 if (WORDS_BIG_ENDIAN)
7421 word = (words - 1) - word;
7422 offset = word * UNITS_PER_WORD;
7423 if (BYTES_BIG_ENDIAN)
7424 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7425 else
7426 offset += byte % UNITS_PER_WORD;
7427 }
7428 else
7429 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7430 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7431 }
7432 return total_bytes;
7433 }
7434
7435 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7436 specified by EXPR into the buffer PTR of length LEN bytes.
7437 Return the number of bytes placed in the buffer, or zero
7438 upon failure. */
7439
7440 static int
7441 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7442 {
7443 int rsize, isize;
7444 tree part;
7445
7446 part = TREE_REALPART (expr);
7447 rsize = native_encode_expr (part, ptr, len);
7448 if (rsize == 0)
7449 return 0;
7450 part = TREE_IMAGPART (expr);
7451 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7452 if (isize != rsize)
7453 return 0;
7454 return rsize + isize;
7455 }
7456
7457
7458 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7459 specified by EXPR into the buffer PTR of length LEN bytes.
7460 Return the number of bytes placed in the buffer, or zero
7461 upon failure. */
7462
7463 static int
7464 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7465 {
7466 unsigned i, count;
7467 int size, offset;
7468 tree itype, elem;
7469
7470 offset = 0;
7471 count = VECTOR_CST_NELTS (expr);
7472 itype = TREE_TYPE (TREE_TYPE (expr));
7473 size = GET_MODE_SIZE (TYPE_MODE (itype));
7474 for (i = 0; i < count; i++)
7475 {
7476 elem = VECTOR_CST_ELT (expr, i);
7477 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7478 return 0;
7479 offset += size;
7480 }
7481 return offset;
7482 }
7483
7484
7485 /* Subroutine of native_encode_expr. Encode the STRING_CST
7486 specified by EXPR into the buffer PTR of length LEN bytes.
7487 Return the number of bytes placed in the buffer, or zero
7488 upon failure. */
7489
7490 static int
7491 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7492 {
7493 tree type = TREE_TYPE (expr);
7494 HOST_WIDE_INT total_bytes;
7495
7496 if (TREE_CODE (type) != ARRAY_TYPE
7497 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7498 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7499 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7500 return 0;
7501 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7502 if (total_bytes > len)
7503 return 0;
7504 if (TREE_STRING_LENGTH (expr) < total_bytes)
7505 {
7506 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7507 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7508 total_bytes - TREE_STRING_LENGTH (expr));
7509 }
7510 else
7511 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7512 return total_bytes;
7513 }
7514
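/* Worked example for native_encode_string (an illustrative sketch):
   given a STRING_CST "abc" whose type is char[8] (e.g. from an
   initializer), TREE_STRING_LENGTH is 4 (including the terminating
   NUL) while the array occupies 8 bytes, so 4 bytes are copied, the
   remaining 4 are zero-filled, and 8 is returned.  */
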
7515
7516 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7517 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified
7518 by EXPR into the buffer PTR of length LEN bytes. Return the number of bytes
7519 placed in the buffer, or zero upon failure. */
7520
7521 int
7522 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7523 {
7524 switch (TREE_CODE (expr))
7525 {
7526 case INTEGER_CST:
7527 return native_encode_int (expr, ptr, len);
7528
7529 case REAL_CST:
7530 return native_encode_real (expr, ptr, len);
7531
7532 case FIXED_CST:
7533 return native_encode_fixed (expr, ptr, len);
7534
7535 case COMPLEX_CST:
7536 return native_encode_complex (expr, ptr, len);
7537
7538 case VECTOR_CST:
7539 return native_encode_vector (expr, ptr, len);
7540
7541 case STRING_CST:
7542 return native_encode_string (expr, ptr, len);
7543
7544 default:
7545 return 0;
7546 }
7547 }
7548
7549
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7553
7554 static tree
7555 native_interpret_int (tree type, const unsigned char *ptr, int len)
7556 {
7557 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7558 double_int result;
7559
7560 if (total_bytes > len
7561 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7562 return NULL_TREE;
7563
7564 result = double_int::from_buffer (ptr, total_bytes);
7565
7566 return double_int_to_tree (type, result);
7567 }
7568
7569
7570 /* Subroutine of native_interpret_expr. Interpret the contents of
7571 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7572 If the buffer cannot be interpreted, return NULL_TREE. */
7573
7574 static tree
7575 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7576 {
7577 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7578 double_int result;
7579 FIXED_VALUE_TYPE fixed_value;
7580
7581 if (total_bytes > len
7582 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7583 return NULL_TREE;
7584
7585 result = double_int::from_buffer (ptr, total_bytes);
7586 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7587
7588 return build_fixed (type, fixed_value);
7589 }
7590
7591
7592 /* Subroutine of native_interpret_expr. Interpret the contents of
7593 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7594 If the buffer cannot be interpreted, return NULL_TREE. */
7595
7596 static tree
7597 native_interpret_real (tree type, const unsigned char *ptr, int len)
7598 {
7599 enum machine_mode mode = TYPE_MODE (type);
7600 int total_bytes = GET_MODE_SIZE (mode);
7601 int byte, offset, word, words, bitpos;
7602 unsigned char value;
7603 /* There are always 32 bits in each long, no matter the size of
7604 the host's long. We handle floating point representations with
7605 up to 192 bits. */
7606 REAL_VALUE_TYPE r;
7607 long tmp[6];
7608
7609 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7610 if (total_bytes > len || total_bytes > 24)
7611 return NULL_TREE;
7612 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7613
7614 memset (tmp, 0, sizeof (tmp));
7615 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7616 bitpos += BITS_PER_UNIT)
7617 {
7618 byte = (bitpos / BITS_PER_UNIT) & 3;
7619 if (UNITS_PER_WORD < 4)
7620 {
7621 word = byte / UNITS_PER_WORD;
7622 if (WORDS_BIG_ENDIAN)
7623 word = (words - 1) - word;
7624 offset = word * UNITS_PER_WORD;
7625 if (BYTES_BIG_ENDIAN)
7626 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7627 else
7628 offset += byte % UNITS_PER_WORD;
7629 }
7630 else
7631 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7632 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7633
7634 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7635 }
7636
7637 real_from_target (&r, tmp, mode);
7638 return build_real (type, r);
7639 }
7640
7641
7642 /* Subroutine of native_interpret_expr. Interpret the contents of
7643 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7644 If the buffer cannot be interpreted, return NULL_TREE. */
7645
7646 static tree
7647 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7648 {
7649 tree etype, rpart, ipart;
7650 int size;
7651
7652 etype = TREE_TYPE (type);
7653 size = GET_MODE_SIZE (TYPE_MODE (etype));
7654 if (size * 2 > len)
7655 return NULL_TREE;
7656 rpart = native_interpret_expr (etype, ptr, size);
7657 if (!rpart)
7658 return NULL_TREE;
7659 ipart = native_interpret_expr (etype, ptr+size, size);
7660 if (!ipart)
7661 return NULL_TREE;
7662 return build_complex (type, rpart, ipart);
7663 }
7664
7665
7666 /* Subroutine of native_interpret_expr. Interpret the contents of
7667 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7668 If the buffer cannot be interpreted, return NULL_TREE. */
7669
7670 static tree
7671 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7672 {
7673 tree etype, elem;
7674 int i, size, count;
7675 tree *elements;
7676
7677 etype = TREE_TYPE (type);
7678 size = GET_MODE_SIZE (TYPE_MODE (etype));
7679 count = TYPE_VECTOR_SUBPARTS (type);
7680 if (size * count > len)
7681 return NULL_TREE;
7682
7683 elements = XALLOCAVEC (tree, count);
7684 for (i = count - 1; i >= 0; i--)
7685 {
7686 elem = native_interpret_expr (etype, ptr+(i*size), size);
7687 if (!elem)
7688 return NULL_TREE;
7689 elements[i] = elem;
7690 }
7691 return build_vector (type, elements);
7692 }
7693
7694
7695 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7696 the buffer PTR of length LEN as a constant of type TYPE. For
7697 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7698 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7699 return NULL_TREE. */
7700
7701 tree
7702 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7703 {
7704 switch (TREE_CODE (type))
7705 {
7706 case INTEGER_TYPE:
7707 case ENUMERAL_TYPE:
7708 case BOOLEAN_TYPE:
7709 case POINTER_TYPE:
7710 case REFERENCE_TYPE:
7711 return native_interpret_int (type, ptr, len);
7712
7713 case REAL_TYPE:
7714 return native_interpret_real (type, ptr, len);
7715
7716 case FIXED_POINT_TYPE:
7717 return native_interpret_fixed (type, ptr, len);
7718
7719 case COMPLEX_TYPE:
7720 return native_interpret_complex (type, ptr, len);
7721
7722 case VECTOR_TYPE:
7723 return native_interpret_vector (type, ptr, len);
7724
7725 default:
7726 return NULL_TREE;
7727 }
7728 }
7729
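/* Usage sketch for the native_encode_expr / native_interpret_expr
   pair (illustrative only; buf, len, tem and new_type are
   hypothetical locals): a constant is reinterpreted in another type
   by round-tripping through a byte buffer, as fold_view_convert_expr
   does below:

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       tem = native_interpret_expr (new_type, buf, len);  */
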
7730 /* Returns true if we can interpret the contents of a native encoding
7731 as TYPE. */
7732
7733 static bool
7734 can_native_interpret_type_p (tree type)
7735 {
7736 switch (TREE_CODE (type))
7737 {
7738 case INTEGER_TYPE:
7739 case ENUMERAL_TYPE:
7740 case BOOLEAN_TYPE:
7741 case POINTER_TYPE:
7742 case REFERENCE_TYPE:
7743 case FIXED_POINT_TYPE:
7744 case REAL_TYPE:
7745 case COMPLEX_TYPE:
7746 case VECTOR_TYPE:
7747 return true;
7748 default:
7749 return false;
7750 }
7751 }
7752
7753 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7754 TYPE at compile-time. If we're unable to perform the conversion
7755 return NULL_TREE. */
7756
7757 static tree
7758 fold_view_convert_expr (tree type, tree expr)
7759 {
7760 /* We support up to 512-bit values (for V8DFmode). */
7761 unsigned char buffer[64];
7762 int len;
7763
7764 /* Check that the host and target are sane. */
7765 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7766 return NULL_TREE;
7767
7768 len = native_encode_expr (expr, buffer, sizeof (buffer));
7769 if (len == 0)
7770 return NULL_TREE;
7771
7772 return native_interpret_expr (type, buffer, len);
7773 }
7774
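/* Worked example for fold_view_convert_expr (an illustrative sketch,
   assuming IEEE single precision and a 32-bit int):

     VIEW_CONVERT_EXPR<int>(1.0f)

   encodes the float as the bytes of 0x3f800000 and reinterprets them
   as an int, so the result is the INTEGER_CST 1065353216.  */
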
7775 /* Build an expression for the address of T. Folds away INDIRECT_REF
7776 to avoid confusing the gimplify process. */
7777
7778 tree
7779 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7780 {
7781 /* The size of the object is not relevant when talking about its address. */
7782 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7783 t = TREE_OPERAND (t, 0);
7784
7785 if (TREE_CODE (t) == INDIRECT_REF)
7786 {
7787 t = TREE_OPERAND (t, 0);
7788
7789 if (TREE_TYPE (t) != ptrtype)
7790 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7791 }
7792 else if (TREE_CODE (t) == MEM_REF
7793 && integer_zerop (TREE_OPERAND (t, 1)))
7794 return TREE_OPERAND (t, 0);
7795 else if (TREE_CODE (t) == MEM_REF
7796 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7797 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7798 TREE_OPERAND (t, 0),
7799 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7800 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7801 {
7802 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7803
7804 if (TREE_TYPE (t) != ptrtype)
7805 t = fold_convert_loc (loc, ptrtype, t);
7806 }
7807 else
7808 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7809
7810 return t;
7811 }
7812
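/* Worked examples for build_fold_addr_expr_with_type_loc
   (illustrative sketches): &*p folds back to p (through a NOP_EXPR
   if the pointer types differ), and &MEM[p + 0] folds to p, so no
   ADDR_EXPR is built in either case.  */
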
7813 /* Build an expression for the address of T. */
7814
7815 tree
7816 build_fold_addr_expr_loc (location_t loc, tree t)
7817 {
7818 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7819
7820 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7821 }
7822
7823 static bool vec_cst_ctor_to_array (tree, tree *);
7824
7825 /* Fold a unary expression of code CODE and type TYPE with operand
7826 OP0. Return the folded expression if folding is successful.
7827 Otherwise, return NULL_TREE. */
7828
7829 tree
7830 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7831 {
7832 tree tem;
7833 tree arg0;
7834 enum tree_code_class kind = TREE_CODE_CLASS (code);
7835
7836 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7837 && TREE_CODE_LENGTH (code) == 1);
7838
7839 arg0 = op0;
7840 if (arg0)
7841 {
7842 if (CONVERT_EXPR_CODE_P (code)
7843 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7844 {
7845 /* Don't use STRIP_NOPS, because signedness of argument type
7846 matters. */
7847 STRIP_SIGN_NOPS (arg0);
7848 }
7849 else
7850 {
7851 /* Strip any conversions that don't change the mode. This
7852 is safe for every expression, except for a comparison
7853 expression because its signedness is derived from its
7854 operands.
7855
7856 Note that this is done as an internal manipulation within
7857 the constant folder, in order to find the simplest
7858 representation of the arguments so that their form can be
7859 studied. In any case, the appropriate type conversions
7860 should be put back in the tree that will get out of the
7861 constant folder. */
7862 STRIP_NOPS (arg0);
7863 }
7864 }
7865
7866 if (TREE_CODE_CLASS (code) == tcc_unary)
7867 {
7868 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7869 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7870 fold_build1_loc (loc, code, type,
7871 fold_convert_loc (loc, TREE_TYPE (op0),
7872 TREE_OPERAND (arg0, 1))));
7873 else if (TREE_CODE (arg0) == COND_EXPR)
7874 {
7875 tree arg01 = TREE_OPERAND (arg0, 1);
7876 tree arg02 = TREE_OPERAND (arg0, 2);
7877 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7878 arg01 = fold_build1_loc (loc, code, type,
7879 fold_convert_loc (loc,
7880 TREE_TYPE (op0), arg01));
7881 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7882 arg02 = fold_build1_loc (loc, code, type,
7883 fold_convert_loc (loc,
7884 TREE_TYPE (op0), arg02));
7885 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7886 arg01, arg02);
7887
7888 /* If this was a conversion, and all we did was to move it
7889 inside the COND_EXPR, bring it back out. But leave it if
7890 it is a conversion from integer to integer and the
7891 result precision is no wider than a word since such a
7892 conversion is cheap and may be optimized away by combine,
7893 while it couldn't if it were outside the COND_EXPR. Then return
7894 so we don't get into an infinite recursion loop taking the
7895 conversion out and then back in. */
7896
7897 if ((CONVERT_EXPR_CODE_P (code)
7898 || code == NON_LVALUE_EXPR)
7899 && TREE_CODE (tem) == COND_EXPR
7900 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7901 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7902 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7903 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7904 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7905 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7906 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7907 && (INTEGRAL_TYPE_P
7908 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7909 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7910 || flag_syntax_only))
7911 tem = build1_loc (loc, code, type,
7912 build3 (COND_EXPR,
7913 TREE_TYPE (TREE_OPERAND
7914 (TREE_OPERAND (tem, 1), 0)),
7915 TREE_OPERAND (tem, 0),
7916 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7917 TREE_OPERAND (TREE_OPERAND (tem, 2),
7918 0)));
7919 return tem;
7920 }
7921 }
7922
7923 switch (code)
7924 {
7925 case PAREN_EXPR:
7926 /* Re-association barriers around constants and other re-association
7927 barriers can be removed. */
7928 if (CONSTANT_CLASS_P (op0)
7929 || TREE_CODE (op0) == PAREN_EXPR)
7930 return fold_convert_loc (loc, type, op0);
7931 return NULL_TREE;
7932
7933 CASE_CONVERT:
7934 case FLOAT_EXPR:
7935 case FIX_TRUNC_EXPR:
7936 if (TREE_TYPE (op0) == type)
7937 return op0;
7938
7939 if (COMPARISON_CLASS_P (op0))
7940 {
7941 /* If we have (type) (a CMP b) and type is an integral type, return
7942 new expression involving the new type. Canonicalize
7943 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7944 non-integral type.
7945 Do not fold the result, as that would not simplify further, and
7946 folding it again could recurse indefinitely. */
7947 if (TREE_CODE (type) == BOOLEAN_TYPE)
7948 return build2_loc (loc, TREE_CODE (op0), type,
7949 TREE_OPERAND (op0, 0),
7950 TREE_OPERAND (op0, 1));
7951 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7952 && TREE_CODE (type) != VECTOR_TYPE)
7953 return build3_loc (loc, COND_EXPR, type, op0,
7954 constant_boolean_node (true, type),
7955 constant_boolean_node (false, type));
7956 }
7957
7958 /* Handle cases of two conversions in a row. */
7959 if (CONVERT_EXPR_P (op0))
7960 {
7961 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7962 tree inter_type = TREE_TYPE (op0);
7963 int inside_int = INTEGRAL_TYPE_P (inside_type);
7964 int inside_ptr = POINTER_TYPE_P (inside_type);
7965 int inside_float = FLOAT_TYPE_P (inside_type);
7966 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7967 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7968 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7969 int inter_int = INTEGRAL_TYPE_P (inter_type);
7970 int inter_ptr = POINTER_TYPE_P (inter_type);
7971 int inter_float = FLOAT_TYPE_P (inter_type);
7972 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7973 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7974 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7975 int final_int = INTEGRAL_TYPE_P (type);
7976 int final_ptr = POINTER_TYPE_P (type);
7977 int final_float = FLOAT_TYPE_P (type);
7978 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7979 unsigned int final_prec = TYPE_PRECISION (type);
7980 int final_unsignedp = TYPE_UNSIGNED (type);
7981
7982 /* In addition to the cases of two conversions in a row
7983 handled below, if we are converting something to its own
7984 type via an object of identical or wider precision, neither
7985 conversion is needed. */
7986 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7987 && (((inter_int || inter_ptr) && final_int)
7988 || (inter_float && final_float))
7989 && inter_prec >= final_prec)
7990 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7991
7992 /* Likewise, if the intermediate and initial types are either both
7993 float or both integer, we don't need the middle conversion if the
7994 former is wider than the latter and doesn't change the signedness
7995 (for integers). Avoid this if the final type is a pointer since
7996 then we sometimes need the middle conversion. Likewise if the
7997 final type has a precision not equal to the size of its mode. */
7998 if (((inter_int && inside_int)
7999 || (inter_float && inside_float)
8000 || (inter_vec && inside_vec))
8001 && inter_prec >= inside_prec
8002 && (inter_float || inter_vec
8003 || inter_unsignedp == inside_unsignedp)
8004 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8005 && TYPE_MODE (type) == TYPE_MODE (inter_type))
8006 && ! final_ptr
8007 && (! final_vec || inter_prec == inside_prec))
8008 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8009
8010 /* If we have a sign-extension of a zero-extended value, we can
8011 replace that by a single zero-extension. Likewise if the
8012 final conversion does not change precision we can drop the
8013 intermediate conversion. */
8014 if (inside_int && inter_int && final_int
8015 && ((inside_prec < inter_prec && inter_prec < final_prec
8016 && inside_unsignedp && !inter_unsignedp)
8017 || final_prec == inter_prec))
8018 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8019
8020 /* Two conversions in a row are not needed unless:
8021 - some conversion is floating-point (overstrict for now), or
8022 - some conversion is a vector (overstrict for now), or
8023 - the intermediate type is narrower than both initial and
8024 final, or
8025 - the intermediate type and innermost type differ in signedness,
8026 and the outermost type is wider than the intermediate, or
8027 - the initial type is a pointer type and the precisions of the
8028 intermediate and final types differ, or
8029 - the final type is a pointer type and the precisions of the
8030 initial and intermediate types differ. */
8031 if (! inside_float && ! inter_float && ! final_float
8032 && ! inside_vec && ! inter_vec && ! final_vec
8033 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8034 && ! (inside_int && inter_int
8035 && inter_unsignedp != inside_unsignedp
8036 && inter_prec < final_prec)
8037 && ((inter_unsignedp && inter_prec > inside_prec)
8038 == (final_unsignedp && final_prec > inter_prec))
8039 && ! (inside_ptr && inter_prec != final_prec)
8040 && ! (final_ptr && inside_prec != inter_prec)
8041 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8042 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8043 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8044 }
8045
8046 /* Handle (T *)&A.B.C for A being of type T and B and C
8047 living at offset zero. This occurs frequently in
8048 C++ upcasting and then accessing the base. */
8049 if (TREE_CODE (op0) == ADDR_EXPR
8050 && POINTER_TYPE_P (type)
8051 && handled_component_p (TREE_OPERAND (op0, 0)))
8052 {
8053 HOST_WIDE_INT bitsize, bitpos;
8054 tree offset;
8055 enum machine_mode mode;
8056 int unsignedp, volatilep;
8057 tree base = TREE_OPERAND (op0, 0);
8058 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8059 &mode, &unsignedp, &volatilep);
8060 /* If the reference was to a (constant) zero offset, we can use
8061 the address of the base if it has the same base type
8062 as the result type and the pointer type is unqualified. */
8063 if (! offset && bitpos == 0
8064 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8065 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8066 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8067 return fold_convert_loc (loc, type,
8068 build_fold_addr_expr_loc (loc, base));
8069 }
8070
8071 if (TREE_CODE (op0) == MODIFY_EXPR
8072 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8073 /* Detect assigning a bitfield. */
8074 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8075 && DECL_BIT_FIELD
8076 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8077 {
8078 /* Don't leave an assignment inside a conversion
8079 unless assigning a bitfield. */
8080 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8081 /* First do the assignment, then return converted constant. */
8082 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8083 TREE_NO_WARNING (tem) = 1;
8084 TREE_USED (tem) = 1;
8085 return tem;
8086 }
8087
8088 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8089 constant (if x has signed type, the sign bit cannot be set
8090 in c). This folds extension into the BIT_AND_EXPR.
8091 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8092 very likely don't have maximal range for their precision and this
8093 transformation effectively doesn't preserve non-maximal ranges. */
8094 if (TREE_CODE (type) == INTEGER_TYPE
8095 && TREE_CODE (op0) == BIT_AND_EXPR
8096 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8097 {
8098 tree and_expr = op0;
8099 tree and0 = TREE_OPERAND (and_expr, 0);
8100 tree and1 = TREE_OPERAND (and_expr, 1);
8101 int change = 0;
8102
8103 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8104 || (TYPE_PRECISION (type)
8105 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8106 change = 1;
8107 else if (TYPE_PRECISION (TREE_TYPE (and1))
8108 <= HOST_BITS_PER_WIDE_INT
8109 && tree_fits_uhwi_p (and1))
8110 {
8111 unsigned HOST_WIDE_INT cst;
8112
8113 cst = tree_to_uhwi (and1);
8114 cst &= HOST_WIDE_INT_M1U
8115 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8116 change = (cst == 0);
8117 #ifdef LOAD_EXTEND_OP
8118 if (change
8119 && !flag_syntax_only
8120 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8121 == ZERO_EXTEND))
8122 {
8123 tree uns = unsigned_type_for (TREE_TYPE (and0));
8124 and0 = fold_convert_loc (loc, uns, and0);
8125 and1 = fold_convert_loc (loc, uns, and1);
8126 }
8127 #endif
8128 }
8129 if (change)
8130 {
8131 tem = force_fit_type_double (type, tree_to_double_int (and1),
8132 0, TREE_OVERFLOW (and1));
8133 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8134 fold_convert_loc (loc, type, and0), tem);
8135 }
8136 }
8137
8138 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8139 when one of the new casts will fold away. Conservatively we assume
8140 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8141 if (POINTER_TYPE_P (type)
8142 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8143 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8144 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8145 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8146 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8147 {
8148 tree arg00 = TREE_OPERAND (arg0, 0);
8149 tree arg01 = TREE_OPERAND (arg0, 1);
8150
8151 return fold_build_pointer_plus_loc
8152 (loc, fold_convert_loc (loc, type, arg00), arg01);
8153 }
8154
8155 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8156 of the same precision, and X has an integer type not narrower than
8157 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8158 if (INTEGRAL_TYPE_P (type)
8159 && TREE_CODE (op0) == BIT_NOT_EXPR
8160 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8161 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8162 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8163 {
8164 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8165 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8166 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8167 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8168 fold_convert_loc (loc, type, tem));
8169 }
8170
8171 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8172 type of X and Y (integer types only). */
8173 if (INTEGRAL_TYPE_P (type)
8174 && TREE_CODE (op0) == MULT_EXPR
8175 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8176 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8177 {
8178 /* Be careful not to introduce new overflows. */
8179 tree mult_type;
8180 if (TYPE_OVERFLOW_WRAPS (type))
8181 mult_type = type;
8182 else
8183 mult_type = unsigned_type_for (type);
8184
8185 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8186 {
8187 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8188 fold_convert_loc (loc, mult_type,
8189 TREE_OPERAND (op0, 0)),
8190 fold_convert_loc (loc, mult_type,
8191 TREE_OPERAND (op0, 1)));
8192 return fold_convert_loc (loc, type, tem);
8193 }
8194 }
8195
8196 tem = fold_convert_const (code, type, op0);
8197 return tem ? tem : NULL_TREE;
8198
8199 case ADDR_SPACE_CONVERT_EXPR:
8200 if (integer_zerop (arg0))
8201 return fold_convert_const (code, type, arg0);
8202 return NULL_TREE;
8203
8204 case FIXED_CONVERT_EXPR:
8205 tem = fold_convert_const (code, type, arg0);
8206 return tem ? tem : NULL_TREE;
8207
8208 case VIEW_CONVERT_EXPR:
8209 if (TREE_TYPE (op0) == type)
8210 return op0;
8211 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8212 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8213 type, TREE_OPERAND (op0, 0));
8214 if (TREE_CODE (op0) == MEM_REF)
8215 return fold_build2_loc (loc, MEM_REF, type,
8216 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8217
8218 /* For integral conversions with the same precision or pointer
8219 conversions use a NOP_EXPR instead. */
8220 if ((INTEGRAL_TYPE_P (type)
8221 || POINTER_TYPE_P (type))
8222 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8223 || POINTER_TYPE_P (TREE_TYPE (op0)))
8224 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8225 return fold_convert_loc (loc, type, op0);
8226
8227 /* Strip inner integral conversions that do not change the precision. */
8228 if (CONVERT_EXPR_P (op0)
8229 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8230 || POINTER_TYPE_P (TREE_TYPE (op0)))
8231 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8232 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8233 && (TYPE_PRECISION (TREE_TYPE (op0))
8234 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8235 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8236 type, TREE_OPERAND (op0, 0));
8237
8238 return fold_view_convert_expr (type, op0);
8239
8240 case NEGATE_EXPR:
8241 tem = fold_negate_expr (loc, arg0);
8242 if (tem)
8243 return fold_convert_loc (loc, type, tem);
8244 return NULL_TREE;
8245
8246 case ABS_EXPR:
8247 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8248 return fold_abs_const (arg0, type);
8249 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8250 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8251 /* Convert fabs((double)float) into (double)fabsf(float). */
8252 else if (TREE_CODE (arg0) == NOP_EXPR
8253 && TREE_CODE (type) == REAL_TYPE)
8254 {
8255 tree targ0 = strip_float_extensions (arg0);
8256 if (targ0 != arg0)
8257 return fold_convert_loc (loc, type,
8258 fold_build1_loc (loc, ABS_EXPR,
8259 TREE_TYPE (targ0),
8260 targ0));
8261 }
8262 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8263 else if (TREE_CODE (arg0) == ABS_EXPR)
8264 return arg0;
8265 else if (tree_expr_nonnegative_p (arg0))
8266 return arg0;
8267
8268 /* Strip sign ops from argument. */
8269 if (TREE_CODE (type) == REAL_TYPE)
8270 {
8271 tem = fold_strip_sign_ops (arg0);
8272 if (tem)
8273 return fold_build1_loc (loc, ABS_EXPR, type,
8274 fold_convert_loc (loc, type, tem));
8275 }
8276 return NULL_TREE;
8277
8278 case CONJ_EXPR:
8279 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8280 return fold_convert_loc (loc, type, arg0);
8281 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8282 {
8283 tree itype = TREE_TYPE (type);
8284 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8285 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8286 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8287 negate_expr (ipart));
8288 }
8289 if (TREE_CODE (arg0) == COMPLEX_CST)
8290 {
8291 tree itype = TREE_TYPE (type);
8292 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8293 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8294 return build_complex (type, rpart, negate_expr (ipart));
8295 }
8296 if (TREE_CODE (arg0) == CONJ_EXPR)
8297 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8298 return NULL_TREE;
8299
8300 case BIT_NOT_EXPR:
8301 if (TREE_CODE (arg0) == INTEGER_CST)
8302 return fold_not_const (arg0, type);
8303 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8304 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8305 /* Convert ~ (-A) to A - 1. */
8306 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8307 return fold_build2_loc (loc, MINUS_EXPR, type,
8308 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8309 build_int_cst (type, 1));
8310 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8311 else if (INTEGRAL_TYPE_P (type)
8312 && ((TREE_CODE (arg0) == MINUS_EXPR
8313 && integer_onep (TREE_OPERAND (arg0, 1)))
8314 || (TREE_CODE (arg0) == PLUS_EXPR
8315 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8316 return fold_build1_loc (loc, NEGATE_EXPR, type,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 0)));
8319 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8320 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8321 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8322 fold_convert_loc (loc, type,
8323 TREE_OPERAND (arg0, 0)))))
8324 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8325 fold_convert_loc (loc, type,
8326 TREE_OPERAND (arg0, 1)));
8327 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8328 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8329 fold_convert_loc (loc, type,
8330 TREE_OPERAND (arg0, 1)))))
8331 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8332 fold_convert_loc (loc, type,
8333 TREE_OPERAND (arg0, 0)), tem);
8334 /* Perform BIT_NOT_EXPR on each element individually. */
8335 else if (TREE_CODE (arg0) == VECTOR_CST)
8336 {
8337 tree *elements;
8338 tree elem;
8339 unsigned count = VECTOR_CST_NELTS (arg0), i;
8340
8341 elements = XALLOCAVEC (tree, count);
8342 for (i = 0; i < count; i++)
8343 {
8344 elem = VECTOR_CST_ELT (arg0, i);
8345 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8346 if (elem == NULL_TREE)
8347 break;
8348 elements[i] = elem;
8349 }
8350 if (i == count)
8351 return build_vector (type, elements);
8352 }
8353 else if (COMPARISON_CLASS_P (arg0)
8354 && (VECTOR_TYPE_P (type)
8355 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8356 {
8357 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8358 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8359 HONOR_NANS (TYPE_MODE (op_type)));
8360 if (subcode != ERROR_MARK)
8361 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8362 TREE_OPERAND (arg0, 1));
8363 }
8364
8365
8366 return NULL_TREE;
8367
8368 case TRUTH_NOT_EXPR:
8369 /* Note that the operand of this must be an int
8370 and its values must be 0 or 1.
8371 ("true" is a fixed value perhaps depending on the language,
8372 but we don't handle values other than 1 correctly yet.) */
8373 tem = fold_truth_not_expr (loc, arg0);
8374 if (!tem)
8375 return NULL_TREE;
8376 return fold_convert_loc (loc, type, tem);
8377
8378 case REALPART_EXPR:
8379 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8380 return fold_convert_loc (loc, type, arg0);
8381 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8382 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8383 TREE_OPERAND (arg0, 1));
8384 if (TREE_CODE (arg0) == COMPLEX_CST)
8385 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8386 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8387 {
8388 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8389 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8390 fold_build1_loc (loc, REALPART_EXPR, itype,
8391 TREE_OPERAND (arg0, 0)),
8392 fold_build1_loc (loc, REALPART_EXPR, itype,
8393 TREE_OPERAND (arg0, 1)));
8394 return fold_convert_loc (loc, type, tem);
8395 }
8396 if (TREE_CODE (arg0) == CONJ_EXPR)
8397 {
8398 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8399 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8400 TREE_OPERAND (arg0, 0));
8401 return fold_convert_loc (loc, type, tem);
8402 }
8403 if (TREE_CODE (arg0) == CALL_EXPR)
8404 {
8405 tree fn = get_callee_fndecl (arg0);
8406 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8407 switch (DECL_FUNCTION_CODE (fn))
8408 {
8409 CASE_FLT_FN (BUILT_IN_CEXPI):
8410 fn = mathfn_built_in (type, BUILT_IN_COS);
8411 if (fn)
8412 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8413 break;
8414
8415 default:
8416 break;
8417 }
8418 }
8419 return NULL_TREE;
8420
8421 case IMAGPART_EXPR:
8422 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8423 return build_zero_cst (type);
8424 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8425 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8426 TREE_OPERAND (arg0, 0));
8427 if (TREE_CODE (arg0) == COMPLEX_CST)
8428 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8429 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8430 {
8431 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8432 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8433 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8434 TREE_OPERAND (arg0, 0)),
8435 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8436 TREE_OPERAND (arg0, 1)));
8437 return fold_convert_loc (loc, type, tem);
8438 }
8439 if (TREE_CODE (arg0) == CONJ_EXPR)
8440 {
8441 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8442 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8443 return fold_convert_loc (loc, type, negate_expr (tem));
8444 }
8445 if (TREE_CODE (arg0) == CALL_EXPR)
8446 {
8447 tree fn = get_callee_fndecl (arg0);
8448 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8449 switch (DECL_FUNCTION_CODE (fn))
8450 {
8451 CASE_FLT_FN (BUILT_IN_CEXPI):
8452 fn = mathfn_built_in (type, BUILT_IN_SIN);
8453 if (fn)
8454 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8455 break;
8456
8457 default:
8458 break;
8459 }
8460 }
8461 return NULL_TREE;
8462
8463 case INDIRECT_REF:
8464 /* Fold *&X to X if X is an lvalue. */
8465 if (TREE_CODE (op0) == ADDR_EXPR)
8466 {
8467 tree op00 = TREE_OPERAND (op0, 0);
8468 if ((TREE_CODE (op00) == VAR_DECL
8469 || TREE_CODE (op00) == PARM_DECL
8470 || TREE_CODE (op00) == RESULT_DECL)
8471 && !TREE_READONLY (op00))
8472 return op00;
8473 }
8474 return NULL_TREE;
8475
8476 case VEC_UNPACK_LO_EXPR:
8477 case VEC_UNPACK_HI_EXPR:
8478 case VEC_UNPACK_FLOAT_LO_EXPR:
8479 case VEC_UNPACK_FLOAT_HI_EXPR:
8480 {
8481 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8482 tree *elts;
8483 enum tree_code subcode;
8484
8485 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8486 if (TREE_CODE (arg0) != VECTOR_CST)
8487 return NULL_TREE;
8488
8489 elts = XALLOCAVEC (tree, nelts * 2);
8490 if (!vec_cst_ctor_to_array (arg0, elts))
8491 return NULL_TREE;
8492
8493 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8494 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8495 elts += nelts;
8496
8497 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8498 subcode = NOP_EXPR;
8499 else
8500 subcode = FLOAT_EXPR;
8501
8502 for (i = 0; i < nelts; i++)
8503 {
8504 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8505 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8506 return NULL_TREE;
8507 }
8508
8509 return build_vector (type, elts);
8510 }
8511
8512 case REDUC_MIN_EXPR:
8513 case REDUC_MAX_EXPR:
8514 case REDUC_PLUS_EXPR:
8515 {
8516 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8517 tree *elts;
8518 enum tree_code subcode;
8519
8520 if (TREE_CODE (op0) != VECTOR_CST)
8521 return NULL_TREE;
8522
8523 elts = XALLOCAVEC (tree, nelts);
8524 if (!vec_cst_ctor_to_array (op0, elts))
8525 return NULL_TREE;
8526
8527 switch (code)
8528 {
8529 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8530 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8531 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8532 default: gcc_unreachable ();
8533 }
8534
8535 for (i = 1; i < nelts; i++)
8536 {
8537 elts[0] = const_binop (subcode, elts[0], elts[i]);
8538 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8539 return NULL_TREE;
8540 elts[i] = build_zero_cst (TREE_TYPE (type));
8541 }
8542
8543 return build_vector (type, elts);
8544 }
8545
8546 default:
8547 return NULL_TREE;
8548 } /* switch (code) */
8549 }
8550
8551
8552 /* If the operation was a conversion do _not_ mark a resulting constant
8553 with TREE_OVERFLOW if the original constant was not. These conversions
8554 have implementation defined behavior and retaining the TREE_OVERFLOW
8555 flag here would confuse later passes such as VRP. */
8556 tree
8557 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8558 tree type, tree op0)
8559 {
8560 tree res = fold_unary_loc (loc, code, type, op0);
8561 if (res
8562 && TREE_CODE (res) == INTEGER_CST
8563 && TREE_CODE (op0) == INTEGER_CST
8564 && CONVERT_EXPR_CODE_P (code))
8565 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8566
8567 return res;
8568 }
8569
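/* Worked example for fold_unary_ignore_overflow_loc (an illustrative
   sketch, assuming an 8-bit unsigned char): folding
   (unsigned char) 300 yields 44; since the operand 300 carried no
   TREE_OVERFLOW, the flag is cleared on the result even though the
   value wrapped during the conversion.  */
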
8570 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8571 operands OP0 and OP1. LOC is the location of the resulting expression.
8572 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8573 Return the folded expression if folding is successful. Otherwise,
8574 return NULL_TREE. */
8575 static tree
8576 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8577 tree arg0, tree arg1, tree op0, tree op1)
8578 {
8579 tree tem;
8580
8581 /* We only do these simplifications if we are optimizing. */
8582 if (!optimize)
8583 return NULL_TREE;
8584
8585 /* Check for things like (A || B) && (A || C). We can convert this
8586 to A || (B && C). Note that either operator can be any of the four
8587 truth and/or operations and the transformation will still be
8588 valid. Also note that we only care about order for the
8589 ANDIF and ORIF operators. If B contains side effects, this
8590 might change the truth-value of A. */
8591 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8592 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8593 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8594 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8595 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8596 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8597 {
8598 tree a00 = TREE_OPERAND (arg0, 0);
8599 tree a01 = TREE_OPERAND (arg0, 1);
8600 tree a10 = TREE_OPERAND (arg1, 0);
8601 tree a11 = TREE_OPERAND (arg1, 1);
8602 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8603 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8604 && (code == TRUTH_AND_EXPR
8605 || code == TRUTH_OR_EXPR));
8606
8607 if (operand_equal_p (a00, a10, 0))
8608 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8609 fold_build2_loc (loc, code, type, a01, a11));
8610 else if (commutative && operand_equal_p (a00, a11, 0))
8611 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8612 fold_build2_loc (loc, code, type, a01, a10));
8613 else if (commutative && operand_equal_p (a01, a10, 0))
8614 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8615 fold_build2_loc (loc, code, type, a00, a11));
8616
8617 /* This case is tricky because we must either have commutative
8618 operators or else A10 must not have side-effects. */
8619
8620 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8621 && operand_equal_p (a01, a11, 0))
8622 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8623 fold_build2_loc (loc, code, type, a00, a10),
8624 a01);
8625 }
8626
8627 /* See if we can build a range comparison. */
8628 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8629 return tem;
8630
8631 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8632 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8633 {
8634 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8635 if (tem)
8636 return fold_build2_loc (loc, code, type, tem, arg1);
8637 }
8638
8639 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8640 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8641 {
8642 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8643 if (tem)
8644 return fold_build2_loc (loc, code, type, arg0, tem);
8645 }
8646
8647 /* Check for the possibility of merging component references. If our
8648 lhs is another similar operation, try to merge its rhs with our
8649 rhs. Then try to merge our lhs and rhs. */
8650 if (TREE_CODE (arg0) == code
8651 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8652 TREE_OPERAND (arg0, 1), arg1)))
8653 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8654
8655 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8656 return tem;
8657
8658 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8659 && (code == TRUTH_AND_EXPR
8660 || code == TRUTH_ANDIF_EXPR
8661 || code == TRUTH_OR_EXPR
8662 || code == TRUTH_ORIF_EXPR))
8663 {
8664 enum tree_code ncode, icode;
8665
8666 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8667 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8668 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8669
8670 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8671 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8672 We don't want to pack more than two leaves into a non-IF AND/OR
8673 expression.
8674 If the tree code of the left-hand operand isn't an AND/OR-IF code
8675 and isn't equal to IF-CODE, then we don't want to add the
8676 right-hand operand. If the inner right-hand side of the
8677 left-hand operand has side effects, or isn't simple, then we
8678 can't add to it, as otherwise we might destroy the if-sequence. */
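/* For example (illustrative): ((a ANDIF b) AND c) becomes
   (a ANDIF (b AND c)) when b and c are simple and free of side
   effects, so the short-circuit structure around a is preserved. */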
8679 if (TREE_CODE (arg0) == icode
8680 && simple_operand_p_2 (arg1)
8681 /* Needed for sequence points to handle trappings, and
8682 side-effects. */
8683 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8684 {
8685 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8686 arg1);
8687 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8688 tem);
8689 }
8690 /* Same as above, but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8691 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8692 else if (TREE_CODE (arg1) == icode
8693 && simple_operand_p_2 (arg0)
8694 /* Needed for sequence points to handle trappings, and
8695 side-effects. */
8696 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8697 {
8698 tem = fold_build2_loc (loc, ncode, type,
8699 arg0, TREE_OPERAND (arg1, 0));
8700 return fold_build2_loc (loc, icode, type, tem,
8701 TREE_OPERAND (arg1, 1));
8702 }
8703 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8704 into (A OR B).
8705 For sequence point consistency, we need to check for trapping,
8706 and side-effects. */
8707 else if (code == icode && simple_operand_p_2 (arg0)
8708 && simple_operand_p_2 (arg1))
8709 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8710 }
8711
8712 return NULL_TREE;
8713 }
8714
8715 /* Fold a binary expression of code CODE and type TYPE with operands
8716 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8717 Return the folded expression if folding is successful. Otherwise,
8718 return NULL_TREE. */
8719
8720 static tree
8721 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8722 {
8723 enum tree_code compl_code;
8724
8725 if (code == MIN_EXPR)
8726 compl_code = MAX_EXPR;
8727 else if (code == MAX_EXPR)
8728 compl_code = MIN_EXPR;
8729 else
8730 gcc_unreachable ();
8731
8732 /* MIN (MAX (a, b), b) == b. */
8733 if (TREE_CODE (op0) == compl_code
8734 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8735 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8736
8737 /* MIN (MAX (b, a), b) == b. */
8738 if (TREE_CODE (op0) == compl_code
8739 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8740 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8741 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8742
8743 /* MIN (a, MAX (a, b)) == a. */
8744 if (TREE_CODE (op1) == compl_code
8745 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8746 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8747 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8748
8749 /* MIN (a, MAX (b, a)) == a. */
8750 if (TREE_CODE (op1) == compl_code
8751 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8752 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8753 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8754
8755 return NULL_TREE;
8756 }
8757
8758 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8759 by changing CODE to reduce the magnitude of constants involved in
8760 ARG0 of the comparison.
8761 Returns a canonicalized comparison tree if a simplification was
8762 possible, otherwise returns NULL_TREE.
8763 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8764 valid if signed overflow is undefined. */
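/* For instance (illustrative): 3 <= x becomes 2 < x and is then swapped
   to x > 2, while A + 2 > arg1 becomes A + 1 >= arg1 under the
   assumption that signed overflow is undefined; each step reduces the
   constant's magnitude by one. */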
8765
8766 static tree
8767 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8768 tree arg0, tree arg1,
8769 bool *strict_overflow_p)
8770 {
8771 enum tree_code code0 = TREE_CODE (arg0);
8772 tree t, cst0 = NULL_TREE;
8773 int sgn0;
8774 bool swap = false;
8775
8776 /* Match A +- CST code arg1 and CST code arg1. We can change the
8777 first form only if overflow is undefined. */
8778 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8779 /* In principle pointers also have undefined overflow behavior,
8780 but that causes problems elsewhere. */
8781 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8782 && (code0 == MINUS_EXPR
8783 || code0 == PLUS_EXPR)
8784 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8785 || code0 == INTEGER_CST))
8786 return NULL_TREE;
8787
8788 /* Identify the constant in arg0 and its sign. */
8789 if (code0 == INTEGER_CST)
8790 cst0 = arg0;
8791 else
8792 cst0 = TREE_OPERAND (arg0, 1);
8793 sgn0 = tree_int_cst_sgn (cst0);
8794
8795 /* Overflowed constants and zero will cause problems. */
8796 if (integer_zerop (cst0)
8797 || TREE_OVERFLOW (cst0))
8798 return NULL_TREE;
8799
8800 /* See if we can reduce the magnitude of the constant in
8801 arg0 by changing the comparison code. */
8802 if (code0 == INTEGER_CST)
8803 {
8804 /* CST <= arg1 -> CST-1 < arg1. */
8805 if (code == LE_EXPR && sgn0 == 1)
8806 code = LT_EXPR;
8807 /* -CST < arg1 -> -CST-1 <= arg1. */
8808 else if (code == LT_EXPR && sgn0 == -1)
8809 code = LE_EXPR;
8810 /* CST > arg1 -> CST-1 >= arg1. */
8811 else if (code == GT_EXPR && sgn0 == 1)
8812 code = GE_EXPR;
8813 /* -CST >= arg1 -> -CST-1 > arg1. */
8814 else if (code == GE_EXPR && sgn0 == -1)
8815 code = GT_EXPR;
8816 else
8817 return NULL_TREE;
8818 /* arg1 code' CST' might be more canonical. */
8819 swap = true;
8820 }
8821 else
8822 {
8823 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8824 if (code == LT_EXPR
8825 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8826 code = LE_EXPR;
8827 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8828 else if (code == GT_EXPR
8829 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8830 code = GE_EXPR;
8831 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8832 else if (code == LE_EXPR
8833 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8834 code = LT_EXPR;
8835 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8836 else if (code == GE_EXPR
8837 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8838 code = GT_EXPR;
8839 else
8840 return NULL_TREE;
8841 *strict_overflow_p = true;
8842 }
8843
8844 /* Now build the constant reduced in magnitude. But not if that
8845 would produce one outside of its type's range. */
8846 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8847 && ((sgn0 == 1
8848 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8849 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8850 || (sgn0 == -1
8851 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8852 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8853 /* We cannot swap the comparison here as that would cause us to
8854 endlessly recurse. */
8855 return NULL_TREE;
8856
8857 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8858 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8859 if (code0 != INTEGER_CST)
8860 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8861 t = fold_convert (TREE_TYPE (arg1), t);
8862
8863 /* If swapping might yield a more canonical form, do so. */
8864 if (swap)
8865 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8866 else
8867 return fold_build2_loc (loc, code, type, t, arg1);
8868 }
8869
8870 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8871 overflow further. Try to decrease the magnitude of constants involved
8872 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8873 and put sole constants at the second argument position.
8874 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8875
8876 static tree
8877 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8878 tree arg0, tree arg1)
8879 {
8880 tree t;
8881 bool strict_overflow_p;
8882 const char * const warnmsg = G_("assuming signed overflow does not occur "
8883 "when reducing constant in comparison");
8884
8885 /* Try canonicalization by simplifying arg0. */
8886 strict_overflow_p = false;
8887 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8888 &strict_overflow_p);
8889 if (t)
8890 {
8891 if (strict_overflow_p)
8892 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8893 return t;
8894 }
8895
8896 /* Try canonicalization by simplifying arg1 using the swapped
8897 comparison. */
8898 code = swap_tree_comparison (code);
8899 strict_overflow_p = false;
8900 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8901 &strict_overflow_p);
8902 if (t && strict_overflow_p)
8903 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8904 return t;
8905 }
8906
8907 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8908 space. This is used to avoid issuing overflow warnings for
8909 expressions like &p->x which cannot wrap. */
8910
8911 static bool
8912 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8913 {
8914 double_int di_offset, total;
8915
8916 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8917 return true;
8918
8919 if (bitpos < 0)
8920 return true;
8921
8922 if (offset == NULL_TREE)
8923 di_offset = double_int_zero;
8924 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8925 return true;
8926 else
8927 di_offset = TREE_INT_CST (offset);
8928
8929 bool overflow;
8930 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8931 total = di_offset.add_with_sign (units, true, &overflow);
8932 if (overflow)
8933 return true;
8934
8935 if (total.high != 0)
8936 return true;
8937
8938 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8939 if (size <= 0)
8940 return true;
8941
8942 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8943 array. */
8944 if (TREE_CODE (base) == ADDR_EXPR)
8945 {
8946 HOST_WIDE_INT base_size;
8947
8948 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8949 if (base_size > 0 && size < base_size)
8950 size = base_size;
8951 }
8952
8953 return total.low > (unsigned HOST_WIDE_INT) size;
8954 }
8955
8956 /* Return the least significant HOST_WIDE_INT bits of T, an INTEGER_CST
8957 of sizetype kind. This makes sure to properly sign-extend the
8958 constant. */
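/* For example (illustrative, assuming a 64-bit HOST_WIDE_INT and a
   32-bit sizetype): the unsigned value 0xfffffffc is sign-extended
   from bit 31 and returned as -4. */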
8959
8960 static HOST_WIDE_INT
8961 size_low_cst (const_tree t)
8962 {
8963 double_int d = tree_to_double_int (t);
8964 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8965 }
8966
8967 /* Subroutine of fold_binary. This routine performs all of the
8968 transformations that are common to the equality/inequality
8969 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8970 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8971 fold_binary should call fold_binary. Fold a comparison with
8972 tree code CODE and type TYPE with operands OP0 and OP1. Return
8973 the folded comparison or NULL_TREE. */
8974
8975 static tree
8976 fold_comparison (location_t loc, enum tree_code code, tree type,
8977 tree op0, tree op1)
8978 {
8979 tree arg0, arg1, tem;
8980
8981 arg0 = op0;
8982 arg1 = op1;
8983
8984 STRIP_SIGN_NOPS (arg0);
8985 STRIP_SIGN_NOPS (arg1);
8986
8987 tem = fold_relational_const (code, type, arg0, arg1);
8988 if (tem != NULL_TREE)
8989 return tem;
8990
8991 /* If one arg is a real or integer constant, put it last. */
8992 if (tree_swap_operands_p (arg0, arg1, true))
8993 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8994
8995 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8996 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8997 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8998 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8999 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
9000 && (TREE_CODE (arg1) == INTEGER_CST
9001 && !TREE_OVERFLOW (arg1)))
9002 {
9003 tree const1 = TREE_OPERAND (arg0, 1);
9004 tree const2 = arg1;
9005 tree variable = TREE_OPERAND (arg0, 0);
9006 tree lhs;
9007 int lhs_add;
9008 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9009
9010 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9011 TREE_TYPE (arg1), const2, const1);
9012
9013 /* If the constant operation overflowed this can be
9014 simplified as a comparison against INT_MAX/INT_MIN. */
9015 if (TREE_CODE (lhs) == INTEGER_CST
9016 && TREE_OVERFLOW (lhs))
9017 {
9018 int const1_sgn = tree_int_cst_sgn (const1);
9019 enum tree_code code2 = code;
9020
9021 /* Get the sign of the constant on the lhs if the
9022 operation were VARIABLE + CONST1. */
9023 if (TREE_CODE (arg0) == MINUS_EXPR)
9024 const1_sgn = -const1_sgn;
9025
9026 /* The sign of the constant determines if we overflowed
9027 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9028 Canonicalize to the INT_MIN overflow by swapping the comparison
9029 if necessary. */
9030 if (const1_sgn == -1)
9031 code2 = swap_tree_comparison (code);
9032
9033 /* We now can look at the canonicalized case
9034 VARIABLE + 1 CODE2 INT_MIN
9035 and decide on the result. */
9036 if (code2 == LT_EXPR
9037 || code2 == LE_EXPR
9038 || code2 == EQ_EXPR)
9039 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9040 else if (code2 == NE_EXPR
9041 || code2 == GE_EXPR
9042 || code2 == GT_EXPR)
9043 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9044 }
9045
9046 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9047 && (TREE_CODE (lhs) != INTEGER_CST
9048 || !TREE_OVERFLOW (lhs)))
9049 {
9050 if (code != EQ_EXPR && code != NE_EXPR)
9051 fold_overflow_warning ("assuming signed overflow does not occur "
9052 "when changing X +- C1 cmp C2 to "
9053 "X cmp C1 +- C2",
9054 WARN_STRICT_OVERFLOW_COMPARISON);
9055 return fold_build2_loc (loc, code, type, variable, lhs);
9056 }
9057 }
9058
9059 /* For comparisons of pointers we can decompose them into a compile-time
9060 comparison of the base objects and the offsets into the object.
9061 This requires at least one operand being an ADDR_EXPR or a
9062 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9063 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9064 && (TREE_CODE (arg0) == ADDR_EXPR
9065 || TREE_CODE (arg1) == ADDR_EXPR
9066 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9067 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9068 {
9069 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9070 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9071 enum machine_mode mode;
9072 int volatilep, unsignedp;
9073 bool indirect_base0 = false, indirect_base1 = false;
9074
9075 /* Get base and offset for the access. Strip ADDR_EXPR for
9076 get_inner_reference, but put it back by stripping INDIRECT_REF
9077 off the base object if possible. indirect_baseN will be true
9078 if baseN is not an address but refers to the object itself. */
9079 base0 = arg0;
9080 if (TREE_CODE (arg0) == ADDR_EXPR)
9081 {
9082 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9083 &bitsize, &bitpos0, &offset0, &mode,
9084 &unsignedp, &volatilep);
9085 if (TREE_CODE (base0) == INDIRECT_REF)
9086 base0 = TREE_OPERAND (base0, 0);
9087 else
9088 indirect_base0 = true;
9089 }
9090 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9091 {
9092 base0 = TREE_OPERAND (arg0, 0);
9093 STRIP_SIGN_NOPS (base0);
9094 if (TREE_CODE (base0) == ADDR_EXPR)
9095 {
9096 base0 = TREE_OPERAND (base0, 0);
9097 indirect_base0 = true;
9098 }
9099 offset0 = TREE_OPERAND (arg0, 1);
9100 if (tree_fits_shwi_p (offset0))
9101 {
9102 HOST_WIDE_INT off = size_low_cst (offset0);
9103 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9104 * BITS_PER_UNIT)
9105 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9106 {
9107 bitpos0 = off * BITS_PER_UNIT;
9108 offset0 = NULL_TREE;
9109 }
9110 }
9111 }
9112
9113 base1 = arg1;
9114 if (TREE_CODE (arg1) == ADDR_EXPR)
9115 {
9116 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9117 &bitsize, &bitpos1, &offset1, &mode,
9118 &unsignedp, &volatilep);
9119 if (TREE_CODE (base1) == INDIRECT_REF)
9120 base1 = TREE_OPERAND (base1, 0);
9121 else
9122 indirect_base1 = true;
9123 }
9124 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9125 {
9126 base1 = TREE_OPERAND (arg1, 0);
9127 STRIP_SIGN_NOPS (base1);
9128 if (TREE_CODE (base1) == ADDR_EXPR)
9129 {
9130 base1 = TREE_OPERAND (base1, 0);
9131 indirect_base1 = true;
9132 }
9133 offset1 = TREE_OPERAND (arg1, 1);
9134 if (tree_fits_shwi_p (offset1))
9135 {
9136 HOST_WIDE_INT off = size_low_cst (offset1);
9137 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9138 * BITS_PER_UNIT)
9139 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9140 {
9141 bitpos1 = off * BITS_PER_UNIT;
9142 offset1 = NULL_TREE;
9143 }
9144 }
9145 }
9146
9147 /* A local variable can never be pointed to by
9148 the default SSA name of an incoming parameter. */
9149 if ((TREE_CODE (arg0) == ADDR_EXPR
9150 && indirect_base0
9151 && TREE_CODE (base0) == VAR_DECL
9152 && auto_var_in_fn_p (base0, current_function_decl)
9153 && !indirect_base1
9154 && TREE_CODE (base1) == SSA_NAME
9155 && SSA_NAME_IS_DEFAULT_DEF (base1)
9156 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9157 || (TREE_CODE (arg1) == ADDR_EXPR
9158 && indirect_base1
9159 && TREE_CODE (base1) == VAR_DECL
9160 && auto_var_in_fn_p (base1, current_function_decl)
9161 && !indirect_base0
9162 && TREE_CODE (base0) == SSA_NAME
9163 && SSA_NAME_IS_DEFAULT_DEF (base0)
9164 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9165 {
9166 if (code == NE_EXPR)
9167 return constant_boolean_node (1, type);
9168 else if (code == EQ_EXPR)
9169 return constant_boolean_node (0, type);
9170 }
9171 /* If we have equivalent bases we might be able to simplify. */
9172 else if (indirect_base0 == indirect_base1
9173 && operand_equal_p (base0, base1, 0))
9174 {
9175 /* We can fold this expression to a constant if the non-constant
9176 offset parts are equal. */
9177 if ((offset0 == offset1
9178 || (offset0 && offset1
9179 && operand_equal_p (offset0, offset1, 0)))
9180 && (code == EQ_EXPR
9181 || code == NE_EXPR
9182 || (indirect_base0 && DECL_P (base0))
9183 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9184
9185 {
9186 if (code != EQ_EXPR
9187 && code != NE_EXPR
9188 && bitpos0 != bitpos1
9189 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9190 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9191 fold_overflow_warning (("assuming pointer wraparound does not "
9192 "occur when comparing P +- C1 with "
9193 "P +- C2"),
9194 WARN_STRICT_OVERFLOW_CONDITIONAL);
9195
9196 switch (code)
9197 {
9198 case EQ_EXPR:
9199 return constant_boolean_node (bitpos0 == bitpos1, type);
9200 case NE_EXPR:
9201 return constant_boolean_node (bitpos0 != bitpos1, type);
9202 case LT_EXPR:
9203 return constant_boolean_node (bitpos0 < bitpos1, type);
9204 case LE_EXPR:
9205 return constant_boolean_node (bitpos0 <= bitpos1, type);
9206 case GE_EXPR:
9207 return constant_boolean_node (bitpos0 >= bitpos1, type);
9208 case GT_EXPR:
9209 return constant_boolean_node (bitpos0 > bitpos1, type);
9210 default:;
9211 }
9212 }
9213 /* We can simplify the comparison to a comparison of the variable
9214 offset parts if the constant offset parts are equal.
9215 Be careful to use signed sizetype here because otherwise we
9216 mess with array offsets in the wrong way. This is possible
9217 because pointer arithmetic is restricted to remain within an
9218 object and overflow on pointer differences is undefined as of
9219 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9220 else if (bitpos0 == bitpos1
9221 && ((code == EQ_EXPR || code == NE_EXPR)
9222 || (indirect_base0 && DECL_P (base0))
9223 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9224 {
9225 /* By converting to signed sizetype we cover middle-end pointer
9226 arithmetic which operates on unsigned pointer types of size
9227 type size and ARRAY_REF offsets which are properly sign or
9228 zero extended from their type in case it is narrower than
9229 sizetype. */
9230 if (offset0 == NULL_TREE)
9231 offset0 = build_int_cst (ssizetype, 0);
9232 else
9233 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9234 if (offset1 == NULL_TREE)
9235 offset1 = build_int_cst (ssizetype, 0);
9236 else
9237 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9238
9239 if (code != EQ_EXPR
9240 && code != NE_EXPR
9241 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9242 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9243 fold_overflow_warning (("assuming pointer wraparound does not "
9244 "occur when comparing P +- C1 with "
9245 "P +- C2"),
9246 WARN_STRICT_OVERFLOW_COMPARISON);
9247
9248 return fold_build2_loc (loc, code, type, offset0, offset1);
9249 }
9250 }
9251 /* For non-equal bases we can simplify if they are addresses
9252 of local binding decls or constants. */
9253 else if (indirect_base0 && indirect_base1
9254 /* We know that !operand_equal_p (base0, base1, 0)
9255 because the if condition was false. But make
9256 sure two decls are not the same. */
9257 && base0 != base1
9258 && TREE_CODE (arg0) == ADDR_EXPR
9259 && TREE_CODE (arg1) == ADDR_EXPR
9260 && (((TREE_CODE (base0) == VAR_DECL
9261 || TREE_CODE (base0) == PARM_DECL)
9262 && (targetm.binds_local_p (base0)
9263 || CONSTANT_CLASS_P (base1)))
9264 || CONSTANT_CLASS_P (base0))
9265 && (((TREE_CODE (base1) == VAR_DECL
9266 || TREE_CODE (base1) == PARM_DECL)
9267 && (targetm.binds_local_p (base1)
9268 || CONSTANT_CLASS_P (base0)))
9269 || CONSTANT_CLASS_P (base1)))
9270 {
9271 if (code == EQ_EXPR)
9272 return omit_two_operands_loc (loc, type, boolean_false_node,
9273 arg0, arg1);
9274 else if (code == NE_EXPR)
9275 return omit_two_operands_loc (loc, type, boolean_true_node,
9276 arg0, arg1);
9277 }
9278 /* For equal offsets we can simplify to a comparison of the
9279 base addresses. */
9280 else if (bitpos0 == bitpos1
9281 && (indirect_base0
9282 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9283 && (indirect_base1
9284 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9285 && ((offset0 == offset1)
9286 || (offset0 && offset1
9287 && operand_equal_p (offset0, offset1, 0))))
9288 {
9289 if (indirect_base0)
9290 base0 = build_fold_addr_expr_loc (loc, base0);
9291 if (indirect_base1)
9292 base1 = build_fold_addr_expr_loc (loc, base1);
9293 return fold_build2_loc (loc, code, type, base0, base1);
9294 }
9295 }
9296
9297 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9298 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9299 the resulting offset is smaller in absolute value than the
9300 original one. */
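/* For instance (illustrative): x + 2 < y + 1 becomes x < y + -1, and
   x - 3 < y - 1 becomes x - 2 < y; in both cases the surviving
   constant is smaller in absolute value than before. */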
9301 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9302 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9303 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9304 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9305 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9306 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9307 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9308 {
9309 tree const1 = TREE_OPERAND (arg0, 1);
9310 tree const2 = TREE_OPERAND (arg1, 1);
9311 tree variable1 = TREE_OPERAND (arg0, 0);
9312 tree variable2 = TREE_OPERAND (arg1, 0);
9313 tree cst;
9314 const char * const warnmsg = G_("assuming signed overflow does not "
9315 "occur when combining constants around "
9316 "a comparison");
9317
9318 /* Put the constant on the side where it doesn't overflow and is
9319 of lower absolute value than before. */
9320 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9321 ? MINUS_EXPR : PLUS_EXPR,
9322 const2, const1);
9323 if (!TREE_OVERFLOW (cst)
9324 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9325 {
9326 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9327 return fold_build2_loc (loc, code, type,
9328 variable1,
9329 fold_build2_loc (loc,
9330 TREE_CODE (arg1), TREE_TYPE (arg1),
9331 variable2, cst));
9332 }
9333
9334 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9335 ? MINUS_EXPR : PLUS_EXPR,
9336 const1, const2);
9337 if (!TREE_OVERFLOW (cst)
9338 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9339 {
9340 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9341 return fold_build2_loc (loc, code, type,
9342 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9343 variable1, cst),
9344 variable2);
9345 }
9346 }
9347
9348 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9349 signed arithmetic case. That form is created by the compiler
9350 often enough for folding it to be of value. One example is in
9351 computing loop trip counts after Operator Strength Reduction. */
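/* For instance (illustrative): x * 4 > 0 becomes x > 0, while
   x * -2 > 0 becomes x < 0 because a negative multiplier flips the
   sense of the comparison. */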
9352 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9353 && TREE_CODE (arg0) == MULT_EXPR
9354 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9355 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9356 && integer_zerop (arg1))
9357 {
9358 tree const1 = TREE_OPERAND (arg0, 1);
9359 tree const2 = arg1; /* zero */
9360 tree variable1 = TREE_OPERAND (arg0, 0);
9361 enum tree_code cmp_code = code;
9362
9363 /* Handle unfolded multiplication by zero. */
9364 if (integer_zerop (const1))
9365 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9366
9367 fold_overflow_warning (("assuming signed overflow does not occur when "
9368 "eliminating multiplication in comparison "
9369 "with zero"),
9370 WARN_STRICT_OVERFLOW_COMPARISON);
9371
9372 /* If const1 is negative we swap the sense of the comparison. */
9373 if (tree_int_cst_sgn (const1) < 0)
9374 cmp_code = swap_tree_comparison (cmp_code);
9375
9376 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9377 }
9378
9379 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9380 if (tem)
9381 return tem;
9382
9383 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9384 {
9385 tree targ0 = strip_float_extensions (arg0);
9386 tree targ1 = strip_float_extensions (arg1);
9387 tree newtype = TREE_TYPE (targ0);
9388
9389 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9390 newtype = TREE_TYPE (targ1);
9391
9392 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9393 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9394 return fold_build2_loc (loc, code, type,
9395 fold_convert_loc (loc, newtype, targ0),
9396 fold_convert_loc (loc, newtype, targ1));
9397
9398 /* (-a) CMP (-b) -> b CMP a */
9399 if (TREE_CODE (arg0) == NEGATE_EXPR
9400 && TREE_CODE (arg1) == NEGATE_EXPR)
9401 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9402 TREE_OPERAND (arg0, 0));
9403
9404 if (TREE_CODE (arg1) == REAL_CST)
9405 {
9406 REAL_VALUE_TYPE cst;
9407 cst = TREE_REAL_CST (arg1);
9408
9409 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9410 if (TREE_CODE (arg0) == NEGATE_EXPR)
9411 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9412 TREE_OPERAND (arg0, 0),
9413 build_real (TREE_TYPE (arg1),
9414 real_value_negate (&cst)));
9415
9416 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9417 /* a CMP (-0) -> a CMP 0 */
9418 if (REAL_VALUE_MINUS_ZERO (cst))
9419 return fold_build2_loc (loc, code, type, arg0,
9420 build_real (TREE_TYPE (arg1), dconst0));
9421
9422 /* x != NaN is always true, other ops are always false. */
9423 if (REAL_VALUE_ISNAN (cst)
9424 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9425 {
9426 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9427 return omit_one_operand_loc (loc, type, tem, arg0);
9428 }
9429
9430 /* Fold comparisons against infinity. */
9431 if (REAL_VALUE_ISINF (cst)
9432 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9433 {
9434 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9435 if (tem != NULL_TREE)
9436 return tem;
9437 }
9438 }
9439
9440 /* If this is a comparison of a real constant with a PLUS_EXPR
9441 or a MINUS_EXPR of a real constant, we can convert it into a
9442 comparison with a revised real constant as long as no overflow
9443 occurs when unsafe_math_optimizations are enabled. */
9444 if (flag_unsafe_math_optimizations
9445 && TREE_CODE (arg1) == REAL_CST
9446 && (TREE_CODE (arg0) == PLUS_EXPR
9447 || TREE_CODE (arg0) == MINUS_EXPR)
9448 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9449 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9450 ? MINUS_EXPR : PLUS_EXPR,
9451 arg1, TREE_OPERAND (arg0, 1)))
9452 && !TREE_OVERFLOW (tem))
9453 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9454
9455 /* Likewise, we can simplify a comparison of a real constant with
9456 a MINUS_EXPR whose first operand is also a real constant, i.e.
9457 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9458 floating-point types only if -fassociative-math is set. */
9459 if (flag_associative_math
9460 && TREE_CODE (arg1) == REAL_CST
9461 && TREE_CODE (arg0) == MINUS_EXPR
9462 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9463 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9464 arg1))
9465 && !TREE_OVERFLOW (tem))
9466 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9467 TREE_OPERAND (arg0, 1), tem);
9468
9469 /* Fold comparisons against built-in math functions. */
9470 if (TREE_CODE (arg1) == REAL_CST
9471 && flag_unsafe_math_optimizations
9472 && ! flag_errno_math)
9473 {
9474 enum built_in_function fcode = builtin_mathfn_code (arg0);
9475
9476 if (fcode != END_BUILTINS)
9477 {
9478 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9479 if (tem != NULL_TREE)
9480 return tem;
9481 }
9482 }
9483 }
9484
9485 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9486 && CONVERT_EXPR_P (arg0))
9487 {
9488 /* If we are widening one operand of an integer comparison,
9489 see if the other operand is similarly being widened. Perhaps we
9490 can do the comparison in the narrower type. */
9491 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9492 if (tem)
9493 return tem;
9494
9495 /* Or if we are changing signedness. */
9496 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9497 if (tem)
9498 return tem;
9499 }
9500
9501 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9502 constant, we can simplify it. */
9503 if (TREE_CODE (arg1) == INTEGER_CST
9504 && (TREE_CODE (arg0) == MIN_EXPR
9505 || TREE_CODE (arg0) == MAX_EXPR)
9506 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9507 {
9508 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9509 if (tem)
9510 return tem;
9511 }
9512
9513 /* Simplify comparison of something with itself. (For IEEE
9514 floating-point, we can only do some of these simplifications.) */
9515 if (operand_equal_p (arg0, arg1, 0))
9516 {
9517 switch (code)
9518 {
9519 case EQ_EXPR:
9520 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9521 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9522 return constant_boolean_node (1, type);
9523 break;
9524
9525 case GE_EXPR:
9526 case LE_EXPR:
9527 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9528 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9529 return constant_boolean_node (1, type);
9530 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9531
9532 case NE_EXPR:
9533 /* For NE, we can only do this simplification if the type is integer
9534 or we don't honor IEEE floating-point NaNs. */
9535 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9536 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9537 break;
9538 /* ... fall through ... */
9539 case GT_EXPR:
9540 case LT_EXPR:
9541 return constant_boolean_node (0, type);
9542 default:
9543 gcc_unreachable ();
9544 }
9545 }
9546
9547 /* If we are comparing an expression that just has comparisons
9548 of two integer values, arithmetic expressions of those comparisons,
9549 and constants, we can simplify it. There are only three cases
9550 to check: the two values can either be equal, the first can be
9551 greater, or the second can be greater. Fold the expression for
9552 those three values. Since each value must be 0 or 1, we have
9553 eight possibilities, each of which corresponds to the constant 0
9554 or 1 or one of the six possible comparisons.
9555
9556 This handles common cases like (a > b) == 0 but also handles
9557 expressions like ((x > y) - (y > x)) > 0, which supposedly
9558 occur in macroized code. */
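/* As an illustrative walk-through: for (a > b) == 0 the three probes
   evaluate to 0 (a greater), 1 (equal) and 1 (b greater), giving the
   3-bit mask 011 and hence the folded result a <= b. */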
9559
9560 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9561 {
9562 tree cval1 = 0, cval2 = 0;
9563 int save_p = 0;
9564
9565 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9566 /* Don't handle degenerate cases here; they should already
9567 have been handled anyway. */
9568 && cval1 != 0 && cval2 != 0
9569 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9570 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9571 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9572 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9573 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9574 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9575 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9576 {
9577 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9578 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9579
9580 /* We can't just pass T to eval_subst in case cval1 or cval2
9581 was the same as ARG1. */
9582
9583 tree high_result
9584 = fold_build2_loc (loc, code, type,
9585 eval_subst (loc, arg0, cval1, maxval,
9586 cval2, minval),
9587 arg1);
9588 tree equal_result
9589 = fold_build2_loc (loc, code, type,
9590 eval_subst (loc, arg0, cval1, maxval,
9591 cval2, maxval),
9592 arg1);
9593 tree low_result
9594 = fold_build2_loc (loc, code, type,
9595 eval_subst (loc, arg0, cval1, minval,
9596 cval2, maxval),
9597 arg1);
9598
9599 /* All three of these results should be 0 or 1. Confirm they are.
9600 Then use those values to select the proper code to use. */
9601
9602 if (TREE_CODE (high_result) == INTEGER_CST
9603 && TREE_CODE (equal_result) == INTEGER_CST
9604 && TREE_CODE (low_result) == INTEGER_CST)
9605 {
9606 /* Make a 3-bit mask with the high-order bit being the
9607 value for `>', the next for '=', and the low for '<'. */
9608 switch ((integer_onep (high_result) * 4)
9609 + (integer_onep (equal_result) * 2)
9610 + integer_onep (low_result))
9611 {
9612 case 0:
9613 /* Always false. */
9614 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9615 case 1:
9616 code = LT_EXPR;
9617 break;
9618 case 2:
9619 code = EQ_EXPR;
9620 break;
9621 case 3:
9622 code = LE_EXPR;
9623 break;
9624 case 4:
9625 code = GT_EXPR;
9626 break;
9627 case 5:
9628 code = NE_EXPR;
9629 break;
9630 case 6:
9631 code = GE_EXPR;
9632 break;
9633 case 7:
9634 /* Always true. */
9635 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9636 }
9637
9638 if (save_p)
9639 {
9640 tem = save_expr (build2 (code, type, cval1, cval2));
9641 SET_EXPR_LOCATION (tem, loc);
9642 return tem;
9643 }
9644 return fold_build2_loc (loc, code, type, cval1, cval2);
9645 }
9646 }
9647 }
9648
9649 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9650 into a single range test. */
9651 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9652 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9653 && TREE_CODE (arg1) == INTEGER_CST
9654 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9655 && !integer_zerop (TREE_OPERAND (arg0, 1))
9656 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9657 && !TREE_OVERFLOW (arg1))
9658 {
9659 tem = fold_div_compare (loc, code, type, arg0, arg1);
9660 if (tem != NULL_TREE)
9661 return tem;
9662 }
9663
9664 /* Fold ~X op ~Y as Y op X. */
9665 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9666 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9667 {
9668 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9669 return fold_build2_loc (loc, code, type,
9670 fold_convert_loc (loc, cmp_type,
9671 TREE_OPERAND (arg1, 0)),
9672 TREE_OPERAND (arg0, 0));
9673 }
9674
9675 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9676 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9677 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9678 {
9679 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9680 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9681 TREE_OPERAND (arg0, 0),
9682 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9683 fold_convert_loc (loc, cmp_type, arg1)));
9684 }
9685
9686 return NULL_TREE;
9687 }
9688
9689
9690 /* Subroutine of fold_binary. Optimize complex multiplications of the
9691 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9692 argument EXPR represents the expression "z" of type TYPE. */
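/* In scalar terms (illustrative): for z = a + b*i,
   z * conj(z) = (a + b*i) * (a - b*i) = a*a + b*b with a zero
   imaginary part, which is exactly the form built below. */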
9693
9694 static tree
9695 fold_mult_zconjz (location_t loc, tree type, tree expr)
9696 {
9697 tree itype = TREE_TYPE (type);
9698 tree rpart, ipart, tem;
9699
9700 if (TREE_CODE (expr) == COMPLEX_EXPR)
9701 {
9702 rpart = TREE_OPERAND (expr, 0);
9703 ipart = TREE_OPERAND (expr, 1);
9704 }
9705 else if (TREE_CODE (expr) == COMPLEX_CST)
9706 {
9707 rpart = TREE_REALPART (expr);
9708 ipart = TREE_IMAGPART (expr);
9709 }
9710 else
9711 {
9712 expr = save_expr (expr);
9713 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9714 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9715 }
9716
9717 rpart = save_expr (rpart);
9718 ipart = save_expr (ipart);
9719 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9720 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9721 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9722 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9723 build_zero_cst (itype));
9724 }
9725
9726
9727 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9728 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9729 guarantees that P and N have the same least significant log2(M) bits.
9730 N is not otherwise constrained. In particular, N is not normalized to
9731 0 <= N < M as is common. In general, the precise value of P is unknown.
9732 M is chosen as large as possible such that constant N can be determined.
9733
9734 Returns M and sets *RESIDUE to N.
9735
9736 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9737 account. This is not always possible due to PR 35705.
9738 */
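/* For example (illustrative): if EXPR is &buf + 4 and buf is known to
   be 16-byte aligned, the result is M = 16 with *RESIDUE = 4, i.e.
   the pointer value is congruent to 4 modulo 16. */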
9739
9740 static unsigned HOST_WIDE_INT
9741 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9742 bool allow_func_align)
9743 {
9744 enum tree_code code;
9745
9746 *residue = 0;
9747
9748 code = TREE_CODE (expr);
9749 if (code == ADDR_EXPR)
9750 {
9751 unsigned int bitalign;
9752 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9753 *residue /= BITS_PER_UNIT;
9754 return bitalign / BITS_PER_UNIT;
9755 }
9756 else if (code == POINTER_PLUS_EXPR)
9757 {
9758 tree op0, op1;
9759 unsigned HOST_WIDE_INT modulus;
9760 enum tree_code inner_code;
9761
9762 op0 = TREE_OPERAND (expr, 0);
9763 STRIP_NOPS (op0);
9764 modulus = get_pointer_modulus_and_residue (op0, residue,
9765 allow_func_align);
9766
9767 op1 = TREE_OPERAND (expr, 1);
9768 STRIP_NOPS (op1);
9769 inner_code = TREE_CODE (op1);
9770 if (inner_code == INTEGER_CST)
9771 {
9772 *residue += TREE_INT_CST_LOW (op1);
9773 return modulus;
9774 }
9775 else if (inner_code == MULT_EXPR)
9776 {
9777 op1 = TREE_OPERAND (op1, 1);
9778 if (TREE_CODE (op1) == INTEGER_CST)
9779 {
9780 unsigned HOST_WIDE_INT align;
9781
9782 /* Compute the greatest power-of-2 divisor of op1. */
9783 align = TREE_INT_CST_LOW (op1);
9784 align &= -align;
9785
9786 /* If align is non-zero and less than modulus, replace
9787 modulus with align. If align is 0, then either op1 is 0
9788 or the greatest power-of-2 divisor of op1 doesn't fit in an
9789 unsigned HOST_WIDE_INT. In either case, no additional
9790 constraint is imposed. */
9791 if (align)
9792 modulus = MIN (modulus, align);
9793
9794 return modulus;
9795 }
9796 }
9797 }
9798
9799 /* If we get here, we were unable to determine anything useful about the
9800 expression. */
9801 return 1;
9802 }
9803
9804 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9805 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9806
9807 static bool
9808 vec_cst_ctor_to_array (tree arg, tree *elts)
9809 {
9810 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9811
9812 if (TREE_CODE (arg) == VECTOR_CST)
9813 {
9814 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9815 elts[i] = VECTOR_CST_ELT (arg, i);
9816 }
9817 else if (TREE_CODE (arg) == CONSTRUCTOR)
9818 {
9819 constructor_elt *elt;
9820
9821 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9822 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9823 return false;
9824 else
9825 elts[i] = elt->value;
9826 }
9827 else
9828 return false;
9829 for (; i < nelts; i++)
9830 elts[i]
9831 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9832 return true;
9833 }
9834
9835 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9836 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9837 NULL_TREE otherwise. */
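/* For example (illustrative): with four-element vectors,
   SEL = { 0, 4, 1, 5 } interleaves the low halves of ARG0 and ARG1,
   yielding { ARG0[0], ARG1[0], ARG0[1], ARG1[1] }. */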
9838
9839 static tree
9840 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9841 {
9842 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9843 tree *elts;
9844 bool need_ctor = false;
9845
9846 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9847 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9848 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9849 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9850 return NULL_TREE;
9851
9852 elts = XALLOCAVEC (tree, nelts * 3);
9853 if (!vec_cst_ctor_to_array (arg0, elts)
9854 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9855 return NULL_TREE;
9856
9857 for (i = 0; i < nelts; i++)
9858 {
9859 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9860 need_ctor = true;
9861 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9862 }
9863
9864 if (need_ctor)
9865 {
9866 vec<constructor_elt, va_gc> *v;
9867 vec_alloc (v, nelts);
9868 for (i = 0; i < nelts; i++)
9869 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9870 return build_constructor (type, v);
9871 }
9872 else
9873 return build_vector (type, &elts[2 * nelts]);
9874 }
9875
9876 /* Try to fold a pointer difference of type TYPE between two address
9877 expressions of array references AREF0 and AREF1 using location LOC. Return a
9878 simplified expression for the difference or NULL_TREE. */
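/* For example (illustrative): &a[i] - &a[j] simplifies to
   (i - j) * sizeof (a[0]), and the recursion extends this to nested
   references such as &a[i][k] - &a[j][l]. */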
9879
9880 static tree
9881 fold_addr_of_array_ref_difference (location_t loc, tree type,
9882 tree aref0, tree aref1)
9883 {
9884 tree base0 = TREE_OPERAND (aref0, 0);
9885 tree base1 = TREE_OPERAND (aref1, 0);
9886 tree base_offset = build_int_cst (type, 0);
9887
9888 /* If the bases are array references as well, recurse. If the bases
9889 are pointer indirections compute the difference of the pointers.
9890 If the bases are equal, we are set. */
9891 if ((TREE_CODE (base0) == ARRAY_REF
9892 && TREE_CODE (base1) == ARRAY_REF
9893 && (base_offset
9894 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9895 || (INDIRECT_REF_P (base0)
9896 && INDIRECT_REF_P (base1)
9897 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9898 TREE_OPERAND (base0, 0),
9899 TREE_OPERAND (base1, 0))))
9900 || operand_equal_p (base0, base1, 0))
9901 {
9902 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9903 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9904 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9905 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9906 return fold_build2_loc (loc, PLUS_EXPR, type,
9907 base_offset,
9908 fold_build2_loc (loc, MULT_EXPR, type,
9909 diff, esz));
9910 }
9911 return NULL_TREE;
9912 }
9913
9914 /* If the real or vector real constant CST of type TYPE has an exact
9915 inverse, return it, else return NULL. */
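/* For example (illustrative): 4.0 has the exact binary inverse 0.25,
   so x / 4.0 may become x * 0.25, whereas 3.0 has no exact inverse
   and NULL is returned. */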
9916
9917 static tree
9918 exact_inverse (tree type, tree cst)
9919 {
9920 REAL_VALUE_TYPE r;
9921 tree unit_type, *elts;
9922 enum machine_mode mode;
9923 unsigned vec_nelts, i;
9924
9925 switch (TREE_CODE (cst))
9926 {
9927 case REAL_CST:
9928 r = TREE_REAL_CST (cst);
9929
9930 if (exact_real_inverse (TYPE_MODE (type), &r))
9931 return build_real (type, r);
9932
9933 return NULL_TREE;
9934
9935 case VECTOR_CST:
9936 vec_nelts = VECTOR_CST_NELTS (cst);
9937 elts = XALLOCAVEC (tree, vec_nelts);
9938 unit_type = TREE_TYPE (type);
9939 mode = TYPE_MODE (unit_type);
9940
9941 for (i = 0; i < vec_nelts; i++)
9942 {
9943 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9944 if (!exact_real_inverse (mode, &r))
9945 return NULL_TREE;
9946 elts[i] = build_real (unit_type, r);
9947 }
9948
9949 return build_vector (type, elts);
9950
9951 default:
9952 return NULL_TREE;
9953 }
9954 }
9955
9956 /* Mask out the tz least significant bits of X of type TYPE where
9957 tz is the number of trailing zeroes in Y. */
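/* For example (illustrative): with Y = 8, which has three trailing
   zeroes, X = 0x1f is masked to 0x18, clearing its three low bits. */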
9958 static double_int
9959 mask_with_tz (tree type, double_int x, double_int y)
9960 {
9961 int tz = y.trailing_zeros ();
9962
9963 if (tz > 0)
9964 {
9965 double_int mask;
9966
9967 mask = ~double_int::mask (tz);
9968 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9969 return mask & x;
9970 }
9971 return x;
9972 }
9973
9974 /* Return true when T is an address and is known to be nonzero.
9975 For floating point we further ensure that T is not denormal.
9976 Similar logic is present in nonzero_address in rtlanal.h.
9977
9978 If the return value is based on the assumption that signed overflow
9979 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9980 change *STRICT_OVERFLOW_P. */
9981
9982 static bool
9983 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9984 {
9985 tree type = TREE_TYPE (t);
9986 enum tree_code code;
9987
9988 /* Doing something useful for floating point would need more work. */
9989 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9990 return false;
9991
9992 code = TREE_CODE (t);
9993 switch (TREE_CODE_CLASS (code))
9994 {
9995 case tcc_unary:
9996 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9997 strict_overflow_p);
9998 case tcc_binary:
9999 case tcc_comparison:
10000 return tree_binary_nonzero_warnv_p (code, type,
10001 TREE_OPERAND (t, 0),
10002 TREE_OPERAND (t, 1),
10003 strict_overflow_p);
10004 case tcc_constant:
10005 case tcc_declaration:
10006 case tcc_reference:
10007 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10008
10009 default:
10010 break;
10011 }
10012
10013 switch (code)
10014 {
10015 case TRUTH_NOT_EXPR:
10016 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10017 strict_overflow_p);
10018
10019 case TRUTH_AND_EXPR:
10020 case TRUTH_OR_EXPR:
10021 case TRUTH_XOR_EXPR:
10022 return tree_binary_nonzero_warnv_p (code, type,
10023 TREE_OPERAND (t, 0),
10024 TREE_OPERAND (t, 1),
10025 strict_overflow_p);
10026
10027 case COND_EXPR:
10028 case CONSTRUCTOR:
10029 case OBJ_TYPE_REF:
10030 case ASSERT_EXPR:
10031 case ADDR_EXPR:
10032 case WITH_SIZE_EXPR:
10033 case SSA_NAME:
10034 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10035
10036 case COMPOUND_EXPR:
10037 case MODIFY_EXPR:
10038 case BIND_EXPR:
10039 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10040 strict_overflow_p);
10041
10042 case SAVE_EXPR:
10043 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10044 strict_overflow_p);
10045
10046 case CALL_EXPR:
10047 {
10048 tree fndecl = get_callee_fndecl (t);
10049 if (!fndecl) return false;
10050 if (flag_delete_null_pointer_checks && !flag_check_new
10051 && DECL_IS_OPERATOR_NEW (fndecl)
10052 && !TREE_NOTHROW (fndecl))
10053 return true;
10054 if (flag_delete_null_pointer_checks
10055 && lookup_attribute ("returns_nonnull",
10056 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10057 return true;
10058 return alloca_call_p (t);
10059 }
10060
10061 default:
10062 break;
10063 }
10064 return false;
10065 }
10066
10067 /* Return true when T is an address and is known to be nonzero.
10068 Handle warnings about undefined signed overflow. */
10069
10070 static bool
10071 tree_expr_nonzero_p (tree t)
10072 {
10073 bool ret, strict_overflow_p;
10074
10075 strict_overflow_p = false;
10076 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10077 if (strict_overflow_p)
10078 fold_overflow_warning (("assuming signed overflow does not occur when "
10079 "determining that expression is always "
10080 "non-zero"),
10081 WARN_STRICT_OVERFLOW_MISC);
10082 return ret;
10083 }
10084
10085 /* Fold a binary expression of code CODE and type TYPE with operands
10086 OP0 and OP1. LOC is the location of the resulting expression.
10087 Return the folded expression if folding is successful. Otherwise,
10088 return NULL_TREE. */
10089
10090 tree
10091 fold_binary_loc (location_t loc,
10092 enum tree_code code, tree type, tree op0, tree op1)
10093 {
10094 enum tree_code_class kind = TREE_CODE_CLASS (code);
10095 tree arg0, arg1, tem;
10096 tree t1 = NULL_TREE;
10097 bool strict_overflow_p;
10098 unsigned int prec;
10099
10100 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10101 && TREE_CODE_LENGTH (code) == 2
10102 && op0 != NULL_TREE
10103 && op1 != NULL_TREE);
10104
10105 arg0 = op0;
10106 arg1 = op1;
10107
10108 /* Strip any conversions that don't change the mode. This is
10109 safe for every expression, except for a comparison expression
10110 because its signedness is derived from its operands. So, in
10111 the latter case, only strip conversions that don't change the
10112 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10113 preserved.
10114
10115 Note that this is done as an internal manipulation within the
10116 constant folder, in order to find the simplest representation
10117 of the arguments so that their form can be studied. In any
10118 case, the appropriate type conversions should be put back in
10119 the tree that will get out of the constant folder. */
10120
10121 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10122 {
10123 STRIP_SIGN_NOPS (arg0);
10124 STRIP_SIGN_NOPS (arg1);
10125 }
10126 else
10127 {
10128 STRIP_NOPS (arg0);
10129 STRIP_NOPS (arg1);
10130 }
10131
10132 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10133 constant but we can't do arithmetic on them. */
10134 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10135 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10136 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10137 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10138 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10139 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10140 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10141 {
10142 if (kind == tcc_binary)
10143 {
10144 /* Make sure type and arg0 have the same saturating flag. */
10145 gcc_assert (TYPE_SATURATING (type)
10146 == TYPE_SATURATING (TREE_TYPE (arg0)));
10147 tem = const_binop (code, arg0, arg1);
10148 }
10149 else if (kind == tcc_comparison)
10150 tem = fold_relational_const (code, type, arg0, arg1);
10151 else
10152 tem = NULL_TREE;
10153
10154 if (tem != NULL_TREE)
10155 {
10156 if (TREE_TYPE (tem) != type)
10157 tem = fold_convert_loc (loc, type, tem);
10158 return tem;
10159 }
10160 }
10161
10162 /* If this is a commutative operation, and ARG0 is a constant, move it
10163 to ARG1 to reduce the number of tests below. */
10164 if (commutative_tree_code (code)
10165 && tree_swap_operands_p (arg0, arg1, true))
10166 return fold_build2_loc (loc, code, type, op1, op0);
10167
10168 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10169
10170 First check for cases where an arithmetic operation is applied to a
10171 compound, conditional, or comparison operation. Push the arithmetic
10172 operation inside the compound or conditional to see if any folding
10173 can then be done. Convert comparison to conditional for this purpose.
10174 This also optimizes non-constant cases that used to be done in
10175 expand_expr.
10176
10177 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10178 one of the operands is a comparison and the other is a comparison, a
10179 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10180 code below would make the expression more complex. Change it to a
10181 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10182 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
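/* For instance (illustrative): (a < b) & (c < d) is rewritten as the
   TRUTH_AND_EXPR (a < b) && (c < d), and (a < b) == (c < d) becomes
   the negation of the TRUTH_XOR_EXPR (a < b) ^ (c < d). */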
10183
10184 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10185 || code == EQ_EXPR || code == NE_EXPR)
10186 && TREE_CODE (type) != VECTOR_TYPE
10187 && ((truth_value_p (TREE_CODE (arg0))
10188 && (truth_value_p (TREE_CODE (arg1))
10189 || (TREE_CODE (arg1) == BIT_AND_EXPR
10190 && integer_onep (TREE_OPERAND (arg1, 1)))))
10191 || (truth_value_p (TREE_CODE (arg1))
10192 && (truth_value_p (TREE_CODE (arg0))
10193 || (TREE_CODE (arg0) == BIT_AND_EXPR
10194 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10195 {
10196 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10197 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10198 : TRUTH_XOR_EXPR,
10199 boolean_type_node,
10200 fold_convert_loc (loc, boolean_type_node, arg0),
10201 fold_convert_loc (loc, boolean_type_node, arg1));
10202
10203 if (code == EQ_EXPR)
10204 tem = invert_truthvalue_loc (loc, tem);
10205
10206 return fold_convert_loc (loc, type, tem);
10207 }
10208
10209 if (TREE_CODE_CLASS (code) == tcc_binary
10210 || TREE_CODE_CLASS (code) == tcc_comparison)
10211 {
10212 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10213 {
10214 tem = fold_build2_loc (loc, code, type,
10215 fold_convert_loc (loc, TREE_TYPE (op0),
10216 TREE_OPERAND (arg0, 1)), op1);
10217 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10218 tem);
10219 }
10220 if (TREE_CODE (arg1) == COMPOUND_EXPR
10221 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10222 {
10223 tem = fold_build2_loc (loc, code, type, op0,
10224 fold_convert_loc (loc, TREE_TYPE (op1),
10225 TREE_OPERAND (arg1, 1)));
10226 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10227 tem);
10228 }
10229
10230 if (TREE_CODE (arg0) == COND_EXPR
10231 || TREE_CODE (arg0) == VEC_COND_EXPR
10232 || COMPARISON_CLASS_P (arg0))
10233 {
10234 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10235 arg0, arg1,
10236 /*cond_first_p=*/1);
10237 if (tem != NULL_TREE)
10238 return tem;
10239 }
10240
10241 if (TREE_CODE (arg1) == COND_EXPR
10242 || TREE_CODE (arg1) == VEC_COND_EXPR
10243 || COMPARISON_CLASS_P (arg1))
10244 {
10245 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10246 arg1, arg0,
10247 /*cond_first_p=*/0);
10248 if (tem != NULL_TREE)
10249 return tem;
10250 }
10251 }
10252
10253 switch (code)
10254 {
10255 case MEM_REF:
10256 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10257 if (TREE_CODE (arg0) == ADDR_EXPR
10258 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10259 {
10260 tree iref = TREE_OPERAND (arg0, 0);
10261 return fold_build2 (MEM_REF, type,
10262 TREE_OPERAND (iref, 0),
10263 int_const_binop (PLUS_EXPR, arg1,
10264 TREE_OPERAND (iref, 1)));
10265 }
10266
10267 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10268 if (TREE_CODE (arg0) == ADDR_EXPR
10269 && handled_component_p (TREE_OPERAND (arg0, 0)))
10270 {
10271 tree base;
10272 HOST_WIDE_INT coffset;
10273 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10274 &coffset);
10275 if (!base)
10276 return NULL_TREE;
10277 return fold_build2 (MEM_REF, type,
10278 build_fold_addr_expr (base),
10279 int_const_binop (PLUS_EXPR, arg1,
10280 size_int (coffset)));
10281 }
10282
10283 return NULL_TREE;
10284
10285 case POINTER_PLUS_EXPR:
10286 /* 0 +p index -> (type)index */
10287 if (integer_zerop (arg0))
10288 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10289
10290 /* PTR +p 0 -> PTR */
10291 if (integer_zerop (arg1))
10292 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10293
10294 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10295 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10296 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10297 return fold_convert_loc (loc, type,
10298 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10299 fold_convert_loc (loc, sizetype,
10300 arg1),
10301 fold_convert_loc (loc, sizetype,
10302 arg0)));
10303
10304 /* (PTR +p B) +p A -> PTR +p (B + A) */
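/* E.g. (p p+ 4) p+ 8 folds to p p+ 12 once the two offsets are
   summed in sizetype.  */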
10305 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10306 {
10307 tree inner;
10308 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10309 tree arg00 = TREE_OPERAND (arg0, 0);
10310 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10311 arg01, fold_convert_loc (loc, sizetype, arg1));
10312 return fold_convert_loc (loc, type,
10313 fold_build_pointer_plus_loc (loc,
10314 arg00, inner));
10315 }
10316
10317 /* PTR_CST +p CST -> CST1, folding the two constants into one. */
10318 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10319 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10320 fold_convert_loc (loc, type, arg1));
10321
10322 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10323 of the array. The loop optimizer sometimes produces this kind of
10324 expression. */
10325 if (TREE_CODE (arg0) == ADDR_EXPR)
10326 {
10327 tem = try_move_mult_to_index (loc, arg0,
10328 fold_convert_loc (loc,
10329 ssizetype, arg1));
10330 if (tem)
10331 return fold_convert_loc (loc, type, tem);
10332 }
10333
10334 return NULL_TREE;
10335
10336 case PLUS_EXPR:
10337 /* A + (-B) -> A - B */
10338 if (TREE_CODE (arg1) == NEGATE_EXPR)
10339 return fold_build2_loc (loc, MINUS_EXPR, type,
10340 fold_convert_loc (loc, type, arg0),
10341 fold_convert_loc (loc, type,
10342 TREE_OPERAND (arg1, 0)));
10343 /* (-A) + B -> B - A */
10344 if (TREE_CODE (arg0) == NEGATE_EXPR
10345 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10346 return fold_build2_loc (loc, MINUS_EXPR, type,
10347 fold_convert_loc (loc, type, arg1),
10348 fold_convert_loc (loc, type,
10349 TREE_OPERAND (arg0, 0)));
10350
10351 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10352 {
10353 /* Convert ~A + 1 to -A. */
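/* This is the two's-complement identity -A == ~A + 1, which is exact
   whenever overflow wraps.  */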
10354 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10355 && integer_onep (arg1))
10356 return fold_build1_loc (loc, NEGATE_EXPR, type,
10357 fold_convert_loc (loc, type,
10358 TREE_OPERAND (arg0, 0)));
10359
10360 /* ~X + X is -1. */
10361 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10362 && !TYPE_OVERFLOW_TRAPS (type))
10363 {
10364 tree tem = TREE_OPERAND (arg0, 0);
10365
10366 STRIP_NOPS (tem);
10367 if (operand_equal_p (tem, arg1, 0))
10368 {
10369 t1 = build_all_ones_cst (type);
10370 return omit_one_operand_loc (loc, type, t1, arg1);
10371 }
10372 }
10373
10374 /* X + ~X is -1. */
10375 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10376 && !TYPE_OVERFLOW_TRAPS (type))
10377 {
10378 tree tem = TREE_OPERAND (arg1, 0);
10379
10380 STRIP_NOPS (tem);
10381 if (operand_equal_p (arg0, tem, 0))
10382 {
10383 t1 = build_all_ones_cst (type);
10384 return omit_one_operand_loc (loc, type, t1, arg0);
10385 }
10386 }
10387
10388 /* X + (X / CST) * -CST is X % CST. */
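/* E.g. X + (X / 4) * -4 == X - (X / 4) * 4 == X % 4 under truncating
   division.  */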
10389 if (TREE_CODE (arg1) == MULT_EXPR
10390 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10391 && operand_equal_p (arg0,
10392 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10393 {
10394 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10395 tree cst1 = TREE_OPERAND (arg1, 1);
10396 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10397 cst1, cst0);
10398 if (sum && integer_zerop (sum))
10399 return fold_convert_loc (loc, type,
10400 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10401 TREE_TYPE (arg0), arg0,
10402 cst0));
10403 }
10404 }
10405
10406 /* Handle (A1 * C1) + (A2 * C2) with A1 == A2 or C1 == C2, or with
10407 one of them equal to 1. Make sure the type is not saturating and has the signedness of
10408 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10409 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10410 if ((TREE_CODE (arg0) == MULT_EXPR
10411 || TREE_CODE (arg1) == MULT_EXPR)
10412 && !TYPE_SATURATING (type)
10413 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10414 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10415 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10416 {
10417 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10418 if (tem)
10419 return tem;
10420 }
10421
10422 if (! FLOAT_TYPE_P (type))
10423 {
10424 if (integer_zerop (arg1))
10425 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10426
10427 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10428 with a constant, and the two constants have no bits in common,
10429 we should treat this as a BIT_IOR_EXPR since this may produce more
10430 simplifications. */
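/* E.g. (X & 0xF0) + (X & 0x0F): the masks are disjoint, so no carries
   can occur and the sum equals (X & 0xF0) | (X & 0x0F), which can
   simplify further.  */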
10431 if (TREE_CODE (arg0) == BIT_AND_EXPR
10432 && TREE_CODE (arg1) == BIT_AND_EXPR
10433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10434 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10435 && integer_zerop (const_binop (BIT_AND_EXPR,
10436 TREE_OPERAND (arg0, 1),
10437 TREE_OPERAND (arg1, 1))))
10438 {
10439 code = BIT_IOR_EXPR;
10440 goto bit_ior;
10441 }
10442
10443 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10444 (plus (plus (mult) (mult)) (foo)) so that we can
10445 take advantage of the factoring cases below. */
10446 if (TYPE_OVERFLOW_WRAPS (type)
10447 && (((TREE_CODE (arg0) == PLUS_EXPR
10448 || TREE_CODE (arg0) == MINUS_EXPR)
10449 && TREE_CODE (arg1) == MULT_EXPR)
10450 || ((TREE_CODE (arg1) == PLUS_EXPR
10451 || TREE_CODE (arg1) == MINUS_EXPR)
10452 && TREE_CODE (arg0) == MULT_EXPR)))
10453 {
10454 tree parg0, parg1, parg, marg;
10455 enum tree_code pcode;
10456
10457 if (TREE_CODE (arg1) == MULT_EXPR)
10458 parg = arg0, marg = arg1;
10459 else
10460 parg = arg1, marg = arg0;
10461 pcode = TREE_CODE (parg);
10462 parg0 = TREE_OPERAND (parg, 0);
10463 parg1 = TREE_OPERAND (parg, 1);
10464 STRIP_NOPS (parg0);
10465 STRIP_NOPS (parg1);
10466
10467 if (TREE_CODE (parg0) == MULT_EXPR
10468 && TREE_CODE (parg1) != MULT_EXPR)
10469 return fold_build2_loc (loc, pcode, type,
10470 fold_build2_loc (loc, PLUS_EXPR, type,
10471 fold_convert_loc (loc, type,
10472 parg0),
10473 fold_convert_loc (loc, type,
10474 marg)),
10475 fold_convert_loc (loc, type, parg1));
10476 if (TREE_CODE (parg0) != MULT_EXPR
10477 && TREE_CODE (parg1) == MULT_EXPR)
10478 return
10479 fold_build2_loc (loc, PLUS_EXPR, type,
10480 fold_convert_loc (loc, type, parg0),
10481 fold_build2_loc (loc, pcode, type,
10482 fold_convert_loc (loc, type, marg),
10483 fold_convert_loc (loc, type,
10484 parg1)));
10485 }
10486 }
10487 else
10488 {
10489 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10490 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10491 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10492
10493 /* Likewise if the operands are reversed. */
10494 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10495 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10496
10497 /* Convert X + -C into X - C. */
10498 if (TREE_CODE (arg1) == REAL_CST
10499 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10500 {
10501 tem = fold_negate_const (arg1, type);
10502 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10503 return fold_build2_loc (loc, MINUS_EXPR, type,
10504 fold_convert_loc (loc, type, arg0),
10505 fold_convert_loc (loc, type, tem));
10506 }
10507
10508 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10509 to __complex__ ( x, y ). This is not the same for SNaNs or
10510 if signed zeros are involved. */
10511 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10512 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10513 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10514 {
10515 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10516 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10517 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10518 bool arg0rz = false, arg0iz = false;
10519 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10520 || (arg0i && (arg0iz = real_zerop (arg0i))))
10521 {
10522 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10523 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10524 if (arg0rz && arg1i && real_zerop (arg1i))
10525 {
10526 tree rp = arg1r ? arg1r
10527 : build1 (REALPART_EXPR, rtype, arg1);
10528 tree ip = arg0i ? arg0i
10529 : build1 (IMAGPART_EXPR, rtype, arg0);
10530 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10531 }
10532 else if (arg0iz && arg1r && real_zerop (arg1r))
10533 {
10534 tree rp = arg0r ? arg0r
10535 : build1 (REALPART_EXPR, rtype, arg0);
10536 tree ip = arg1i ? arg1i
10537 : build1 (IMAGPART_EXPR, rtype, arg1);
10538 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10539 }
10540 }
10541 }
10542
10543 if (flag_unsafe_math_optimizations
10544 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10545 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10546 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10547 return tem;
10548
10549 /* Convert x+x into x*2.0. */
10550 if (operand_equal_p (arg0, arg1, 0)
10551 && SCALAR_FLOAT_TYPE_P (type))
10552 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10553 build_real (type, dconst2));
10554
10555 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10556 We associate floats only if the user has specified
10557 -fassociative-math. */
10558 if (flag_associative_math
10559 && TREE_CODE (arg1) == PLUS_EXPR
10560 && TREE_CODE (arg0) != MULT_EXPR)
10561 {
10562 tree tree10 = TREE_OPERAND (arg1, 0);
10563 tree tree11 = TREE_OPERAND (arg1, 1);
10564 if (TREE_CODE (tree11) == MULT_EXPR
10565 && TREE_CODE (tree10) == MULT_EXPR)
10566 {
10567 tree tree0;
10568 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10569 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10570 }
10571 }
10572 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10573 We associate floats only if the user has specified
10574 -fassociative-math. */
10575 if (flag_associative_math
10576 && TREE_CODE (arg0) == PLUS_EXPR
10577 && TREE_CODE (arg1) != MULT_EXPR)
10578 {
10579 tree tree00 = TREE_OPERAND (arg0, 0);
10580 tree tree01 = TREE_OPERAND (arg0, 1);
10581 if (TREE_CODE (tree01) == MULT_EXPR
10582 && TREE_CODE (tree00) == MULT_EXPR)
10583 {
10584 tree tree0;
10585 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10586 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10587 }
10588 }
10589 }
10590
10591 bit_rotate:
10592 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10593 is a rotate of A by C1 bits. */
10594 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10595 is a rotate of A by B bits. */
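/* E.g. for 32-bit unsigned X, (X << 3) + (X >> 29) sets disjoint bits,
   so the sum equals X rotated left by 3.  */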
10596 {
10597 enum tree_code code0, code1;
10598 tree rtype;
10599 code0 = TREE_CODE (arg0);
10600 code1 = TREE_CODE (arg1);
10601 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10602 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10603 && operand_equal_p (TREE_OPERAND (arg0, 0),
10604 TREE_OPERAND (arg1, 0), 0)
10605 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10606 TYPE_UNSIGNED (rtype))
10607 /* Only create rotates in complete modes. Other cases are not
10608 expanded properly. */
10609 && (element_precision (rtype)
10610 == element_precision (TYPE_MODE (rtype))))
10611 {
10612 tree tree01, tree11;
10613 enum tree_code code01, code11;
10614
10615 tree01 = TREE_OPERAND (arg0, 1);
10616 tree11 = TREE_OPERAND (arg1, 1);
10617 STRIP_NOPS (tree01);
10618 STRIP_NOPS (tree11);
10619 code01 = TREE_CODE (tree01);
10620 code11 = TREE_CODE (tree11);
10621 if (code01 == INTEGER_CST
10622 && code11 == INTEGER_CST
10623 && TREE_INT_CST_HIGH (tree01) == 0
10624 && TREE_INT_CST_HIGH (tree11) == 0
10625 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10626 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10627 {
10628 tem = build2_loc (loc, LROTATE_EXPR,
10629 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10630 TREE_OPERAND (arg0, 0),
10631 code0 == LSHIFT_EXPR ? tree01 : tree11);
10632 return fold_convert_loc (loc, type, tem);
10633 }
10634 else if (code11 == MINUS_EXPR)
10635 {
10636 tree tree110, tree111;
10637 tree110 = TREE_OPERAND (tree11, 0);
10638 tree111 = TREE_OPERAND (tree11, 1);
10639 STRIP_NOPS (tree110);
10640 STRIP_NOPS (tree111);
10641 if (TREE_CODE (tree110) == INTEGER_CST
10642 && 0 == compare_tree_int (tree110,
10643 element_precision
10644 (TREE_TYPE (TREE_OPERAND
10645 (arg0, 0))))
10646 && operand_equal_p (tree01, tree111, 0))
10647 return
10648 fold_convert_loc (loc, type,
10649 build2 ((code0 == LSHIFT_EXPR
10650 ? LROTATE_EXPR
10651 : RROTATE_EXPR),
10652 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10653 TREE_OPERAND (arg0, 0), tree01));
10654 }
10655 else if (code01 == MINUS_EXPR)
10656 {
10657 tree tree010, tree011;
10658 tree010 = TREE_OPERAND (tree01, 0);
10659 tree011 = TREE_OPERAND (tree01, 1);
10660 STRIP_NOPS (tree010);
10661 STRIP_NOPS (tree011);
10662 if (TREE_CODE (tree010) == INTEGER_CST
10663 && 0 == compare_tree_int (tree010,
10664 element_precision
10665 (TREE_TYPE (TREE_OPERAND
10666 (arg0, 0))))
10667 && operand_equal_p (tree11, tree011, 0))
10668 return fold_convert_loc
10669 (loc, type,
10670 build2 ((code0 != LSHIFT_EXPR
10671 ? LROTATE_EXPR
10672 : RROTATE_EXPR),
10673 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10674 TREE_OPERAND (arg0, 0), tree11));
10675 }
10676 }
10677 }
10678
10679 associate:
10680 /* In most languages, we can't associate operations on floats through
10681 parentheses. Rather than remember where the parentheses were, we
10682 don't associate floats at all, unless the user has specified
10683 -fassociative-math.
10684 Also, we need to make sure the type is not saturating. */
10685
10686 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10687 && !TYPE_SATURATING (type))
10688 {
10689 tree var0, con0, lit0, minus_lit0;
10690 tree var1, con1, lit1, minus_lit1;
10691 tree atype = type;
10692 bool ok = true;
10693
10694 /* Split both trees into variables, constants, and literals. Then
10695 associate each group together, the constants with literals,
10696 then the result with variables. This increases the chances of
10697 literals being recombined later and of generating relocatable
10698 expressions for the sum of a constant and literal. */
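/* Roughly: for (x + 3) - (y + 1) the variables are x and y and the
   literals are 3 and -1; the literals combine to 2 and the result
   re-associates as (x - y) + 2.  */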
10699 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10700 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10701 code == MINUS_EXPR);
10702
10703 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10704 if (code == MINUS_EXPR)
10705 code = PLUS_EXPR;
10706
10707 /* With undefined overflow prefer doing association in a type
10708 which wraps on overflow, if that is one of the operand types. */
10709 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10710 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10711 {
10712 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10713 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10714 atype = TREE_TYPE (arg0);
10715 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10716 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10717 atype = TREE_TYPE (arg1);
10718 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10719 }
10720
10721 /* With undefined overflow we can only associate constants with one
10722 variable, and constants whose association doesn't overflow. */
10723 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10724 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10725 {
10726 if (var0 && var1)
10727 {
10728 tree tmp0 = var0;
10729 tree tmp1 = var1;
10730
10731 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10732 tmp0 = TREE_OPERAND (tmp0, 0);
10733 if (CONVERT_EXPR_P (tmp0)
10734 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10735 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10736 <= TYPE_PRECISION (atype)))
10737 tmp0 = TREE_OPERAND (tmp0, 0);
10738 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10739 tmp1 = TREE_OPERAND (tmp1, 0);
10740 if (CONVERT_EXPR_P (tmp1)
10741 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10742 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10743 <= TYPE_PRECISION (atype)))
10744 tmp1 = TREE_OPERAND (tmp1, 0);
10745 /* The only case we can still associate with two variables
10746 is if they are the same, modulo negation and bit-pattern
10747 preserving conversions. */
10748 if (!operand_equal_p (tmp0, tmp1, 0))
10749 ok = false;
10750 }
10751 }
10752
10753 /* Only do something if we found more than two objects. Otherwise,
10754 nothing has changed and we risk infinite recursion. */
10755 if (ok
10756 && (2 < ((var0 != 0) + (var1 != 0)
10757 + (con0 != 0) + (con1 != 0)
10758 + (lit0 != 0) + (lit1 != 0)
10759 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10760 {
10761 bool any_overflows = false;
10762 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10763 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10764 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10765 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10766 var0 = associate_trees (loc, var0, var1, code, atype);
10767 con0 = associate_trees (loc, con0, con1, code, atype);
10768 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10769 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10770 code, atype);
10771
10772 /* Preserve the MINUS_EXPR if the negative part of the literal is
10773 greater than the positive part. Otherwise, the multiplicative
10774 folding code (i.e. extract_muldiv) may be fooled when
10775 unsigned constants are subtracted, as in the following
10776 example: ((X*2 + 4) - 8U)/2. */
10777 if (minus_lit0 && lit0)
10778 {
10779 if (TREE_CODE (lit0) == INTEGER_CST
10780 && TREE_CODE (minus_lit0) == INTEGER_CST
10781 && tree_int_cst_lt (lit0, minus_lit0))
10782 {
10783 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10784 MINUS_EXPR, atype);
10785 lit0 = 0;
10786 }
10787 else
10788 {
10789 lit0 = associate_trees (loc, lit0, minus_lit0,
10790 MINUS_EXPR, atype);
10791 minus_lit0 = 0;
10792 }
10793 }
10794
10795 /* Don't introduce overflows through reassociation. */
10796 if (!any_overflows
10797 && ((lit0 && TREE_OVERFLOW (lit0))
10798 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10799 return NULL_TREE;
10800
10801 if (minus_lit0)
10802 {
10803 if (con0 == 0)
10804 return
10805 fold_convert_loc (loc, type,
10806 associate_trees (loc, var0, minus_lit0,
10807 MINUS_EXPR, atype));
10808 else
10809 {
10810 con0 = associate_trees (loc, con0, minus_lit0,
10811 MINUS_EXPR, atype);
10812 return
10813 fold_convert_loc (loc, type,
10814 associate_trees (loc, var0, con0,
10815 PLUS_EXPR, atype));
10816 }
10817 }
10818
10819 con0 = associate_trees (loc, con0, lit0, code, atype);
10820 return
10821 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10822 code, atype));
10823 }
10824 }
10825
10826 return NULL_TREE;
10827
10828 case MINUS_EXPR:
10829 /* Pointer simplifications for subtraction, simple reassociations. */
10830 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10831 {
10832 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10833 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10834 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10835 {
10836 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10837 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10838 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10839 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10840 return fold_build2_loc (loc, PLUS_EXPR, type,
10841 fold_build2_loc (loc, MINUS_EXPR, type,
10842 arg00, arg10),
10843 fold_build2_loc (loc, MINUS_EXPR, type,
10844 arg01, arg11));
10845 }
10846 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10847 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10848 {
10849 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10850 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10851 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10852 fold_convert_loc (loc, type, arg1));
10853 if (tmp)
10854 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10855 }
10856 }
10857 /* A - (-B) -> A + B */
10858 if (TREE_CODE (arg1) == NEGATE_EXPR)
10859 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10860 fold_convert_loc (loc, type,
10861 TREE_OPERAND (arg1, 0)));
10862 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10863 if (TREE_CODE (arg0) == NEGATE_EXPR
10864 && negate_expr_p (arg1)
10865 && reorder_operands_p (arg0, arg1))
10866 return fold_build2_loc (loc, MINUS_EXPR, type,
10867 fold_convert_loc (loc, type,
10868 negate_expr (arg1)),
10869 fold_convert_loc (loc, type,
10870 TREE_OPERAND (arg0, 0)));
10871 /* Convert -A - 1 to ~A. */
10872 if (TREE_CODE (type) != COMPLEX_TYPE
10873 && TREE_CODE (arg0) == NEGATE_EXPR
10874 && integer_onep (arg1)
10875 && !TYPE_OVERFLOW_TRAPS (type))
10876 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10877 fold_convert_loc (loc, type,
10878 TREE_OPERAND (arg0, 0)));
10879
10880 /* Convert -1 - A to ~A. */
10881 if (TREE_CODE (type) != COMPLEX_TYPE
10882 && integer_all_onesp (arg0))
10883 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10884
10885
10886 /* X - (X / Y) * Y is X % Y. */
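/* E.g. 7 - (7 / 3) * 3 == 7 - 6 == 1 == 7 % 3; with truncating
   division this also holds for negative operands.  */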
10887 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10888 && TREE_CODE (arg1) == MULT_EXPR
10889 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10890 && operand_equal_p (arg0,
10891 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10892 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10893 TREE_OPERAND (arg1, 1), 0))
10894 return
10895 fold_convert_loc (loc, type,
10896 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10897 arg0, TREE_OPERAND (arg1, 1)));
10898
10899 if (! FLOAT_TYPE_P (type))
10900 {
10901 if (integer_zerop (arg0))
10902 return negate_expr (fold_convert_loc (loc, type, arg1));
10903 if (integer_zerop (arg1))
10904 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10905
10906 /* Fold A - (A & B) into ~B & A. */
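/* The bits of A & B are a subset of those of A, so the subtraction
   borrows nothing and simply clears B's bits, leaving A & ~B.  */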
10907 if (!TREE_SIDE_EFFECTS (arg0)
10908 && TREE_CODE (arg1) == BIT_AND_EXPR)
10909 {
10910 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10911 {
10912 tree arg10 = fold_convert_loc (loc, type,
10913 TREE_OPERAND (arg1, 0));
10914 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10915 fold_build1_loc (loc, BIT_NOT_EXPR,
10916 type, arg10),
10917 fold_convert_loc (loc, type, arg0));
10918 }
10919 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10920 {
10921 tree arg11 = fold_convert_loc (loc,
10922 type, TREE_OPERAND (arg1, 1));
10923 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10924 fold_build1_loc (loc, BIT_NOT_EXPR,
10925 type, arg11),
10926 fold_convert_loc (loc, type, arg0));
10927 }
10928 }
10929
10930 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10931 any power of 2 minus 1. */
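/* E.g. with B == 3 and A == 10: (A & ~3) - (A & 3) == 8 - 2 == 6,
   and likewise (A ^ 3) - 3 == 9 - 3 == 6.  */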
10932 if (TREE_CODE (arg0) == BIT_AND_EXPR
10933 && TREE_CODE (arg1) == BIT_AND_EXPR
10934 && operand_equal_p (TREE_OPERAND (arg0, 0),
10935 TREE_OPERAND (arg1, 0), 0))
10936 {
10937 tree mask0 = TREE_OPERAND (arg0, 1);
10938 tree mask1 = TREE_OPERAND (arg1, 1);
10939 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10940
10941 if (operand_equal_p (tem, mask1, 0))
10942 {
10943 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10944 TREE_OPERAND (arg0, 0), mask1);
10945 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10946 }
10947 }
10948 }
10949
10950 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10951 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10952 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10953
10954 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10955 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10956 (-ARG1 + ARG0) reduces to -ARG1. */
10957 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10958 return negate_expr (fold_convert_loc (loc, type, arg1));
10959
10960 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10961 __complex__ ( x, -y ). This is not the same for SNaNs or if
10962 signed zeros are involved. */
10963 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10964 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10965 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10966 {
10967 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10968 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10969 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10970 bool arg0rz = false, arg0iz = false;
10971 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10972 || (arg0i && (arg0iz = real_zerop (arg0i))))
10973 {
10974 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10975 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10976 if (arg0rz && arg1i && real_zerop (arg1i))
10977 {
10978 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10979 arg1r ? arg1r
10980 : build1 (REALPART_EXPR, rtype, arg1));
10981 tree ip = arg0i ? arg0i
10982 : build1 (IMAGPART_EXPR, rtype, arg0);
10983 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10984 }
10985 else if (arg0iz && arg1r && real_zerop (arg1r))
10986 {
10987 tree rp = arg0r ? arg0r
10988 : build1 (REALPART_EXPR, rtype, arg0);
10989 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10990 arg1i ? arg1i
10991 : build1 (IMAGPART_EXPR, rtype, arg1));
10992 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10993 }
10994 }
10995 }
10996
10997 /* Fold &x - &x. This can happen from &x.foo - &x.
10998 This is unsafe for certain floats even in non-IEEE formats.
11000 In IEEE, it is unsafe because it gives the wrong result for NaNs.
11000 Also note that operand_equal_p is always false if an operand
11001 is volatile. */
11002
11003 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
11004 && operand_equal_p (arg0, arg1, 0))
11005 return build_zero_cst (type);
11006
11007 /* A - B -> A + (-B) if B is easily negatable. */
11008 if (negate_expr_p (arg1)
11009 && ((FLOAT_TYPE_P (type)
11010 /* Avoid this transformation if B is a positive REAL_CST. */
11011 && (TREE_CODE (arg1) != REAL_CST
11012 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11013 || INTEGRAL_TYPE_P (type)))
11014 return fold_build2_loc (loc, PLUS_EXPR, type,
11015 fold_convert_loc (loc, type, arg0),
11016 fold_convert_loc (loc, type,
11017 negate_expr (arg1)));
11018
11019 /* Try folding difference of addresses. */
11020 {
11021 HOST_WIDE_INT diff;
11022
11023 if ((TREE_CODE (arg0) == ADDR_EXPR
11024 || TREE_CODE (arg1) == ADDR_EXPR)
11025 && ptr_difference_const (arg0, arg1, &diff))
11026 return build_int_cst_type (type, diff);
11027 }
11028
11029 /* Fold &a[i] - &a[j] to i-j. */
11030 if (TREE_CODE (arg0) == ADDR_EXPR
11031 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11032 && TREE_CODE (arg1) == ADDR_EXPR
11033 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11034 {
11035 tree tem = fold_addr_of_array_ref_difference (loc, type,
11036 TREE_OPERAND (arg0, 0),
11037 TREE_OPERAND (arg1, 0));
11038 if (tem)
11039 return tem;
11040 }
11041
11042 if (FLOAT_TYPE_P (type)
11043 && flag_unsafe_math_optimizations
11044 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11045 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11046 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11047 return tem;
11048
11049 /* Handle (A1 * C1) - (A2 * C2) with A1 == A2 or C1 == C2, or with
11050 one of them equal to 1. Make sure the type is not saturating and has the signedness of
11051 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11052 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11053 if ((TREE_CODE (arg0) == MULT_EXPR
11054 || TREE_CODE (arg1) == MULT_EXPR)
11055 && !TYPE_SATURATING (type)
11056 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11057 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11058 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11059 {
11060 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11061 if (tem)
11062 return tem;
11063 }
11064
11065 goto associate;
11066
11067 case MULT_EXPR:
11068 /* (-A) * (-B) -> A * B */
11069 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11070 return fold_build2_loc (loc, MULT_EXPR, type,
11071 fold_convert_loc (loc, type,
11072 TREE_OPERAND (arg0, 0)),
11073 fold_convert_loc (loc, type,
11074 negate_expr (arg1)));
11075 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11076 return fold_build2_loc (loc, MULT_EXPR, type,
11077 fold_convert_loc (loc, type,
11078 negate_expr (arg0)),
11079 fold_convert_loc (loc, type,
11080 TREE_OPERAND (arg1, 0)));
11081
11082 if (! FLOAT_TYPE_P (type))
11083 {
11084 if (integer_zerop (arg1))
11085 return omit_one_operand_loc (loc, type, arg1, arg0);
11086 if (integer_onep (arg1))
11087 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11088 /* Transform x * -1 into -x. Make sure to do the negation
11089 on the original operand with conversions not stripped
11090 because we can only strip non-sign-changing conversions. */
11091 if (integer_minus_onep (arg1))
11092 return fold_convert_loc (loc, type, negate_expr (op0));
11093 /* Transform x * -C into -x * C if x is easily negatable. */
11094 if (TREE_CODE (arg1) == INTEGER_CST
11095 && tree_int_cst_sgn (arg1) == -1
11096 && negate_expr_p (arg0)
11097 && (tem = negate_expr (arg1)) != arg1
11098 && !TREE_OVERFLOW (tem))
11099 return fold_build2_loc (loc, MULT_EXPR, type,
11100 fold_convert_loc (loc, type,
11101 negate_expr (arg0)),
11102 tem);
11103
11104 /* (a * (1 << b)) is (a << b) */
11105 if (TREE_CODE (arg1) == LSHIFT_EXPR
11106 && integer_onep (TREE_OPERAND (arg1, 0)))
11107 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11108 TREE_OPERAND (arg1, 1));
11109 if (TREE_CODE (arg0) == LSHIFT_EXPR
11110 && integer_onep (TREE_OPERAND (arg0, 0)))
11111 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11112 TREE_OPERAND (arg0, 1));
11113
11114 /* (A + A) * C -> A * 2 * C */
11115 if (TREE_CODE (arg0) == PLUS_EXPR
11116 && TREE_CODE (arg1) == INTEGER_CST
11117 && operand_equal_p (TREE_OPERAND (arg0, 0),
11118 TREE_OPERAND (arg0, 1), 0))
11119 return fold_build2_loc (loc, MULT_EXPR, type,
11120 omit_one_operand_loc (loc, type,
11121 TREE_OPERAND (arg0, 0),
11122 TREE_OPERAND (arg0, 1)),
11123 fold_build2_loc (loc, MULT_EXPR, type,
11124 build_int_cst (type, 2) , arg1));
11125
11126 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11127 sign-changing only. */
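/* /[ex] denotes EXACT_DIV_EXPR: the division is known to have no
   remainder, so multiplying by C reconstructs X exactly.  */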
11128 if (TREE_CODE (arg1) == INTEGER_CST
11129 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11130 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11131 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11132
11133 strict_overflow_p = false;
11134 if (TREE_CODE (arg1) == INTEGER_CST
11135 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11136 &strict_overflow_p)))
11137 {
11138 if (strict_overflow_p)
11139 fold_overflow_warning (("assuming signed overflow does not "
11140 "occur when simplifying "
11141 "multiplication"),
11142 WARN_STRICT_OVERFLOW_MISC);
11143 return fold_convert_loc (loc, type, tem);
11144 }
11145
11146 /* Optimize z * conj(z) for integer complex numbers. */
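/* (a + b*i) * (a - b*i) == a*a + b*b, a purely real result.  */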
11147 if (TREE_CODE (arg0) == CONJ_EXPR
11148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11149 return fold_mult_zconjz (loc, type, arg1);
11150 if (TREE_CODE (arg1) == CONJ_EXPR
11151 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11152 return fold_mult_zconjz (loc, type, arg0);
11153 }
11154 else
11155 {
11156 /* Maybe fold x * 0 to 0. The expressions aren't the same
11157 when x is NaN, since x * 0 is also NaN. Nor are they the
11158 same in modes with signed zeros, since multiplying a
11159 negative value by 0 gives -0, not +0. */
11160 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11161 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11162 && real_zerop (arg1))
11163 return omit_one_operand_loc (loc, type, arg1, arg0);
11164 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11165 Likewise for complex arithmetic with signed zeros. */
11166 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11167 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11168 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11169 && real_onep (arg1))
11170 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11171
11172 /* Transform x * -1.0 into -x. */
11173 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11174 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11175 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11176 && real_minus_onep (arg1))
11177 return fold_convert_loc (loc, type, negate_expr (arg0));
11178
11179 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11180 the result for floating-point types due to rounding, so it is applied
11181 only if -fassociative-math was specified. */
11182 if (flag_associative_math
11183 && TREE_CODE (arg0) == RDIV_EXPR
11184 && TREE_CODE (arg1) == REAL_CST
11185 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11186 {
11187 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11188 arg1);
11189 if (tem)
11190 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11191 TREE_OPERAND (arg0, 1));
11192 }
11193
11194 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11195 if (operand_equal_p (arg0, arg1, 0))
11196 {
11197 tree tem = fold_strip_sign_ops (arg0);
11198 if (tem != NULL_TREE)
11199 {
11200 tem = fold_convert_loc (loc, type, tem);
11201 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11202 }
11203 }
11204
11205 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11206 This is not the same for NaNs or if signed zeros are
11207 involved. */
11208 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11209 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11210 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11211 && TREE_CODE (arg1) == COMPLEX_CST
11212 && real_zerop (TREE_REALPART (arg1)))
11213 {
11214 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11215 if (real_onep (TREE_IMAGPART (arg1)))
11216 return
11217 fold_build2_loc (loc, COMPLEX_EXPR, type,
11218 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11219 rtype, arg0)),
11220 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11221 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11222 return
11223 fold_build2_loc (loc, COMPLEX_EXPR, type,
11224 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11225 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11226 rtype, arg0)));
11227 }
11228
11229 /* Optimize z * conj(z) for floating point complex numbers.
11230 Guarded by flag_unsafe_math_optimizations as non-finite
11231 imaginary components don't produce scalar results. */
11232 if (flag_unsafe_math_optimizations
11233 && TREE_CODE (arg0) == CONJ_EXPR
11234 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11235 return fold_mult_zconjz (loc, type, arg1);
11236 if (flag_unsafe_math_optimizations
11237 && TREE_CODE (arg1) == CONJ_EXPR
11238 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11239 return fold_mult_zconjz (loc, type, arg0);
11240
11241 if (flag_unsafe_math_optimizations)
11242 {
11243 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11244 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11245
11246 /* Optimizations of root(...)*root(...). */
11247 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11248 {
11249 tree rootfn, arg;
11250 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11251 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11252
11253 /* Optimize sqrt(x)*sqrt(x) as x. */
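/* This holds only for x >= 0; for negative x both factors are NaN,
   hence the flag_unsafe_math_optimizations guard on this block.  */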
11254 if (BUILTIN_SQRT_P (fcode0)
11255 && operand_equal_p (arg00, arg10, 0)
11256 && ! HONOR_SNANS (TYPE_MODE (type)))
11257 return arg00;
11258
11259 /* Optimize root(x)*root(y) as root(x*y). */
11260 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11261 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11262 return build_call_expr_loc (loc, rootfn, 1, arg);
11263 }
11264
11265 /* Optimize expN(x)*expN(y) as expN(x+y). */
11266 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11267 {
11268 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11269 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11270 CALL_EXPR_ARG (arg0, 0),
11271 CALL_EXPR_ARG (arg1, 0));
11272 return build_call_expr_loc (loc, expfn, 1, arg);
11273 }
11274
11275 /* Optimizations of pow(...)*pow(...). */
11276 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11277 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11278 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11279 {
11280 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11281 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11282 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11283 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11284
11285 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11286 if (operand_equal_p (arg01, arg11, 0))
11287 {
11288 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11289 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11290 arg00, arg10);
11291 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11292 }
11293
11294 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11295 if (operand_equal_p (arg00, arg10, 0))
11296 {
11297 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11298 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11299 arg01, arg11);
11300 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11301 }
11302 }
11303
11304 /* Optimize tan(x)*cos(x) as sin(x). */
11305 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11306 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11307 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11308 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11309 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11310 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11311 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11312 CALL_EXPR_ARG (arg1, 0), 0))
11313 {
11314 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11315
11316 if (sinfn != NULL_TREE)
11317 return build_call_expr_loc (loc, sinfn, 1,
11318 CALL_EXPR_ARG (arg0, 0));
11319 }
11320
11321 /* Optimize x*pow(x,c) as pow(x,c+1). */
11322 if (fcode1 == BUILT_IN_POW
11323 || fcode1 == BUILT_IN_POWF
11324 || fcode1 == BUILT_IN_POWL)
11325 {
11326 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11327 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11328 if (TREE_CODE (arg11) == REAL_CST
11329 && !TREE_OVERFLOW (arg11)
11330 && operand_equal_p (arg0, arg10, 0))
11331 {
11332 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11333 REAL_VALUE_TYPE c;
11334 tree arg;
11335
11336 c = TREE_REAL_CST (arg11);
11337 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11338 arg = build_real (type, c);
11339 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11340 }
11341 }
11342
11343 /* Optimize pow(x,c)*x as pow(x,c+1). */
11344 if (fcode0 == BUILT_IN_POW
11345 || fcode0 == BUILT_IN_POWF
11346 || fcode0 == BUILT_IN_POWL)
11347 {
11348 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11349 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11350 if (TREE_CODE (arg01) == REAL_CST
11351 && !TREE_OVERFLOW (arg01)
11352 && operand_equal_p (arg1, arg00, 0))
11353 {
11354 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11355 REAL_VALUE_TYPE c;
11356 tree arg;
11357
11358 c = TREE_REAL_CST (arg01);
11359 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11360 arg = build_real (type, c);
11361 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11362 }
11363 }
11364
11365 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11366 if (!in_gimple_form
11367 && optimize
11368 && operand_equal_p (arg0, arg1, 0))
11369 {
11370 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11371
11372 if (powfn)
11373 {
11374 tree arg = build_real (type, dconst2);
11375 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11376 }
11377 }
11378 }
11379 }
11380 goto associate;
11381
11382 case BIT_IOR_EXPR:
11383 bit_ior:
11384 if (integer_all_onesp (arg1))
11385 return omit_one_operand_loc (loc, type, arg1, arg0);
11386 if (integer_zerop (arg1))
11387 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11388 if (operand_equal_p (arg0, arg1, 0))
11389 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11390
11391 /* ~X | X is -1. */
11392 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11393 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11394 {
11395 t1 = build_zero_cst (type);
11396 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11397 return omit_one_operand_loc (loc, type, t1, arg1);
11398 }
11399
11400 /* X | ~X is -1. */
11401 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11402 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11403 {
11404 t1 = build_zero_cst (type);
11405 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11406 return omit_one_operand_loc (loc, type, t1, arg0);
11407 }
11408
11409 /* Canonicalize (X & C1) | C2. */
11410 if (TREE_CODE (arg0) == BIT_AND_EXPR
11411 && TREE_CODE (arg1) == INTEGER_CST
11412 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11413 {
11414 double_int c1, c2, c3, msk;
11415 int width = TYPE_PRECISION (type), w;
11416 bool try_simplify = true;
11417
11418 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11419 c2 = tree_to_double_int (arg1);
11420
11421 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11422 if ((c1 & c2) == c1)
11423 return omit_one_operand_loc (loc, type, arg1,
11424 TREE_OPERAND (arg0, 0));
11425
11426 msk = double_int::mask (width);
11427
11428 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11429 if (msk.and_not (c1 | c2).is_zero ())
11430 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11431 TREE_OPERAND (arg0, 0), arg1);
11432
11433 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11434 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11435 mode which allows further optimizations. */
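/* E.g. (X & 0xF0) | 0x3C: only the bits of C1 & ~C2 == 0xC0 can
   still come from X, so this becomes (X & 0xC0) | 0x3C.  */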
11436 c1 &= msk;
11437 c2 &= msk;
11438 c3 = c1.and_not (c2);
11439 for (w = BITS_PER_UNIT;
11440 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11441 w <<= 1)
11442 {
11443 unsigned HOST_WIDE_INT mask
11444 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11445 if (((c1.low | c2.low) & mask) == mask
11446 && (c1.low & ~mask) == 0 && c1.high == 0)
11447 {
11448 c3 = double_int::from_uhwi (mask);
11449 break;
11450 }
11451 }
11452
11453 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11454 with the (X * CST1) & CST2 folding done for BIT_AND_EXPR, which
11455 could end up in infinite recursion. */
11456 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11457 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11458 == INTEGER_CST)
11459 {
11460 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11461 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11462
11463 try_simplify = (masked != c1);
11464 }
11465
11466 if (try_simplify && c3 != c1)
11467 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11468 fold_build2_loc (loc, BIT_AND_EXPR, type,
11469 TREE_OPERAND (arg0, 0),
11470 double_int_to_tree (type,
11471 c3)),
11472 arg1);
11473 }
11474
11475 /* (X & Y) | Y is (X, Y). */
11476 if (TREE_CODE (arg0) == BIT_AND_EXPR
11477 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11478 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11479 /* (X & Y) | X is (Y, X). */
11480 if (TREE_CODE (arg0) == BIT_AND_EXPR
11481 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11482 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11483 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11484 /* X | (X & Y) is (Y, X). */
11485 if (TREE_CODE (arg1) == BIT_AND_EXPR
11486 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11487 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11488 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11489 /* X | (Y & X) is (Y, X). */
11490 if (TREE_CODE (arg1) == BIT_AND_EXPR
11491 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11492 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11493 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11494
11495 /* (X & ~Y) | (~X & Y) is X ^ Y */
11496 if (TREE_CODE (arg0) == BIT_AND_EXPR
11497 && TREE_CODE (arg1) == BIT_AND_EXPR)
11498 {
11499 tree a0, a1, l0, l1, n0, n1;
11500
11501 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11502 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11503
11504 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11505 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11506
11507 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11508 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11509
11510 if ((operand_equal_p (n0, a0, 0)
11511 && operand_equal_p (n1, a1, 0))
11512 || (operand_equal_p (n0, a1, 0)
11513 && operand_equal_p (n1, a0, 0)))
11514 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11515 }
11516
11517 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11518 if (t1 != NULL_TREE)
11519 return t1;
11520
11521 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11522
11523 This results in more efficient code for machines without a NAND
11524 instruction. Combine will canonicalize to the first form
11525 which will allow use of NAND instructions provided by the
11526 backend if they exist. */
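/* This is De Morgan's law: ~a | ~b == ~(a & b).  */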
11527 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11528 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11529 {
11530 return
11531 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11532 build2 (BIT_AND_EXPR, type,
11533 fold_convert_loc (loc, type,
11534 TREE_OPERAND (arg0, 0)),
11535 fold_convert_loc (loc, type,
11536 TREE_OPERAND (arg1, 0))));
11537 }
11538
11539 /* See if this can be simplified into a rotate first. If that
11540 is unsuccessful continue in the association code. */
11541 goto bit_rotate;
11542
11543 case BIT_XOR_EXPR:
11544 if (integer_zerop (arg1))
11545 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11546 if (integer_all_onesp (arg1))
11547 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11548 if (operand_equal_p (arg0, arg1, 0))
11549 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11550
11551 /* ~X ^ X is -1. */
11552 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11553 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11554 {
11555 t1 = build_zero_cst (type);
11556 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11557 return omit_one_operand_loc (loc, type, t1, arg1);
11558 }
11559
11560 /* X ^ ~X is -1. */
11561 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11562 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11563 {
11564 t1 = build_zero_cst (type);
11565 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11566 return omit_one_operand_loc (loc, type, t1, arg0);
11567 }
11568
11569 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11570 with a constant, and the two constants have no bits in common,
11571 we should treat this as a BIT_IOR_EXPR since this may produce more
11572 simplifications. */
11573 if (TREE_CODE (arg0) == BIT_AND_EXPR
11574 && TREE_CODE (arg1) == BIT_AND_EXPR
11575 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11576 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11577 && integer_zerop (const_binop (BIT_AND_EXPR,
11578 TREE_OPERAND (arg0, 1),
11579 TREE_OPERAND (arg1, 1))))
11580 {
11581 code = BIT_IOR_EXPR;
11582 goto bit_ior;
11583 }
11584
11585 /* (X | Y) ^ X -> Y & ~X. */
11586 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11587 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11588 {
11589 tree t2 = TREE_OPERAND (arg0, 1);
11590 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11591 arg1);
11592 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11593 fold_convert_loc (loc, type, t2),
11594 fold_convert_loc (loc, type, t1));
11595 return t1;
11596 }
11597
11598 /* (Y | X) ^ X -> Y & ~X. */
11599 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11600 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11601 {
11602 tree t2 = TREE_OPERAND (arg0, 0);
11603 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11604 arg1);
11605 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11606 fold_convert_loc (loc, type, t2),
11607 fold_convert_loc (loc, type, t1));
11608 return t1;
11609 }
11610
11611 /* X ^ (X | Y) -> Y & ~X. */
11612 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11613 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11614 {
11615 tree t2 = TREE_OPERAND (arg1, 1);
11616 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11617 arg0);
11618 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11619 fold_convert_loc (loc, type, t2),
11620 fold_convert_loc (loc, type, t1));
11621 return t1;
11622 }
11623
11624 /* X ^ (Y | X) -> Y & ~X. */
11625 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11626 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11627 {
11628 tree t2 = TREE_OPERAND (arg1, 0);
11629 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11630 arg0);
11631 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11632 fold_convert_loc (loc, type, t2),
11633 fold_convert_loc (loc, type, t1));
11634 return t1;
11635 }
11636
11637 /* Convert ~X ^ ~Y to X ^ Y. */
11638 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11639 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11640 return fold_build2_loc (loc, code, type,
11641 fold_convert_loc (loc, type,
11642 TREE_OPERAND (arg0, 0)),
11643 fold_convert_loc (loc, type,
11644 TREE_OPERAND (arg1, 0)));
11645
11646 /* Convert ~X ^ C to X ^ ~C. */
11647 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11648 && TREE_CODE (arg1) == INTEGER_CST)
11649 return fold_build2_loc (loc, code, type,
11650 fold_convert_loc (loc, type,
11651 TREE_OPERAND (arg0, 0)),
11652 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11653
11654 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11655 if (TREE_CODE (arg0) == BIT_AND_EXPR
11656 && integer_onep (TREE_OPERAND (arg0, 1))
11657 && integer_onep (arg1))
11658 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11659 build_zero_cst (TREE_TYPE (arg0)));
11660
11661 /* Fold (X & Y) ^ Y as ~X & Y. */
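/* Where Y has a 0 bit the result bit is 0; where Y has a 1 bit the
   result is the complement of X; hence ~X & Y.  */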
11662 if (TREE_CODE (arg0) == BIT_AND_EXPR
11663 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11664 {
11665 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11666 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11667 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11668 fold_convert_loc (loc, type, arg1));
11669 }
11670 /* Fold (X & Y) ^ X as ~Y & X. */
11671 if (TREE_CODE (arg0) == BIT_AND_EXPR
11672 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11673 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11674 {
11675 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11676 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11677 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11678 fold_convert_loc (loc, type, arg1));
11679 }
11680 /* Fold X ^ (X & Y) as X & ~Y. */
11681 if (TREE_CODE (arg1) == BIT_AND_EXPR
11682 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11683 {
11684 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11685 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11686 fold_convert_loc (loc, type, arg0),
11687 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11688 }
11689 /* Fold X ^ (Y & X) as ~Y & X. */
11690 if (TREE_CODE (arg1) == BIT_AND_EXPR
11691 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11692 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11693 {
11694 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11695 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11696 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11697 fold_convert_loc (loc, type, arg0));
11698 }
11699
11700 /* See if this can be simplified into a rotate first. If that
11701 is unsuccessful continue in the association code. */
11702 goto bit_rotate;
11703
11704 case BIT_AND_EXPR:
11705 if (integer_all_onesp (arg1))
11706 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11707 if (integer_zerop (arg1))
11708 return omit_one_operand_loc (loc, type, arg1, arg0);
11709 if (operand_equal_p (arg0, arg1, 0))
11710 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11711
11712 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11713 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11714 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11715 || (TREE_CODE (arg0) == EQ_EXPR
11716 && integer_zerop (TREE_OPERAND (arg0, 1))))
11717 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11718 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11719
11720 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11721 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11722 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11723 || (TREE_CODE (arg1) == EQ_EXPR
11724 && integer_zerop (TREE_OPERAND (arg1, 1))))
11725 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11726 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11727
11728 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
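/* AND distributes over OR: e.g. (X | 0x0F) & 0x3C becomes
   (X & 0x3C) | 0x0C.  */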
11729 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11730 && TREE_CODE (arg1) == INTEGER_CST
11731 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11732 {
11733 tree tmp1 = fold_convert_loc (loc, type, arg1);
11734 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11735 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11736 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11737 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11738 return
11739 fold_convert_loc (loc, type,
11740 fold_build2_loc (loc, BIT_IOR_EXPR,
11741 type, tmp2, tmp3));
11742 }
11743
11744 /* (X | Y) & Y is (X, Y). */
11745 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11746 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11747 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11748 /* (X | Y) & X is (Y, X). */
11749 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11751 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11752 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11753 /* X & (X | Y) is (Y, X). */
11754 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11756 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11757 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11758 /* X & (Y | X) is (Y, X). */
11759 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11760 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11761 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11762 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11763
11764 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11765 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11766 && integer_onep (TREE_OPERAND (arg0, 1))
11767 && integer_onep (arg1))
11768 {
11769 tree tem2;
11770 tem = TREE_OPERAND (arg0, 0);
11771 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11772 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11773 tem, tem2);
11774 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11775 build_zero_cst (TREE_TYPE (tem)));
11776 }
11777 /* Fold ~X & 1 as (X & 1) == 0. */
11778 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11779 && integer_onep (arg1))
11780 {
11781 tree tem2;
11782 tem = TREE_OPERAND (arg0, 0);
11783 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11784 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11785 tem, tem2);
11786 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11787 build_zero_cst (TREE_TYPE (tem)));
11788 }
11789 /* Fold !X & 1 as X == 0. */
11790 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11791 && integer_onep (arg1))
11792 {
11793 tem = TREE_OPERAND (arg0, 0);
11794 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11795 build_zero_cst (TREE_TYPE (tem)));
11796 }
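/* For example, with X = 6 both (X ^ 1) & 1 and ~X & 1 evaluate to 1,
as does (X & 1) == 0; with X = 7 all three evaluate to 0. */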
11797
11798 /* Fold (X ^ Y) & Y as ~X & Y. */
11799 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11800 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11801 {
11802 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11803 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11804 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11805 fold_convert_loc (loc, type, arg1));
11806 }
11807 /* Fold (X ^ Y) & X as ~Y & X. */
11808 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11809 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11810 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11811 {
11812 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11813 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11814 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11815 fold_convert_loc (loc, type, arg1));
11816 }
11817 /* Fold X & (X ^ Y) as X & ~Y. */
11818 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11819 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11820 {
11821 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11822 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11823 fold_convert_loc (loc, type, arg0),
11824 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11825 }
11826 /* Fold X & (Y ^ X) as ~Y & X. */
11827 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11828 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11829 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11830 {
11831 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11832 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11833 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11834 fold_convert_loc (loc, type, arg0));
11835 }
11836
11837 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11838 multiple of 1 << CST. */
11839 if (TREE_CODE (arg1) == INTEGER_CST)
11840 {
11841 double_int cst1 = tree_to_double_int (arg1);
11842 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11843 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11844 if ((cst1 & ncst1) == ncst1
11845 && multiple_of_p (type, arg0,
11846 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11847 return fold_convert_loc (loc, type, arg0);
11848 }
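/* For example, (X * 12) & -4 becomes X * 12: here CST is 2 and 12 is
a multiple of 1 << 2, so the bits cleared by the mask are already
zero in the product. */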
11849
11850 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11851 bits from CST2. */
11852 if (TREE_CODE (arg1) == INTEGER_CST
11853 && TREE_CODE (arg0) == MULT_EXPR
11854 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11855 {
11856 double_int masked
11857 = mask_with_tz (type, tree_to_double_int (arg1),
11858 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11859
11860 if (masked.is_zero ())
11861 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11862 arg0, arg1);
11863 else if (masked != tree_to_double_int (arg1))
11864 return fold_build2_loc (loc, code, type, op0,
11865 double_int_to_tree (type, masked));
11866 }
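/* For example, (X * 8) & 5 is known to be zero, and (X * 8) & 0x1f
drops the three known-zero low bits to become (X * 8) & 0x18. */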
11867
11868 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11869 ((A & N) + B) & M -> (A + B) & M
11870 Similarly if (N & M) == 0,
11871 ((A | N) + B) & M -> (A + B) & M
11872 and for - instead of + (or unary - instead of +)
11873 and/or ^ instead of |.
11874 If B is constant and (B & M) == 0, fold into A & M. */
11875 if (tree_fits_uhwi_p (arg1))
11876 {
11877 unsigned HOST_WIDE_INT cst1 = tree_to_uhwi (arg1);
11878 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11879 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11880 && (TREE_CODE (arg0) == PLUS_EXPR
11881 || TREE_CODE (arg0) == MINUS_EXPR
11882 || TREE_CODE (arg0) == NEGATE_EXPR)
11883 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11884 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11885 {
11886 tree pmop[2];
11887 int which = 0;
11888 unsigned HOST_WIDE_INT cst0;
11889
11890 /* Now we know that arg0 is (C + D) or (C - D) or
11891 -C and arg1 (M) is (1LL << cst) - 1.
11892 Store C into PMOP[0] and D into PMOP[1]. */
11893 pmop[0] = TREE_OPERAND (arg0, 0);
11894 pmop[1] = NULL;
11895 if (TREE_CODE (arg0) != NEGATE_EXPR)
11896 {
11897 pmop[1] = TREE_OPERAND (arg0, 1);
11898 which = 1;
11899 }
11900
11901 if (!tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11902 || (tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (arg0)))
11903 & cst1) != cst1)
11904 which = -1;
11905
11906 for (; which >= 0; which--)
11907 switch (TREE_CODE (pmop[which]))
11908 {
11909 case BIT_AND_EXPR:
11910 case BIT_IOR_EXPR:
11911 case BIT_XOR_EXPR:
11912 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11913 != INTEGER_CST)
11914 break;
11915 /* tree_to_[su]hwi not used, because we don't care about
11916 the upper bits. */
11917 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11918 cst0 &= cst1;
11919 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11920 {
11921 if (cst0 != cst1)
11922 break;
11923 }
11924 else if (cst0 != 0)
11925 break;
11926 /* If C or D is of the form (A & N) where
11927 (N & M) == M, or of the form (A | N) or
11928 (A ^ N) where (N & M) == 0, replace it with A. */
11929 pmop[which] = TREE_OPERAND (pmop[which], 0);
11930 break;
11931 case INTEGER_CST:
11932 /* If C or D is a constant N where (N & M) == 0, it can be
11933 omitted (assumed 0). */
11934 if ((TREE_CODE (arg0) == PLUS_EXPR
11935 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11936 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11937 pmop[which] = NULL;
11938 break;
11939 default:
11940 break;
11941 }
11942
11943 /* Only build anything new if we optimized one or both arguments
11944 above. */
11945 if (pmop[0] != TREE_OPERAND (arg0, 0)
11946 || (TREE_CODE (arg0) != NEGATE_EXPR
11947 && pmop[1] != TREE_OPERAND (arg0, 1)))
11948 {
11949 tree utype = TREE_TYPE (arg0);
11950 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11951 {
11952 /* Perform the operations in a type that has defined
11953 overflow behavior. */
11954 utype = unsigned_type_for (TREE_TYPE (arg0));
11955 if (pmop[0] != NULL)
11956 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11957 if (pmop[1] != NULL)
11958 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11959 }
11960
11961 if (TREE_CODE (arg0) == NEGATE_EXPR)
11962 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11963 else if (TREE_CODE (arg0) == PLUS_EXPR)
11964 {
11965 if (pmop[0] != NULL && pmop[1] != NULL)
11966 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11967 pmop[0], pmop[1]);
11968 else if (pmop[0] != NULL)
11969 tem = pmop[0];
11970 else if (pmop[1] != NULL)
11971 tem = pmop[1];
11972 else
11973 return build_int_cst (type, 0);
11974 }
11975 else if (pmop[0] == NULL)
11976 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11977 else
11978 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11979 pmop[0], pmop[1]);
11980 /* TEM is now the new binary +, - or unary - replacement. */
11981 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11982 fold_convert_loc (loc, utype, arg1));
11983 return fold_convert_loc (loc, type, tem);
11984 }
11985 }
11986 }
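/* For example, with M = 0xff, ((A & 0x1ff) + B) & 0xff becomes
(A + B) & 0xff because (0x1ff & 0xff) == 0xff, and
((A | 0x100) + B) & 0xff likewise becomes (A + B) & 0xff because
(0x100 & 0xff) == 0. */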
11987
11988 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11989 if (t1 != NULL_TREE)
11990 return t1;
11991 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11992 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11993 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11994 {
11995 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11996
11997 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11998 && (~TREE_INT_CST_LOW (arg1)
11999 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
12000 return
12001 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12002 }
12003
12004 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
12005
12006 This results in more efficient code for machines without a NOR
12007 instruction. Combine will canonicalize to the first form
12008 which will allow use of NOR instructions provided by the
12009 backend if they exist. */
12010 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12011 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12012 {
12013 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12014 build2 (BIT_IOR_EXPR, type,
12015 fold_convert_loc (loc, type,
12016 TREE_OPERAND (arg0, 0)),
12017 fold_convert_loc (loc, type,
12018 TREE_OPERAND (arg1, 0))));
12019 }
12020
12021 /* If arg0 is derived from the address of an object or function, we may
12022 be able to fold this expression using the object or function's
12023 alignment. */
12024 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
12025 {
12026 unsigned HOST_WIDE_INT modulus, residue;
12027 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
12028
12029 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12030 integer_onep (arg1));
12031
12032 /* This works because modulus is a power of 2. If this weren't the
12033 case, we'd have to replace it by its greatest power-of-2
12034 divisor: modulus & -modulus. */
12035 if (low < modulus)
12036 return build_int_cst (type, residue & low);
12037 }
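/* For example, if ARG0 is the address of a variable with 16-byte
alignment, modulus is 16 and residue is 0, so masking the address
with 7 folds to the constant 0. */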
12038
12039 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12040 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12041 if the new mask might be further optimized. */
12042 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12043 || TREE_CODE (arg0) == RSHIFT_EXPR)
12044 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12045 && TREE_CODE (arg1) == INTEGER_CST
12046 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12047 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
12048 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12049 < TYPE_PRECISION (TREE_TYPE (arg0))))
12050 {
12051 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12052 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12053 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12054 tree shift_type = TREE_TYPE (arg0);
12055
12056 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12057 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12058 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12059 && TYPE_PRECISION (TREE_TYPE (arg0))
12060 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
12061 {
12062 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12063 tree arg00 = TREE_OPERAND (arg0, 0);
12064 /* See if more bits can be proven as zero because of
12065 zero extension. */
12066 if (TREE_CODE (arg00) == NOP_EXPR
12067 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12068 {
12069 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12070 if (TYPE_PRECISION (inner_type)
12071 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
12072 && TYPE_PRECISION (inner_type) < prec)
12073 {
12074 prec = TYPE_PRECISION (inner_type);
12075 /* See if we can shorten the right shift. */
12076 if (shiftc < prec)
12077 shift_type = inner_type;
12078 }
12079 }
12080 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12081 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12082 zerobits <<= prec - shiftc;
12083 /* For an arithmetic shift, if the sign bit could be set, zerobits
12084 can actually contain sign bits, so no transformation is
12085 possible, unless MASK masks them all away. In that
12086 case the shift needs to be converted into a logical shift. */
12087 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12088 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12089 {
12090 if ((mask & zerobits) == 0)
12091 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12092 else
12093 zerobits = 0;
12094 }
12095 }
12096
12097 /* ((X << 16) & 0xff00) is (X, 0). */
12098 if ((mask & zerobits) == mask)
12099 return omit_one_operand_loc (loc, type,
12100 build_int_cst (type, 0), arg0);
12101
12102 newmask = mask | zerobits;
12103 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12104 {
12105 /* Only do the transformation if NEWMASK is some integer
12106 mode's mask. */
12107 for (prec = BITS_PER_UNIT;
12108 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12109 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12110 break;
12111 if (prec < HOST_BITS_PER_WIDE_INT
12112 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12113 {
12114 tree newmaskt;
12115
12116 if (shift_type != TREE_TYPE (arg0))
12117 {
12118 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12119 fold_convert_loc (loc, shift_type,
12120 TREE_OPERAND (arg0, 0)),
12121 TREE_OPERAND (arg0, 1));
12122 tem = fold_convert_loc (loc, type, tem);
12123 }
12124 else
12125 tem = op0;
12126 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12127 if (!tree_int_cst_equal (newmaskt, arg1))
12128 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12129 }
12130 }
12131 }
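/* For example, for unsigned 32-bit X the top 8 bits of X >> 8 are
known to be zero, so (X >> 8) & 0x00ffffff widens the mask to the
all-ones 0xffffffff and the BIT_AND is dropped entirely. */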
12132
12133 goto associate;
12134
12135 case RDIV_EXPR:
12136 /* Don't touch a floating-point divide by zero unless the mode
12137 of the constant can represent infinity. */
12138 if (TREE_CODE (arg1) == REAL_CST
12139 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12140 && real_zerop (arg1))
12141 return NULL_TREE;
12142
12143 /* Optimize A / A to 1.0 if we don't care about
12144 NaNs or Infinities. Skip the transformation
12145 for non-real operands. */
12146 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12147 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12148 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12149 && operand_equal_p (arg0, arg1, 0))
12150 {
12151 tree r = build_real (TREE_TYPE (arg0), dconst1);
12152
12153 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12154 }
12155
12156 /* The complex version of the above A / A optimization. */
12157 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12158 && operand_equal_p (arg0, arg1, 0))
12159 {
12160 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12161 if (! HONOR_NANS (TYPE_MODE (elem_type))
12162 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12163 {
12164 tree r = build_real (elem_type, dconst1);
12165 /* omit_two_operands will call fold_convert for us. */
12166 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12167 }
12168 }
12169
12170 /* (-A) / (-B) -> A / B */
12171 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12172 return fold_build2_loc (loc, RDIV_EXPR, type,
12173 TREE_OPERAND (arg0, 0),
12174 negate_expr (arg1));
12175 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12176 return fold_build2_loc (loc, RDIV_EXPR, type,
12177 negate_expr (arg0),
12178 TREE_OPERAND (arg1, 0));
12179
12180 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12181 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12182 && real_onep (arg1))
12183 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12184
12185 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12186 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12187 && real_minus_onep (arg1))
12188 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12189 negate_expr (arg0)));
12190
12191 /* If ARG1 is a constant, we can convert this to a multiply by the
12192 reciprocal. This does not have the same rounding properties,
12193 so only do this if -freciprocal-math. We can actually
12194 always safely do it if ARG1 is a power of two, but it's hard to
12195 tell if it is or not in a portable manner. */
12196 if (optimize
12197 && (TREE_CODE (arg1) == REAL_CST
12198 || (TREE_CODE (arg1) == COMPLEX_CST
12199 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12200 || (TREE_CODE (arg1) == VECTOR_CST
12201 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12202 {
12203 if (flag_reciprocal_math
12204 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12205 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12206 /* Find the reciprocal if optimizing and the result is exact.
12207 TODO: Complex reciprocal not implemented. */
12208 if (TREE_CODE (arg1) != COMPLEX_CST)
12209 {
12210 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12211
12212 if (inverse)
12213 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12214 }
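/* For example, under -freciprocal-math X / 3.0 becomes X multiplied
by the compile-time constant 1.0 / 3.0, while X / 2.0 becomes
X * 0.5 even without the flag, because the reciprocal of a power
of two is exact. */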
12215 }
12216 /* Convert A/B/C to A/(B*C). */
12217 if (flag_reciprocal_math
12218 && TREE_CODE (arg0) == RDIV_EXPR)
12219 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12220 fold_build2_loc (loc, MULT_EXPR, type,
12221 TREE_OPERAND (arg0, 1), arg1));
12222
12223 /* Convert A/(B/C) to (A/B)*C. */
12224 if (flag_reciprocal_math
12225 && TREE_CODE (arg1) == RDIV_EXPR)
12226 return fold_build2_loc (loc, MULT_EXPR, type,
12227 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12228 TREE_OPERAND (arg1, 0)),
12229 TREE_OPERAND (arg1, 1));
12230
12231 /* Convert C1/(X*C2) into (C1/C2)/X. */
12232 if (flag_reciprocal_math
12233 && TREE_CODE (arg1) == MULT_EXPR
12234 && TREE_CODE (arg0) == REAL_CST
12235 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12236 {
12237 tree tem = const_binop (RDIV_EXPR, arg0,
12238 TREE_OPERAND (arg1, 1));
12239 if (tem)
12240 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12241 TREE_OPERAND (arg1, 0));
12242 }
12243
12244 if (flag_unsafe_math_optimizations)
12245 {
12246 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12247 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12248
12249 /* Optimize sin(x)/cos(x) as tan(x). */
12250 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12251 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12252 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12253 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12254 CALL_EXPR_ARG (arg1, 0), 0))
12255 {
12256 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12257
12258 if (tanfn != NULL_TREE)
12259 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12260 }
12261
12262 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12263 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12264 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12265 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12266 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12267 CALL_EXPR_ARG (arg1, 0), 0))
12268 {
12269 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12270
12271 if (tanfn != NULL_TREE)
12272 {
12273 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12274 CALL_EXPR_ARG (arg0, 0));
12275 return fold_build2_loc (loc, RDIV_EXPR, type,
12276 build_real (type, dconst1), tmp);
12277 }
12278 }
12279
12280 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12281 NaNs or Infinities. */
12282 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12283 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12284 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12285 {
12286 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12287 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12288
12289 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12290 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12291 && operand_equal_p (arg00, arg01, 0))
12292 {
12293 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12294
12295 if (cosfn != NULL_TREE)
12296 return build_call_expr_loc (loc, cosfn, 1, arg00);
12297 }
12298 }
12299
12300 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12301 NaNs or Infinities. */
12302 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12303 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12304 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12305 {
12306 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12307 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12308
12309 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12310 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12311 && operand_equal_p (arg00, arg01, 0))
12312 {
12313 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12314
12315 if (cosfn != NULL_TREE)
12316 {
12317 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12318 return fold_build2_loc (loc, RDIV_EXPR, type,
12319 build_real (type, dconst1),
12320 tmp);
12321 }
12322 }
12323 }
12324
12325 /* Optimize pow(x,c)/x as pow(x,c-1). */
12326 if (fcode0 == BUILT_IN_POW
12327 || fcode0 == BUILT_IN_POWF
12328 || fcode0 == BUILT_IN_POWL)
12329 {
12330 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12331 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12332 if (TREE_CODE (arg01) == REAL_CST
12333 && !TREE_OVERFLOW (arg01)
12334 && operand_equal_p (arg1, arg00, 0))
12335 {
12336 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12337 REAL_VALUE_TYPE c;
12338 tree arg;
12339
12340 c = TREE_REAL_CST (arg01);
12341 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12342 arg = build_real (type, c);
12343 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12344 }
12345 }
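/* For example, pow (x, 3.5) / x becomes pow (x, 2.5). */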
12346
12347 /* Optimize a/root(b/c) into a*root(c/b). */
12348 if (BUILTIN_ROOT_P (fcode1))
12349 {
12350 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12351
12352 if (TREE_CODE (rootarg) == RDIV_EXPR)
12353 {
12354 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12355 tree b = TREE_OPERAND (rootarg, 0);
12356 tree c = TREE_OPERAND (rootarg, 1);
12357
12358 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12359
12360 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12361 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12362 }
12363 }
12364
12365 /* Optimize x/expN(y) into x*expN(-y). */
12366 if (BUILTIN_EXPONENT_P (fcode1))
12367 {
12368 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12369 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12370 arg1 = build_call_expr_loc (loc,
12371 expfn, 1,
12372 fold_convert_loc (loc, type, arg));
12373 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12374 }
12375
12376 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12377 if (fcode1 == BUILT_IN_POW
12378 || fcode1 == BUILT_IN_POWF
12379 || fcode1 == BUILT_IN_POWL)
12380 {
12381 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12382 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12383 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12384 tree neg11 = fold_convert_loc (loc, type,
12385 negate_expr (arg11));
12386 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12387 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12388 }
12389 }
12390 return NULL_TREE;
12391
12392 case TRUNC_DIV_EXPR:
12393 /* Optimize (X & (-A)) / A where A is a power of 2,
12394 to X >> log2(A) */
12395 if (TREE_CODE (arg0) == BIT_AND_EXPR
12396 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12397 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12398 {
12399 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12400 arg1, TREE_OPERAND (arg0, 1));
12401 if (sum && integer_zerop (sum))
{
12402 unsigned long pow2;
12403
12404 if (TREE_INT_CST_LOW (arg1))
12405 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12406 else
12407 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12408 + HOST_BITS_PER_WIDE_INT;
12409
12410 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12411 TREE_OPERAND (arg0, 0),
12412 build_int_cst (integer_type_node, pow2));
12413 }
12414 }
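/* For example, (X & -8) / 8 becomes X >> 3: the BIT_AND clears the
three low bits, so the truncating division is exact and equal to
the arithmetic shift. */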
12415
12416 /* Fall through */
12417
12418 case FLOOR_DIV_EXPR:
12419 /* Simplify A / (B << N) where A and B are positive and B is
12420 a power of 2, to A >> (N + log2(B)). */
12421 strict_overflow_p = false;
12422 if (TREE_CODE (arg1) == LSHIFT_EXPR
12423 && (TYPE_UNSIGNED (type)
12424 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12425 {
12426 tree sval = TREE_OPERAND (arg1, 0);
12427 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12428 {
12429 tree sh_cnt = TREE_OPERAND (arg1, 1);
12430 unsigned long pow2;
12431
12432 if (TREE_INT_CST_LOW (sval))
12433 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12434 else
12435 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12436 + HOST_BITS_PER_WIDE_INT;
12437
12438 if (strict_overflow_p)
12439 fold_overflow_warning (("assuming signed overflow does not "
12440 "occur when simplifying A / (B << N)"),
12441 WARN_STRICT_OVERFLOW_MISC);
12442
12443 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12444 sh_cnt,
12445 build_int_cst (TREE_TYPE (sh_cnt),
12446 pow2));
12447 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12448 fold_convert_loc (loc, type, arg0), sh_cnt);
12449 }
12450 }
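/* For example, for unsigned A, A / (4 << N) becomes A >> (N + 2). */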
12451
12452 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12453 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12454 if (INTEGRAL_TYPE_P (type)
12455 && TYPE_UNSIGNED (type)
12456 && code == FLOOR_DIV_EXPR)
12457 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12458
12459 /* Fall through */
12460
12461 case ROUND_DIV_EXPR:
12462 case CEIL_DIV_EXPR:
12463 case EXACT_DIV_EXPR:
12464 if (integer_onep (arg1))
12465 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12466 if (integer_zerop (arg1))
12467 return NULL_TREE;
12468 /* X / -1 is -X. */
12469 if (!TYPE_UNSIGNED (type)
12470 && TREE_CODE (arg1) == INTEGER_CST
12471 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12472 && TREE_INT_CST_HIGH (arg1) == -1)
12473 return fold_convert_loc (loc, type, negate_expr (arg0));
12474
12475 /* Convert -A / -B to A / B when the type is signed and overflow is
12476 undefined. */
12477 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12478 && TREE_CODE (arg0) == NEGATE_EXPR
12479 && negate_expr_p (arg1))
12480 {
12481 if (INTEGRAL_TYPE_P (type))
12482 fold_overflow_warning (("assuming signed overflow does not occur "
12483 "when distributing negation across "
12484 "division"),
12485 WARN_STRICT_OVERFLOW_MISC);
12486 return fold_build2_loc (loc, code, type,
12487 fold_convert_loc (loc, type,
12488 TREE_OPERAND (arg0, 0)),
12489 fold_convert_loc (loc, type,
12490 negate_expr (arg1)));
12491 }
12492 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12493 && TREE_CODE (arg1) == NEGATE_EXPR
12494 && negate_expr_p (arg0))
12495 {
12496 if (INTEGRAL_TYPE_P (type))
12497 fold_overflow_warning (("assuming signed overflow does not occur "
12498 "when distributing negation across "
12499 "division"),
12500 WARN_STRICT_OVERFLOW_MISC);
12501 return fold_build2_loc (loc, code, type,
12502 fold_convert_loc (loc, type,
12503 negate_expr (arg0)),
12504 fold_convert_loc (loc, type,
12505 TREE_OPERAND (arg1, 0)));
12506 }
12507
12508 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12509 operation, EXACT_DIV_EXPR.
12510
12511 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12512 At one time others generated faster code, but it's not clear if they do
12513 after the last round of changes to the DIV code in expmed.c. */
12514 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12515 && multiple_of_p (type, arg0, arg1))
12516 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12517
12518 strict_overflow_p = false;
12519 if (TREE_CODE (arg1) == INTEGER_CST
12520 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12521 &strict_overflow_p)))
12522 {
12523 if (strict_overflow_p)
12524 fold_overflow_warning (("assuming signed overflow does not occur "
12525 "when simplifying division"),
12526 WARN_STRICT_OVERFLOW_MISC);
12527 return fold_convert_loc (loc, type, tem);
12528 }
12529
12530 return NULL_TREE;
12531
12532 case CEIL_MOD_EXPR:
12533 case FLOOR_MOD_EXPR:
12534 case ROUND_MOD_EXPR:
12535 case TRUNC_MOD_EXPR:
12536 /* X % 1 is always zero, but be sure to preserve any side
12537 effects in X. */
12538 if (integer_onep (arg1))
12539 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12540
12541 /* X % 0: return X % 0 unchanged so that we can get the
12542 proper warnings and errors. */
12543 if (integer_zerop (arg1))
12544 return NULL_TREE;
12545
12546 /* 0 % X is always zero, but be sure to preserve any side
12547 effects in X. Place this after checking for X == 0. */
12548 if (integer_zerop (arg0))
12549 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12550
12551 /* X % -1 is zero. */
12552 if (!TYPE_UNSIGNED (type)
12553 && TREE_CODE (arg1) == INTEGER_CST
12554 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12555 && TREE_INT_CST_HIGH (arg1) == -1)
12556 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12557
12558 /* X % -C is the same as X % C. */
12559 if (code == TRUNC_MOD_EXPR
12560 && !TYPE_UNSIGNED (type)
12561 && TREE_CODE (arg1) == INTEGER_CST
12562 && !TREE_OVERFLOW (arg1)
12563 && TREE_INT_CST_HIGH (arg1) < 0
12564 && !TYPE_OVERFLOW_TRAPS (type)
12565 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12566 && !sign_bit_p (arg1, arg1))
12567 return fold_build2_loc (loc, code, type,
12568 fold_convert_loc (loc, type, arg0),
12569 fold_convert_loc (loc, type,
12570 negate_expr (arg1)));
12571
12572 /* X % -Y is the same as X % Y. */
12573 if (code == TRUNC_MOD_EXPR
12574 && !TYPE_UNSIGNED (type)
12575 && TREE_CODE (arg1) == NEGATE_EXPR
12576 && !TYPE_OVERFLOW_TRAPS (type))
12577 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12578 fold_convert_loc (loc, type,
12579 TREE_OPERAND (arg1, 0)));
12580
12581 strict_overflow_p = false;
12582 if (TREE_CODE (arg1) == INTEGER_CST
12583 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12584 &strict_overflow_p)))
12585 {
12586 if (strict_overflow_p)
12587 fold_overflow_warning (("assuming signed overflow does not occur "
12588 "when simplifying modulus"),
12589 WARN_STRICT_OVERFLOW_MISC);
12590 return fold_convert_loc (loc, type, tem);
12591 }
12592
12593 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12594 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12595 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12596 && (TYPE_UNSIGNED (type)
12597 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12598 {
12599 tree c = arg1;
12600 /* Also optimize A % (C << N) where C is a power of 2,
12601 to A & ((C << N) - 1). */
12602 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12603 c = TREE_OPERAND (arg1, 0);
12604
12605 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12606 {
12607 tree mask
12608 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12609 build_int_cst (TREE_TYPE (arg1), 1));
12610 if (strict_overflow_p)
12611 fold_overflow_warning (("assuming signed overflow does not "
12612 "occur when simplifying "
12613 "X % (power of two)"),
12614 WARN_STRICT_OVERFLOW_MISC);
12615 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12616 fold_convert_loc (loc, type, arg0),
12617 fold_convert_loc (loc, type, mask));
12618 }
12619 }
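/* For example, for unsigned X, X % 8 becomes X & 7, and
X % (2 << N) becomes X & ((2 << N) - 1). */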
12620
12621 return NULL_TREE;
12622
12623 case LROTATE_EXPR:
12624 case RROTATE_EXPR:
12625 if (integer_all_onesp (arg0))
12626 return omit_one_operand_loc (loc, type, arg0, arg1);
12627 goto shift;
12628
12629 case RSHIFT_EXPR:
12630 /* Optimize -1 >> x for arithmetic right shifts. */
12631 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12632 && tree_expr_nonnegative_p (arg1))
12633 return omit_one_operand_loc (loc, type, arg0, arg1);
12634 /* ... fall through ... */
12635
12636 case LSHIFT_EXPR:
12637 shift:
12638 if (integer_zerop (arg1))
12639 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12640 if (integer_zerop (arg0))
12641 return omit_one_operand_loc (loc, type, arg0, arg1);
12642
12643 /* Prefer vector1 << scalar to vector1 << vector2
12644 if vector2 is uniform. */
12645 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12646 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12647 return fold_build2_loc (loc, code, type, op0, tem);
12648
12649 /* Since negative shift count is not well-defined,
12650 don't try to compute it in the compiler. */
12651 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12652 return NULL_TREE;
12653
12654 prec = element_precision (type);
12655
12656 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12657 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12658 && tree_to_uhwi (arg1) < prec
12659 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12660 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12661 {
12662 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12663 + tree_to_uhwi (arg1));
12664
12665 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12666 being well defined. */
12667 if (low >= prec)
12668 {
12669 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12670 low = low % prec;
12671 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12672 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12673 TREE_OPERAND (arg0, 0));
12674 else
12675 low = prec - 1;
12676 }
12677
12678 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12679 build_int_cst (TREE_TYPE (arg1), low));
12680 }
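/* For example, (X >> 3) >> 5 becomes X >> 8, while (X << 3) << 29
in a 32-bit type shifts every bit out and folds to 0. */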
12681
12682 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12683 into x & ((unsigned)-1 >> c) for unsigned types. */
12684 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12685 || (TYPE_UNSIGNED (type)
12686 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12687 && tree_fits_uhwi_p (arg1)
12688 && tree_to_uhwi (arg1) < prec
12689 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12690 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12691 {
12692 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12693 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12694 tree lshift;
12695 tree arg00;
12696
12697 if (low0 == low1)
12698 {
12699 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12700
12701 lshift = build_minus_one_cst (type);
12702 lshift = const_binop (code, lshift, arg1);
12703
12704 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12705 }
12706 }
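/* For example, (X >> 4) << 4 becomes X & -16, and for unsigned
32-bit X, (X << 4) >> 4 becomes X & 0x0fffffff. */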
12707
12708 /* Rewrite an LROTATE_EXPR by a constant into an
12709 RROTATE_EXPR by a new constant. */
12710 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12711 {
12712 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12713 tem = const_binop (MINUS_EXPR, tem, arg1);
12714 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12715 }
12716
12717 /* If we have a rotate of a bit operation with the rotate count and
12718 the second operand of the bit operation both constant,
12719 permute the two operations. */
12720 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12721 && (TREE_CODE (arg0) == BIT_AND_EXPR
12722 || TREE_CODE (arg0) == BIT_IOR_EXPR
12723 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12725 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12726 fold_build2_loc (loc, code, type,
12727 TREE_OPERAND (arg0, 0), arg1),
12728 fold_build2_loc (loc, code, type,
12729 TREE_OPERAND (arg0, 1), arg1));
12730
12731 /* Two consecutive rotates adding up to the precision of the
12732 type can be ignored. */
12733 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12734 && TREE_CODE (arg0) == RROTATE_EXPR
12735 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12736 && TREE_INT_CST_HIGH (arg1) == 0
12737 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12738 && ((TREE_INT_CST_LOW (arg1)
12739 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12740 == prec))
12741 return TREE_OPERAND (arg0, 0);
12742
12743 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12744 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12745 if the latter can be further optimized. */
12746 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12747 && TREE_CODE (arg0) == BIT_AND_EXPR
12748 && TREE_CODE (arg1) == INTEGER_CST
12749 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12750 {
12751 tree mask = fold_build2_loc (loc, code, type,
12752 fold_convert_loc (loc, type,
12753 TREE_OPERAND (arg0, 1)),
12754 arg1);
12755 tree shift = fold_build2_loc (loc, code, type,
12756 fold_convert_loc (loc, type,
12757 TREE_OPERAND (arg0, 0)),
12758 arg1);
12759 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12760 if (tem)
12761 return tem;
12762 }
12763
12764 return NULL_TREE;
12765
12766 case MIN_EXPR:
12767 if (operand_equal_p (arg0, arg1, 0))
12768 return omit_one_operand_loc (loc, type, arg0, arg1);
12769 if (INTEGRAL_TYPE_P (type)
12770 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12771 return omit_one_operand_loc (loc, type, arg1, arg0);
12772 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12773 if (tem)
12774 return tem;
12775 goto associate;
12776
12777 case MAX_EXPR:
12778 if (operand_equal_p (arg0, arg1, 0))
12779 return omit_one_operand_loc (loc, type, arg0, arg1);
12780 if (INTEGRAL_TYPE_P (type)
12781 && TYPE_MAX_VALUE (type)
12782 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12783 return omit_one_operand_loc (loc, type, arg1, arg0);
12784 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12785 if (tem)
12786 return tem;
12787 goto associate;
12788
12789 case TRUTH_ANDIF_EXPR:
12790 /* Note that the operands of this must be ints
12791 and their values must be 0 or 1.
12792 ("true" is a fixed value perhaps depending on the language.) */
12793 /* If first arg is constant zero, return it. */
12794 if (integer_zerop (arg0))
12795 return fold_convert_loc (loc, type, arg0);
12796 case TRUTH_AND_EXPR:
12797 /* If either arg is constant true, drop it. */
12798 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12799 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12800 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12801 /* Preserve sequence points. */
12802 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12803 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12804 /* If second arg is constant zero, result is zero, but first arg
12805 must be evaluated. */
12806 if (integer_zerop (arg1))
12807 return omit_one_operand_loc (loc, type, arg1, arg0);
12808 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12809 case will be handled here. */
12810 if (integer_zerop (arg0))
12811 return omit_one_operand_loc (loc, type, arg0, arg1);
12812
12813 /* !X && X is always false. */
12814 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12815 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12816 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12817 /* X && !X is always false. */
12818 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12819 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12820 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12821
12822 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12823 means A >= Y && A != MAX, but in this case we know that
12824 A < X <= MAX. */
12825
12826 if (!TREE_SIDE_EFFECTS (arg0)
12827 && !TREE_SIDE_EFFECTS (arg1))
12828 {
12829 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12830 if (tem && !operand_equal_p (tem, arg0, 0))
12831 return fold_build2_loc (loc, code, type, tem, arg1);
12832
12833 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12834 if (tem && !operand_equal_p (tem, arg1, 0))
12835 return fold_build2_loc (loc, code, type, arg0, tem);
12836 }
12837
12838 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12839 != NULL_TREE)
12840 return tem;
12841
12842 return NULL_TREE;
12843
12844 case TRUTH_ORIF_EXPR:
12845 /* Note that the operands of this must be ints
12846 and their values must be 0 or true.
12847 ("true" is a fixed value perhaps depending on the language.) */
12848 /* If first arg is constant true, return it. */
12849 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12850 return fold_convert_loc (loc, type, arg0);
12851 case TRUTH_OR_EXPR:
12852 /* If either arg is constant zero, drop it. */
12853 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12854 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12855 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12856 /* Preserve sequence points. */
12857 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12858 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12859 /* If second arg is constant true, result is true, but we must
12860 evaluate first arg. */
12861 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12862 return omit_one_operand_loc (loc, type, arg1, arg0);
12863 /* Likewise for first arg, but note this only occurs here for
12864 TRUTH_OR_EXPR. */
12865 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12866 return omit_one_operand_loc (loc, type, arg0, arg1);
12867
12868 /* !X || X is always true. */
12869 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12870 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12871 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12872 /* X || !X is always true. */
12873 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12874 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12875 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12876
12877 /* (X && !Y) || (!X && Y) is X ^ Y */
12878 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12879 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12880 {
12881 tree a0, a1, l0, l1, n0, n1;
12882
12883 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12884 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12885
12886 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12887 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12888
12889 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12890 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12891
12892 if ((operand_equal_p (n0, a0, 0)
12893 && operand_equal_p (n1, a1, 0))
12894 || (operand_equal_p (n0, a1, 0)
12895 && operand_equal_p (n1, a0, 0)))
12896 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12897 }
12898
12899 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12900 != NULL_TREE)
12901 return tem;
12902
12903 return NULL_TREE;
12904
12905 case TRUTH_XOR_EXPR:
12906 /* If the second arg is constant zero, drop it. */
12907 if (integer_zerop (arg1))
12908 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12909 /* If the second arg is constant true, this is a logical inversion. */
12910 if (integer_onep (arg1))
12911 {
12912 tem = invert_truthvalue_loc (loc, arg0);
12913 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12914 }
12915 /* Identical arguments cancel to zero. */
12916 if (operand_equal_p (arg0, arg1, 0))
12917 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12918
12919 /* !X ^ X is always true. */
12920 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12921 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12922 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12923
12924 /* X ^ !X is always true. */
12925 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12926 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12927 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12928
12929 return NULL_TREE;
12930
12931 case EQ_EXPR:
12932 case NE_EXPR:
12933 STRIP_NOPS (arg0);
12934 STRIP_NOPS (arg1);
12935
12936 tem = fold_comparison (loc, code, type, op0, op1);
12937 if (tem != NULL_TREE)
12938 return tem;
12939
12940 /* bool_var != 0 becomes bool_var. */
12941 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12942 && code == NE_EXPR)
12943 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12944
12945 /* bool_var == 1 becomes bool_var. */
12946 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12947 && code == EQ_EXPR)
12948 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12949
12950 /* bool_var != 1 becomes !bool_var. */
12951 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12952 && code == NE_EXPR)
12953 return fold_convert_loc (loc, type,
12954 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12955 TREE_TYPE (arg0), arg0));
12956
12957 /* bool_var == 0 becomes !bool_var. */
12958 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12959 && code == EQ_EXPR)
12960 return fold_convert_loc (loc, type,
12961 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12962 TREE_TYPE (arg0), arg0));
12963
12964 /* !exp != 0 becomes !exp */
12965 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12966 && code == NE_EXPR)
12967 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12968
12969 /* If this is an equality comparison of the address of two non-weak,
12970 unaliased symbols neither of which are extern (since we do not
12971 have access to attributes for externs), then we know the result. */
12972 if (TREE_CODE (arg0) == ADDR_EXPR
12973 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12974 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12975 && ! lookup_attribute ("alias",
12976 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12977 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12978 && TREE_CODE (arg1) == ADDR_EXPR
12979 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12980 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12981 && ! lookup_attribute ("alias",
12982 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12983 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12984 {
12985 /* We know that we're looking at the address of two
12986 non-weak, unaliased, static _DECL nodes.
12987
12988 It is both wasteful and incorrect to call operand_equal_p
12989 to compare the two ADDR_EXPR nodes. It is wasteful in that
12990 all we need to do is test pointer equality for the arguments
12991 to the two ADDR_EXPR nodes. It is incorrect to use
12992 operand_equal_p as that function is NOT equivalent to a
12993 C equality test. It can in fact return false for two
12994 objects which would test as equal using the C equality
12995 operator. */
12996 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12997 return constant_boolean_node (equal
12998 ? code == EQ_EXPR : code != EQ_EXPR,
12999 type);
13000 }
13001
13002 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
13003 a MINUS_EXPR of a constant, we can convert it into a comparison with
13004 a revised constant as long as no overflow occurs. */
13005 if (TREE_CODE (arg1) == INTEGER_CST
13006 && (TREE_CODE (arg0) == PLUS_EXPR
13007 || TREE_CODE (arg0) == MINUS_EXPR)
13008 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13009 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13010 ? MINUS_EXPR : PLUS_EXPR,
13011 fold_convert_loc (loc, TREE_TYPE (arg0),
13012 arg1),
13013 TREE_OPERAND (arg0, 1)))
13014 && !TREE_OVERFLOW (tem))
13015 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13016
13017 /* Similarly for a NEGATE_EXPR. */
13018 if (TREE_CODE (arg0) == NEGATE_EXPR
13019 && TREE_CODE (arg1) == INTEGER_CST
13020 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13021 arg1)))
13022 && TREE_CODE (tem) == INTEGER_CST
13023 && !TREE_OVERFLOW (tem))
13024 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13025
13026 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
13027 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13028 && TREE_CODE (arg1) == INTEGER_CST
13029 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13030 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13031 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13032 fold_convert_loc (loc,
13033 TREE_TYPE (arg0),
13034 arg1),
13035 TREE_OPERAND (arg0, 1)));
13036
13037 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13038 if ((TREE_CODE (arg0) == PLUS_EXPR
13039 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13040 || TREE_CODE (arg0) == MINUS_EXPR)
13041 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13042 0)),
13043 arg1, 0)
13044 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13045 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13046 {
13047 tree val = TREE_OPERAND (arg0, 1);
13048 return omit_two_operands_loc (loc, type,
13049 fold_build2_loc (loc, code, type,
13050 val,
13051 build_int_cst (TREE_TYPE (val),
13052 0)),
13053 TREE_OPERAND (arg0, 0), arg1);
13054 }
13055
13056 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
13057 if (TREE_CODE (arg0) == MINUS_EXPR
13058 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13059 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13060 1)),
13061 arg1, 0)
13062 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13063 {
13064 return omit_two_operands_loc (loc, type,
13065 code == NE_EXPR
13066 ? boolean_true_node : boolean_false_node,
13067 TREE_OPERAND (arg0, 1), arg1);
13068 }
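/* For example, 5 - X == X folds to false and 5 - X != X to true:
2*X == 5 has no solution even modulo 2^N, since the left-hand side
is always even and the right-hand side is odd. */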
13069
13070 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13071 for !=. Don't do this for ordered comparisons due to overflow. */
13072 if (TREE_CODE (arg0) == MINUS_EXPR
13073 && integer_zerop (arg1))
13074 return fold_build2_loc (loc, code, type,
13075 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13076
13077 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13078 if (TREE_CODE (arg0) == ABS_EXPR
13079 && (integer_zerop (arg1) || real_zerop (arg1)))
13080 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13081
13082 /* If this is an EQ or NE comparison with zero and ARG0 is
13083 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13084 two operations, but the latter can be done in one less insn
13085 on machines that have only two-operand insns or on which a
13086 constant cannot be the first operand. */
13087 if (TREE_CODE (arg0) == BIT_AND_EXPR
13088 && integer_zerop (arg1))
13089 {
13090 tree arg00 = TREE_OPERAND (arg0, 0);
13091 tree arg01 = TREE_OPERAND (arg0, 1);
13092 if (TREE_CODE (arg00) == LSHIFT_EXPR
13093 && integer_onep (TREE_OPERAND (arg00, 0)))
13094 {
13095 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13096 arg01, TREE_OPERAND (arg00, 1));
13097 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13098 build_int_cst (TREE_TYPE (arg0), 1));
13099 return fold_build2_loc (loc, code, type,
13100 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13101 arg1);
13102 }
13103 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13104 && integer_onep (TREE_OPERAND (arg01, 0)))
13105 {
13106 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13107 arg00, TREE_OPERAND (arg01, 1));
13108 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13109 build_int_cst (TREE_TYPE (arg0), 1));
13110 return fold_build2_loc (loc, code, type,
13111 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13112 arg1);
13113 }
13114 }
13115
13116 /* If this is an NE or EQ comparison of zero against the result of a
13117 signed MOD operation whose second operand is a power of 2, make
13118 the MOD operation unsigned since it is simpler and equivalent. */
13119 if (integer_zerop (arg1)
13120 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13121 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13122 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13123 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13124 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13125 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13126 {
13127 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13128 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13129 fold_convert_loc (loc, newtype,
13130 TREE_OPERAND (arg0, 0)),
13131 fold_convert_loc (loc, newtype,
13132 TREE_OPERAND (arg0, 1)));
13133
13134 return fold_build2_loc (loc, code, type, newmod,
13135 fold_convert_loc (loc, newtype, arg1));
13136 }
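/* For example, for signed int X, X % 4 == 0 becomes
(unsigned int) X % 4U == 0, which the TRUNC_MOD_EXPR case above
then folds to ((unsigned int) X & 3U) == 0. */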
13137
13138 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13139 C1 is a valid shift constant, and C2 is a power of two, i.e.
13140 a single bit. */
13141 if (TREE_CODE (arg0) == BIT_AND_EXPR
13142 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13143 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13144 == INTEGER_CST
13145 && integer_pow2p (TREE_OPERAND (arg0, 1))
13146 && integer_zerop (arg1))
13147 {
13148 tree itype = TREE_TYPE (arg0);
13149 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13150 prec = TYPE_PRECISION (itype);
13151
13152 /* Check for a valid shift count. */
13153 if (TREE_INT_CST_HIGH (arg001) == 0
13154 && TREE_INT_CST_LOW (arg001) < prec)
13155 {
13156 tree arg01 = TREE_OPERAND (arg0, 1);
13157 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13158 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13159 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13160 can be rewritten as (X & (C2 << C1)) != 0. */
13161 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13162 {
13163 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13164 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13165 return fold_build2_loc (loc, code, type, tem,
13166 fold_convert_loc (loc, itype, arg1));
13167 }
13168 /* Otherwise, for signed (arithmetic) shifts,
13169 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13170 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13171 else if (!TYPE_UNSIGNED (itype))
13172 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13173 arg000, build_int_cst (itype, 0));
13174 /* Otherwise, for unsigned (logical) shifts,
13175 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13176 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13177 else
13178 return omit_one_operand_loc (loc, type,
13179 code == EQ_EXPR ? integer_one_node
13180 : integer_zero_node,
13181 arg000);
13182 }
13183 }
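/* For example, ((X >> 3) & 4) != 0 becomes (X & 32) != 0, since
4 << 3 == 32 stays within the precision. */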
13184
13185 /* If we have (A & C) == C where C is a power of 2, convert this into
13186 (A & C) != 0. Similarly for NE_EXPR. */
13187 if (TREE_CODE (arg0) == BIT_AND_EXPR
13188 && integer_pow2p (TREE_OPERAND (arg0, 1))
13189 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13190 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13191 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13192 integer_zero_node));
13193
13194 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13195 bit, then fold the expression into A < 0 or A >= 0. */
13196 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13197 if (tem)
13198 return tem;
13199
13200 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13201 Similarly for NE_EXPR. */
13202 if (TREE_CODE (arg0) == BIT_AND_EXPR
13203 && TREE_CODE (arg1) == INTEGER_CST
13204 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13205 {
13206 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13207 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13208 TREE_OPERAND (arg0, 1));
13209 tree dandnotc
13210 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13211 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13212 notc);
13213 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13214 if (integer_nonzerop (dandnotc))
13215 return omit_one_operand_loc (loc, type, rslt, arg0);
13216 }
13217
13218 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13219 Similarly for NE_EXPR. */
13220 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13221 && TREE_CODE (arg1) == INTEGER_CST
13222 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13223 {
13224 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13225 tree candnotd
13226 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13227 TREE_OPERAND (arg0, 1),
13228 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13229 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13230 if (integer_nonzerop (candnotd))
13231 return omit_one_operand_loc (loc, type, rslt, arg0);
13232 }
13233
13234 /* If this is a comparison of a field, we may be able to simplify it. */
13235 if ((TREE_CODE (arg0) == COMPONENT_REF
13236 || TREE_CODE (arg0) == BIT_FIELD_REF)
13237 /* Handle the constant case even without -O
13238 to make sure the warnings are given. */
13239 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13240 {
13241 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13242 if (t1)
13243 return t1;
13244 }
13245
13246 /* Optimize comparisons of strlen vs zero to a compare of the
13247 first character of the string vs zero. To wit,
13248 strlen(ptr) == 0 => *ptr == 0
13249 strlen(ptr) != 0 => *ptr != 0
13250 Other cases should reduce to one of these two (or a constant)
13251 due to the return value of strlen being unsigned. */
13252 if (TREE_CODE (arg0) == CALL_EXPR
13253 && integer_zerop (arg1))
13254 {
13255 tree fndecl = get_callee_fndecl (arg0);
13256
13257 if (fndecl
13258 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13259 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13260 && call_expr_nargs (arg0) == 1
13261 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13262 {
13263 tree iref = build_fold_indirect_ref_loc (loc,
13264 CALL_EXPR_ARG (arg0, 0));
13265 return fold_build2_loc (loc, code, type, iref,
13266 build_int_cst (TREE_TYPE (iref), 0));
13267 }
13268 }
13269
13270 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13271 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13272 if (TREE_CODE (arg0) == RSHIFT_EXPR
13273 && integer_zerop (arg1)
13274 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13275 {
13276 tree arg00 = TREE_OPERAND (arg0, 0);
13277 tree arg01 = TREE_OPERAND (arg0, 1);
13278 tree itype = TREE_TYPE (arg00);
13279 if (TREE_INT_CST_HIGH (arg01) == 0
13280 && TREE_INT_CST_LOW (arg01)
13281 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13282 {
13283 if (TYPE_UNSIGNED (itype))
13284 {
13285 itype = signed_type_for (itype);
13286 arg00 = fold_convert_loc (loc, itype, arg00);
13287 }
13288 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13289 type, arg00, build_zero_cst (itype));
13290 }
13291 }
13292
13293 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13294 if (integer_zerop (arg1)
13295 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13296 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13297 TREE_OPERAND (arg0, 1));
13298
13299 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13300 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13301 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13302 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13303 build_zero_cst (TREE_TYPE (arg0)));
13304 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13305 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13307 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13308 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13309 build_zero_cst (TREE_TYPE (arg0)));
13310
13311 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
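/* For instance, (X ^ 5) == 3 folds to X == 6, since 5 ^ 3 is 6. */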
13312 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13313 && TREE_CODE (arg1) == INTEGER_CST
13314 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13315 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13316 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13317 TREE_OPERAND (arg0, 1), arg1));
13318
13319 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13320 (X & C) == 0 when C is a single bit. */
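/* For instance, (~X & 8) == 0 folds to (X & 8) != 0: bit 3 of ~X is
clear exactly when bit 3 of X is set. */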
13321 if (TREE_CODE (arg0) == BIT_AND_EXPR
13322 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13323 && integer_zerop (arg1)
13324 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13325 {
13326 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13327 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13328 TREE_OPERAND (arg0, 1));
13329 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13330 type, tem,
13331 fold_convert_loc (loc, TREE_TYPE (arg0),
13332 arg1));
13333 }
13334
13335 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13336 constant C is a power of two, i.e. a single bit. */
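/* For instance, ((X & 4) ^ 4) == 0 folds to (X & 4) != 0, since the
XOR clears bit 2 exactly when it was set. */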
13337 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13338 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13339 && integer_zerop (arg1)
13340 && integer_pow2p (TREE_OPERAND (arg0, 1))
13341 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13342 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13343 {
13344 tree arg00 = TREE_OPERAND (arg0, 0);
13345 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13346 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13347 }
13348
13349 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13350 when C is a power of two, i.e. a single bit. */
13351 if (TREE_CODE (arg0) == BIT_AND_EXPR
13352 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13353 && integer_zerop (arg1)
13354 && integer_pow2p (TREE_OPERAND (arg0, 1))
13355 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13356 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13357 {
13358 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13359 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13360 arg000, TREE_OPERAND (arg0, 1));
13361 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13362 tem, build_int_cst (TREE_TYPE (tem), 0));
13363 }
13364
13365 if (integer_zerop (arg1)
13366 && tree_expr_nonzero_p (arg0))
13367 {
13368 tree res = constant_boolean_node (code == NE_EXPR, type);
13369 return omit_one_operand_loc (loc, type, res, arg0);
13370 }
13371
13372 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13373 if (TREE_CODE (arg0) == NEGATE_EXPR
13374 && TREE_CODE (arg1) == NEGATE_EXPR)
13375 return fold_build2_loc (loc, code, type,
13376 TREE_OPERAND (arg0, 0),
13377 fold_convert_loc (loc, TREE_TYPE (arg0),
13378 TREE_OPERAND (arg1, 0)));
13379
13380 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
13381 if (TREE_CODE (arg0) == BIT_AND_EXPR
13382 && TREE_CODE (arg1) == BIT_AND_EXPR)
13383 {
13384 tree arg00 = TREE_OPERAND (arg0, 0);
13385 tree arg01 = TREE_OPERAND (arg0, 1);
13386 tree arg10 = TREE_OPERAND (arg1, 0);
13387 tree arg11 = TREE_OPERAND (arg1, 1);
13388 tree itype = TREE_TYPE (arg0);
13389
13390 if (operand_equal_p (arg01, arg11, 0))
13391 return fold_build2_loc (loc, code, type,
13392 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13393 fold_build2_loc (loc,
13394 BIT_XOR_EXPR, itype,
13395 arg00, arg10),
13396 arg01),
13397 build_zero_cst (itype));
13398
13399 if (operand_equal_p (arg01, arg10, 0))
13400 return fold_build2_loc (loc, code, type,
13401 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13402 fold_build2_loc (loc,
13403 BIT_XOR_EXPR, itype,
13404 arg00, arg11),
13405 arg01),
13406 build_zero_cst (itype));
13407
13408 if (operand_equal_p (arg00, arg11, 0))
13409 return fold_build2_loc (loc, code, type,
13410 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13411 fold_build2_loc (loc,
13412 BIT_XOR_EXPR, itype,
13413 arg01, arg10),
13414 arg00),
13415 build_zero_cst (itype));
13416
13417 if (operand_equal_p (arg00, arg10, 0))
13418 return fold_build2_loc (loc, code, type,
13419 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13420 fold_build2_loc (loc,
13421 BIT_XOR_EXPR, itype,
13422 arg01, arg11),
13423 arg00),
13424 build_zero_cst (itype));
13425 }
13426
13427 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13428 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13429 {
13430 tree arg00 = TREE_OPERAND (arg0, 0);
13431 tree arg01 = TREE_OPERAND (arg0, 1);
13432 tree arg10 = TREE_OPERAND (arg1, 0);
13433 tree arg11 = TREE_OPERAND (arg1, 1);
13434 tree itype = TREE_TYPE (arg0);
13435
13436 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13437 operand_equal_p guarantees no side-effects so we don't need
13438 to use omit_one_operand on Z. */
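/* For instance, (X ^ Z) == (Y ^ Z) folds directly to X == Y. */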
13439 if (operand_equal_p (arg01, arg11, 0))
13440 return fold_build2_loc (loc, code, type, arg00,
13441 fold_convert_loc (loc, TREE_TYPE (arg00),
13442 arg10));
13443 if (operand_equal_p (arg01, arg10, 0))
13444 return fold_build2_loc (loc, code, type, arg00,
13445 fold_convert_loc (loc, TREE_TYPE (arg00),
13446 arg11));
13447 if (operand_equal_p (arg00, arg11, 0))
13448 return fold_build2_loc (loc, code, type, arg01,
13449 fold_convert_loc (loc, TREE_TYPE (arg01),
13450 arg10));
13451 if (operand_equal_p (arg00, arg10, 0))
13452 return fold_build2_loc (loc, code, type, arg01,
13453 fold_convert_loc (loc, TREE_TYPE (arg01),
13454 arg11));
13455
13456 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
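/* For instance, (X ^ 1) == (Y ^ 2) folds to (X ^ 3) == Y, since
1 ^ 2 is 3. */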
13457 if (TREE_CODE (arg01) == INTEGER_CST
13458 && TREE_CODE (arg11) == INTEGER_CST)
13459 {
13460 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13461 fold_convert_loc (loc, itype, arg11));
13462 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13463 return fold_build2_loc (loc, code, type, tem,
13464 fold_convert_loc (loc, itype, arg10));
13465 }
13466 }
13467
13468 /* Attempt to simplify equality/inequality comparisons of complex
13469 values. Only lower the comparison if the result is known or
13470 can be simplified to a single scalar comparison. */
13471 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13472 || TREE_CODE (arg0) == COMPLEX_CST)
13473 && (TREE_CODE (arg1) == COMPLEX_EXPR
13474 || TREE_CODE (arg1) == COMPLEX_CST))
13475 {
13476 tree real0, imag0, real1, imag1;
13477 tree rcond, icond;
13478
13479 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13480 {
13481 real0 = TREE_OPERAND (arg0, 0);
13482 imag0 = TREE_OPERAND (arg0, 1);
13483 }
13484 else
13485 {
13486 real0 = TREE_REALPART (arg0);
13487 imag0 = TREE_IMAGPART (arg0);
13488 }
13489
13490 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13491 {
13492 real1 = TREE_OPERAND (arg1, 0);
13493 imag1 = TREE_OPERAND (arg1, 1);
13494 }
13495 else
13496 {
13497 real1 = TREE_REALPART (arg1);
13498 imag1 = TREE_IMAGPART (arg1);
13499 }
13500
13501 rcond = fold_binary_loc (loc, code, type, real0, real1);
13502 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13503 {
13504 if (integer_zerop (rcond))
13505 {
13506 if (code == EQ_EXPR)
13507 return omit_two_operands_loc (loc, type, boolean_false_node,
13508 imag0, imag1);
13509 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13510 }
13511 else
13512 {
13513 if (code == NE_EXPR)
13514 return omit_two_operands_loc (loc, type, boolean_true_node,
13515 imag0, imag1);
13516 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13517 }
13518 }
13519
13520 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13521 if (icond && TREE_CODE (icond) == INTEGER_CST)
13522 {
13523 if (integer_zerop (icond))
13524 {
13525 if (code == EQ_EXPR)
13526 return omit_two_operands_loc (loc, type, boolean_false_node,
13527 real0, real1);
13528 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13529 }
13530 else
13531 {
13532 if (code == NE_EXPR)
13533 return omit_two_operands_loc (loc, type, boolean_true_node,
13534 real0, real1);
13535 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13536 }
13537 }
13538 }
13539
13540 return NULL_TREE;
13541
13542 case LT_EXPR:
13543 case GT_EXPR:
13544 case LE_EXPR:
13545 case GE_EXPR:
13546 tem = fold_comparison (loc, code, type, op0, op1);
13547 if (tem != NULL_TREE)
13548 return tem;
13549
13550 /* Transform comparisons of the form X +- C CMP X. */
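/* For instance (illustrative, for a signed X whose overflow is
undefined): X + 1 > X folds to true and X - 1 > X folds to false,
each after warning that signed overflow is assumed not to occur. */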
13551 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13552 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13553 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13554 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13555 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13556 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13557 {
13558 tree arg01 = TREE_OPERAND (arg0, 1);
13559 enum tree_code code0 = TREE_CODE (arg0);
13560 int is_positive;
13561
13562 if (TREE_CODE (arg01) == REAL_CST)
13563 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13564 else
13565 is_positive = tree_int_cst_sgn (arg01);
13566
13567 /* (X - c) > X becomes false. */
13568 if (code == GT_EXPR
13569 && ((code0 == MINUS_EXPR && is_positive >= 0)
13570 || (code0 == PLUS_EXPR && is_positive <= 0)))
13571 {
13572 if (TREE_CODE (arg01) == INTEGER_CST
13573 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13574 fold_overflow_warning (("assuming signed overflow does not "
13575 "occur when assuming that (X - c) > X "
13576 "is always false"),
13577 WARN_STRICT_OVERFLOW_ALL);
13578 return constant_boolean_node (0, type);
13579 }
13580
13581 /* Likewise (X + c) < X becomes false. */
13582 if (code == LT_EXPR
13583 && ((code0 == PLUS_EXPR && is_positive >= 0)
13584 || (code0 == MINUS_EXPR && is_positive <= 0)))
13585 {
13586 if (TREE_CODE (arg01) == INTEGER_CST
13587 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13588 fold_overflow_warning (("assuming signed overflow does not "
13589 "occur when assuming that "
13590 "(X + c) < X is always false"),
13591 WARN_STRICT_OVERFLOW_ALL);
13592 return constant_boolean_node (0, type);
13593 }
13594
13595 /* Convert (X - c) <= X to true. */
13596 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13597 && code == LE_EXPR
13598 && ((code0 == MINUS_EXPR && is_positive >= 0)
13599 || (code0 == PLUS_EXPR && is_positive <= 0)))
13600 {
13601 if (TREE_CODE (arg01) == INTEGER_CST
13602 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13603 fold_overflow_warning (("assuming signed overflow does not "
13604 "occur when assuming that "
13605 "(X - c) <= X is always true"),
13606 WARN_STRICT_OVERFLOW_ALL);
13607 return constant_boolean_node (1, type);
13608 }
13609
13610 /* Convert (X + c) >= X to true. */
13611 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13612 && code == GE_EXPR
13613 && ((code0 == PLUS_EXPR && is_positive >= 0)
13614 || (code0 == MINUS_EXPR && is_positive <= 0)))
13615 {
13616 if (TREE_CODE (arg01) == INTEGER_CST
13617 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13618 fold_overflow_warning (("assuming signed overflow does not "
13619 "occur when assuming that "
13620 "(X + c) >= X is always true"),
13621 WARN_STRICT_OVERFLOW_ALL);
13622 return constant_boolean_node (1, type);
13623 }
13624
13625 if (TREE_CODE (arg01) == INTEGER_CST)
13626 {
13627 /* Convert X + c > X and X - c < X to true for integers. */
13628 if (code == GT_EXPR
13629 && ((code0 == PLUS_EXPR && is_positive > 0)
13630 || (code0 == MINUS_EXPR && is_positive < 0)))
13631 {
13632 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13633 fold_overflow_warning (("assuming signed overflow does "
13634 "not occur when assuming that "
13635 "(X + c) > X is always true"),
13636 WARN_STRICT_OVERFLOW_ALL);
13637 return constant_boolean_node (1, type);
13638 }
13639
13640 if (code == LT_EXPR
13641 && ((code0 == MINUS_EXPR && is_positive > 0)
13642 || (code0 == PLUS_EXPR && is_positive < 0)))
13643 {
13644 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13645 fold_overflow_warning (("assuming signed overflow does "
13646 "not occur when assuming that "
13647 "(X - c) < X is always true"),
13648 WARN_STRICT_OVERFLOW_ALL);
13649 return constant_boolean_node (1, type);
13650 }
13651
13652 /* Convert X + c <= X and X - c >= X to false for integers. */
13653 if (code == LE_EXPR
13654 && ((code0 == PLUS_EXPR && is_positive > 0)
13655 || (code0 == MINUS_EXPR && is_positive < 0)))
13656 {
13657 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13658 fold_overflow_warning (("assuming signed overflow does "
13659 "not occur when assuming that "
13660 "(X + c) <= X is always false"),
13661 WARN_STRICT_OVERFLOW_ALL);
13662 return constant_boolean_node (0, type);
13663 }
13664
13665 if (code == GE_EXPR
13666 && ((code0 == MINUS_EXPR && is_positive > 0)
13667 || (code0 == PLUS_EXPR && is_positive < 0)))
13668 {
13669 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13670 fold_overflow_warning (("assuming signed overflow does "
13671 "not occur when assuming that "
13672 "(X - c) >= X is always false"),
13673 WARN_STRICT_OVERFLOW_ALL);
13674 return constant_boolean_node (0, type);
13675 }
13676 }
13677 }
13678
13679 /* Comparisons with the highest or lowest possible integer of
13680 the specified precision will have known values. */
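/* For instance, if X has unsigned char type (precision 8) and the
comparison is carried out in that type: X <= 255 folds to 1,
X > 255 folds to 0, and X >= 255 folds to X == 255. */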
13681 {
13682 tree arg1_type = TREE_TYPE (arg1);
13683 unsigned int width = TYPE_PRECISION (arg1_type);
13684
13685 if (TREE_CODE (arg1) == INTEGER_CST
13686 && width <= HOST_BITS_PER_DOUBLE_INT
13687 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13688 {
13689 HOST_WIDE_INT signed_max_hi;
13690 unsigned HOST_WIDE_INT signed_max_lo;
13691 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13692
13693 if (width <= HOST_BITS_PER_WIDE_INT)
13694 {
13695 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13696 - 1;
13697 signed_max_hi = 0;
13698 max_hi = 0;
13699
13700 if (TYPE_UNSIGNED (arg1_type))
13701 {
13702 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13703 min_lo = 0;
13704 min_hi = 0;
13705 }
13706 else
13707 {
13708 max_lo = signed_max_lo;
13709 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13710 min_hi = -1;
13711 }
13712 }
13713 else
13714 {
13715 width -= HOST_BITS_PER_WIDE_INT;
13716 signed_max_lo = -1;
13717 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13718 - 1;
13719 max_lo = -1;
13720 min_lo = 0;
13721
13722 if (TYPE_UNSIGNED (arg1_type))
13723 {
13724 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13725 min_hi = 0;
13726 }
13727 else
13728 {
13729 max_hi = signed_max_hi;
13730 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13731 }
13732 }
13733
13734 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13735 && TREE_INT_CST_LOW (arg1) == max_lo)
13736 switch (code)
13737 {
13738 case GT_EXPR:
13739 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13740
13741 case GE_EXPR:
13742 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13743
13744 case LE_EXPR:
13745 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13746
13747 case LT_EXPR:
13748 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13749
13750 /* The GE_EXPR and LT_EXPR cases above are not normally
13751 reached because of previous transformations. */
13752
13753 default:
13754 break;
13755 }
13756 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13757 == max_hi
13758 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13759 switch (code)
13760 {
13761 case GT_EXPR:
13762 arg1 = const_binop (PLUS_EXPR, arg1,
13763 build_int_cst (TREE_TYPE (arg1), 1));
13764 return fold_build2_loc (loc, EQ_EXPR, type,
13765 fold_convert_loc (loc,
13766 TREE_TYPE (arg1), arg0),
13767 arg1);
13768 case LE_EXPR:
13769 arg1 = const_binop (PLUS_EXPR, arg1,
13770 build_int_cst (TREE_TYPE (arg1), 1));
13771 return fold_build2_loc (loc, NE_EXPR, type,
13772 fold_convert_loc (loc, TREE_TYPE (arg1),
13773 arg0),
13774 arg1);
13775 default:
13776 break;
13777 }
13778 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13779 == min_hi
13780 && TREE_INT_CST_LOW (arg1) == min_lo)
13781 switch (code)
13782 {
13783 case LT_EXPR:
13784 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13785
13786 case LE_EXPR:
13787 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13788
13789 case GE_EXPR:
13790 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13791
13792 case GT_EXPR:
13793 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13794
13795 default:
13796 break;
13797 }
13798 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13799 == min_hi
13800 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13801 switch (code)
13802 {
13803 case GE_EXPR:
13804 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13805 return fold_build2_loc (loc, NE_EXPR, type,
13806 fold_convert_loc (loc,
13807 TREE_TYPE (arg1), arg0),
13808 arg1);
13809 case LT_EXPR:
13810 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13811 return fold_build2_loc (loc, EQ_EXPR, type,
13812 fold_convert_loc (loc, TREE_TYPE (arg1),
13813 arg0),
13814 arg1);
13815 default:
13816 break;
13817 }
13818
13819 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13820 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13821 && TYPE_UNSIGNED (arg1_type)
13822 /* We will flip the signedness of the comparison operator
13823 associated with the mode of arg1, so the sign bit is
13824 specified by this mode. Check that arg1 is the signed
13825 max associated with this sign bit. */
13826 && width == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13827 /* signed_type does not work on pointer types. */
13828 && INTEGRAL_TYPE_P (arg1_type))
13829 {
13830 /* The following case also applies to X < signed_max+1
13831 and X >= signed_max+1 because of previous transformations. */
13832 if (code == LE_EXPR || code == GT_EXPR)
13833 {
13834 tree st = signed_type_for (arg1_type);
13835 return fold_build2_loc (loc,
13836 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13837 type, fold_convert_loc (loc, st, arg0),
13838 build_int_cst (st, 0));
13839 }
13840 }
13841 }
13842 }
13843
13844 /* If we are comparing an ABS_EXPR with a constant, we can
13845 convert all the cases into explicit comparisons, but they may
13846 well not be faster than doing the ABS and one comparison.
13847 But ABS (X) <= C is a range comparison, which becomes a subtraction
13848 and a comparison, and is probably faster. */
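/* For instance, ABS_EXPR <X> <= 5 folds to X >= -5 && X <= 5,
built as a TRUTH_ANDIF_EXPR of the two comparisons. */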
13849 if (code == LE_EXPR
13850 && TREE_CODE (arg1) == INTEGER_CST
13851 && TREE_CODE (arg0) == ABS_EXPR
13852 && ! TREE_SIDE_EFFECTS (arg0)
13853 && (0 != (tem = negate_expr (arg1)))
13854 && TREE_CODE (tem) == INTEGER_CST
13855 && !TREE_OVERFLOW (tem))
13856 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13857 build2 (GE_EXPR, type,
13858 TREE_OPERAND (arg0, 0), tem),
13859 build2 (LE_EXPR, type,
13860 TREE_OPERAND (arg0, 0), arg1));
13861
13862 /* Convert ABS_EXPR<x> >= 0 to true. */
13863 strict_overflow_p = false;
13864 if (code == GE_EXPR
13865 && (integer_zerop (arg1)
13866 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13867 && real_zerop (arg1)))
13868 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13869 {
13870 if (strict_overflow_p)
13871 fold_overflow_warning (("assuming signed overflow does not occur "
13872 "when simplifying comparison of "
13873 "absolute value and zero"),
13874 WARN_STRICT_OVERFLOW_CONDITIONAL);
13875 return omit_one_operand_loc (loc, type,
13876 constant_boolean_node (true, type),
13877 arg0);
13878 }
13879
13880 /* Convert ABS_EXPR<x> < 0 to false. */
13881 strict_overflow_p = false;
13882 if (code == LT_EXPR
13883 && (integer_zerop (arg1) || real_zerop (arg1))
13884 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13885 {
13886 if (strict_overflow_p)
13887 fold_overflow_warning (("assuming signed overflow does not occur "
13888 "when simplifying comparison of "
13889 "absolute value and zero"),
13890 WARN_STRICT_OVERFLOW_CONDITIONAL);
13891 return omit_one_operand_loc (loc, type,
13892 constant_boolean_node (false, type),
13893 arg0);
13894 }
13895
13896 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13897 and similarly for >= into !=. */
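/* For instance, for unsigned X: X < (1 << Y) folds to (X >> Y) == 0
and X >= (1 << Y) folds to (X >> Y) != 0. */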
13898 if ((code == LT_EXPR || code == GE_EXPR)
13899 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13900 && TREE_CODE (arg1) == LSHIFT_EXPR
13901 && integer_onep (TREE_OPERAND (arg1, 0)))
13902 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13903 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13904 TREE_OPERAND (arg1, 1)),
13905 build_zero_cst (TREE_TYPE (arg0)));
13906
13907 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13908 otherwise Y might be >= # of bits in X's type and thus e.g.
13909 (unsigned char) (1 << Y) for Y 15 might be 0.
13910 If the cast is widening, then 1 << Y should have unsigned type,
13911 otherwise if Y is number of bits in the signed shift type minus 1,
13912 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13913 31 might be 0xffffffff80000000. */
13914 if ((code == LT_EXPR || code == GE_EXPR)
13915 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13916 && CONVERT_EXPR_P (arg1)
13917 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13918 && (TYPE_PRECISION (TREE_TYPE (arg1))
13919 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13920 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13921 || (TYPE_PRECISION (TREE_TYPE (arg1))
13922 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13923 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13924 {
13925 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13926 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13927 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13928 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13929 build_zero_cst (TREE_TYPE (arg0)));
13930 }
13931
13932 return NULL_TREE;
13933
13934 case UNORDERED_EXPR:
13935 case ORDERED_EXPR:
13936 case UNLT_EXPR:
13937 case UNLE_EXPR:
13938 case UNGT_EXPR:
13939 case UNGE_EXPR:
13940 case UNEQ_EXPR:
13941 case LTGT_EXPR:
13942 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13943 {
13944 t1 = fold_relational_const (code, type, arg0, arg1);
13945 if (t1 != NULL_TREE)
13946 return t1;
13947 }
13948
13949 /* If the first operand is NaN, the result is constant. */
13950 if (TREE_CODE (arg0) == REAL_CST
13951 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13952 && (code != LTGT_EXPR || ! flag_trapping_math))
13953 {
13954 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13955 ? integer_zero_node
13956 : integer_one_node;
13957 return omit_one_operand_loc (loc, type, t1, arg1);
13958 }
13959
13960 /* If the second operand is NaN, the result is constant. */
13961 if (TREE_CODE (arg1) == REAL_CST
13962 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13963 && (code != LTGT_EXPR || ! flag_trapping_math))
13964 {
13965 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13966 ? integer_zero_node
13967 : integer_one_node;
13968 return omit_one_operand_loc (loc, type, t1, arg0);
13969 }
13970
13971 /* Simplify unordered comparison of something with itself. */
13972 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13973 && operand_equal_p (arg0, arg1, 0))
13974 return constant_boolean_node (1, type);
13975
13976 if (code == LTGT_EXPR
13977 && !flag_trapping_math
13978 && operand_equal_p (arg0, arg1, 0))
13979 return constant_boolean_node (0, type);
13980
13981 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
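/* For instance, with floats f and g, (double) f == (double) g folds
to f == g; the exact widening conversions cannot change the result. */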
13982 {
13983 tree targ0 = strip_float_extensions (arg0);
13984 tree targ1 = strip_float_extensions (arg1);
13985 tree newtype = TREE_TYPE (targ0);
13986
13987 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13988 newtype = TREE_TYPE (targ1);
13989
13990 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13991 return fold_build2_loc (loc, code, type,
13992 fold_convert_loc (loc, newtype, targ0),
13993 fold_convert_loc (loc, newtype, targ1));
13994 }
13995
13996 return NULL_TREE;
13997
13998 case COMPOUND_EXPR:
13999 /* When pedantic, a compound expression can be neither an lvalue
14000 nor an integer constant expression. */
14001 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
14002 return NULL_TREE;
14003 /* Don't let (0, 0) be a null pointer constant. */
14004 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
14005 : fold_convert_loc (loc, type, arg1);
14006 return pedantic_non_lvalue_loc (loc, tem);
14007
14008 case COMPLEX_EXPR:
14009 if ((TREE_CODE (arg0) == REAL_CST
14010 && TREE_CODE (arg1) == REAL_CST)
14011 || (TREE_CODE (arg0) == INTEGER_CST
14012 && TREE_CODE (arg1) == INTEGER_CST))
14013 return build_complex (type, arg0, arg1);
14014 if (TREE_CODE (arg0) == REALPART_EXPR
14015 && TREE_CODE (arg1) == IMAGPART_EXPR
14016 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14017 && operand_equal_p (TREE_OPERAND (arg0, 0),
14018 TREE_OPERAND (arg1, 0), 0))
14019 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14020 TREE_OPERAND (arg1, 0));
14021 return NULL_TREE;
14022
14023 case ASSERT_EXPR:
14024 /* An ASSERT_EXPR should never be passed to fold_binary. */
14025 gcc_unreachable ();
14026
14027 case VEC_PACK_TRUNC_EXPR:
14028 case VEC_PACK_FIX_TRUNC_EXPR:
14029 {
14030 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14031 tree *elts;
14032
14033 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14034 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14035 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14036 return NULL_TREE;
14037
14038 elts = XALLOCAVEC (tree, nelts);
14039 if (!vec_cst_ctor_to_array (arg0, elts)
14040 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14041 return NULL_TREE;
14042
14043 for (i = 0; i < nelts; i++)
14044 {
14045 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14046 ? NOP_EXPR : FIX_TRUNC_EXPR,
14047 TREE_TYPE (type), elts[i]);
14048 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14049 return NULL_TREE;
14050 }
14051
14052 return build_vector (type, elts);
14053 }
14054
14055 case VEC_WIDEN_MULT_LO_EXPR:
14056 case VEC_WIDEN_MULT_HI_EXPR:
14057 case VEC_WIDEN_MULT_EVEN_EXPR:
14058 case VEC_WIDEN_MULT_ODD_EXPR:
14059 {
14060 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14061 unsigned int out, ofs, scale;
14062 tree *elts;
14063
14064 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14065 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14066 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14067 return NULL_TREE;
14068
14069 elts = XALLOCAVEC (tree, nelts * 4);
14070 if (!vec_cst_ctor_to_array (arg0, elts)
14071 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14072 return NULL_TREE;
14073
14074 if (code == VEC_WIDEN_MULT_LO_EXPR)
14075 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14076 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14077 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14078 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14079 scale = 1, ofs = 0;
14080 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14081 scale = 1, ofs = 1;
14082
14083 for (out = 0; out < nelts; out++)
14084 {
14085 unsigned int in1 = (out << scale) + ofs;
14086 unsigned int in2 = in1 + nelts * 2;
14087 tree t1, t2;
14088
14089 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14090 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14091
14092 if (t1 == NULL_TREE || t2 == NULL_TREE)
14093 return NULL_TREE;
14094 elts[out] = const_binop (MULT_EXPR, t1, t2);
14095 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14096 return NULL_TREE;
14097 }
14098
14099 return build_vector (type, elts);
14100 }
14101
14102 default:
14103 return NULL_TREE;
14104 } /* switch (code) */
14105 }
14106
14107 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14108 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14109 of GOTO_EXPR. */
14110
14111 static tree
14112 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14113 {
14114 switch (TREE_CODE (*tp))
14115 {
14116 case LABEL_EXPR:
14117 return *tp;
14118
14119 case GOTO_EXPR:
14120 *walk_subtrees = 0;
14121
14122 /* ... fall through ... */
14123
14124 default:
14125 return NULL_TREE;
14126 }
14127 }
14128
14129 /* Return whether the sub-tree ST contains a label which is accessible from
14130 outside the sub-tree. */
14131
14132 static bool
14133 contains_label_p (tree st)
14134 {
14135 return
14136 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14137 }
14138
14139 /* Fold a ternary expression of code CODE and type TYPE with operands
14140 OP0, OP1, and OP2. Return the folded expression if folding is
14141 successful. Otherwise, return NULL_TREE. */
14142
14143 tree
14144 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14145 tree op0, tree op1, tree op2)
14146 {
14147 tree tem;
14148 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14149 enum tree_code_class kind = TREE_CODE_CLASS (code);
14150
14151 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14152 && TREE_CODE_LENGTH (code) == 3);
14153
14154 /* Strip any conversions that don't change the mode. This is safe
14155 for every expression, except for a comparison expression because
14156 its signedness is derived from its operands. So, in the latter
14157 case, only strip conversions that don't change the signedness.
14158
14159 Note that this is done as an internal manipulation within the
14160 constant folder, in order to find the simplest representation of
14161 the arguments so that their form can be studied. In any case,
14162 the appropriate type conversions should be put back in the tree
14163 that will get out of the constant folder. */
14164 if (op0)
14165 {
14166 arg0 = op0;
14167 STRIP_NOPS (arg0);
14168 }
14169
14170 if (op1)
14171 {
14172 arg1 = op1;
14173 STRIP_NOPS (arg1);
14174 }
14175
14176 if (op2)
14177 {
14178 arg2 = op2;
14179 STRIP_NOPS (arg2);
14180 }
14181
14182 switch (code)
14183 {
14184 case COMPONENT_REF:
14185 if (TREE_CODE (arg0) == CONSTRUCTOR
14186 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14187 {
14188 unsigned HOST_WIDE_INT idx;
14189 tree field, value;
14190 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14191 if (field == arg1)
14192 return value;
14193 }
14194 return NULL_TREE;
14195
14196 case COND_EXPR:
14197 case VEC_COND_EXPR:
14198 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14199 so all simple results must be passed through pedantic_non_lvalue. */
14200 if (TREE_CODE (arg0) == INTEGER_CST)
14201 {
14202 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14203 tem = integer_zerop (arg0) ? op2 : op1;
14204 /* Only optimize constant conditions when the selected branch
14205 has the same type as the COND_EXPR. This avoids optimizing
14206 away "c ? x : throw", where the throw has a void type.
14207 Avoid throwing away an operand that contains a label. */
14208 if ((!TREE_SIDE_EFFECTS (unused_op)
14209 || !contains_label_p (unused_op))
14210 && (! VOID_TYPE_P (TREE_TYPE (tem))
14211 || VOID_TYPE_P (type)))
14212 return pedantic_non_lvalue_loc (loc, tem);
14213 return NULL_TREE;
14214 }
14215 else if (TREE_CODE (arg0) == VECTOR_CST)
14216 {
14217 if (integer_all_onesp (arg0))
14218 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14219 if (integer_zerop (arg0))
14220 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14221
14222 if ((TREE_CODE (arg1) == VECTOR_CST
14223 || TREE_CODE (arg1) == CONSTRUCTOR)
14224 && (TREE_CODE (arg2) == VECTOR_CST
14225 || TREE_CODE (arg2) == CONSTRUCTOR))
14226 {
14227 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14228 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14229 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14230 for (i = 0; i < nelts; i++)
14231 {
14232 tree val = VECTOR_CST_ELT (arg0, i);
14233 if (integer_all_onesp (val))
14234 sel[i] = i;
14235 else if (integer_zerop (val))
14236 sel[i] = nelts + i;
14237 else /* Currently unreachable. */
14238 return NULL_TREE;
14239 }
14240 tree t = fold_vec_perm (type, arg1, arg2, sel);
14241 if (t != NULL_TREE)
14242 return t;
14243 }
14244 }
14245
14246 if (operand_equal_p (arg1, op2, 0))
14247 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14248
14249 /* If we have A op B ? A : C, we may be able to convert this to a
14250 simpler expression, depending on the operation and the values
14251 of B and C. Signed zeros prevent all of these transformations,
14252 for reasons given above each one.
14253
14254 Also try swapping the arguments and inverting the conditional. */
14255 if (COMPARISON_CLASS_P (arg0)
14256 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14257 arg1, TREE_OPERAND (arg0, 1))
14258 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14259 {
14260 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14261 if (tem)
14262 return tem;
14263 }
14264
14265 if (COMPARISON_CLASS_P (arg0)
14266 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14267 op2,
14268 TREE_OPERAND (arg0, 1))
14269 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14270 {
14271 location_t loc0 = expr_location_or (arg0, loc);
14272 tem = fold_invert_truthvalue (loc0, arg0);
14273 if (tem && COMPARISON_CLASS_P (tem))
14274 {
14275 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14276 if (tem)
14277 return tem;
14278 }
14279 }
14280
14281 /* If the second operand is simpler than the third, swap them
14282 since that produces better jump optimization results. */
14283 if (truth_value_p (TREE_CODE (arg0))
14284 && tree_swap_operands_p (op1, op2, false))
14285 {
14286 location_t loc0 = expr_location_or (arg0, loc);
14287 /* See if this can be inverted. If it can't, possibly because
14288 it was a floating-point inequality comparison, don't do
14289 anything. */
14290 tem = fold_invert_truthvalue (loc0, arg0);
14291 if (tem)
14292 return fold_build3_loc (loc, code, type, tem, op2, op1);
14293 }
14294
14295 /* Convert A ? 1 : 0 to simply A. */
14296 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14297 : (integer_onep (op1)
14298 && !VECTOR_TYPE_P (type)))
14299 && integer_zerop (op2)
14300 /* If we try to convert OP0 to our type, the
14301 call to fold will try to move the conversion inside
14302 a COND, which will recurse. In that case, the COND_EXPR
14303 is probably the best choice, so leave it alone. */
14304 && type == TREE_TYPE (arg0))
14305 return pedantic_non_lvalue_loc (loc, arg0);
14306
14307 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14308 over COND_EXPR in cases such as floating point comparisons. */
14309 if (integer_zerop (op1)
14310 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14311 : (integer_onep (op2)
14312 && !VECTOR_TYPE_P (type)))
14313 && truth_value_p (TREE_CODE (arg0)))
14314 return pedantic_non_lvalue_loc (loc,
14315 fold_convert_loc (loc, type,
14316 invert_truthvalue_loc (loc,
14317 arg0)));
14318
14319 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
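/* For instance (illustrative, assuming a 32-bit int A):
A < 0 ? INT_MIN : 0 folds to A & INT_MIN. */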
14320 if (TREE_CODE (arg0) == LT_EXPR
14321 && integer_zerop (TREE_OPERAND (arg0, 1))
14322 && integer_zerop (op2)
14323 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14324 {
14325 /* sign_bit_p looks through both zero and sign extensions,
14326 but for this optimization only sign extensions are
14327 usable. */
14328 tree tem2 = TREE_OPERAND (arg0, 0);
14329 while (tem != tem2)
14330 {
14331 if (TREE_CODE (tem2) != NOP_EXPR
14332 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14333 {
14334 tem = NULL_TREE;
14335 break;
14336 }
14337 tem2 = TREE_OPERAND (tem2, 0);
14338 }
14339 /* sign_bit_p only checks ARG1 bits within A's precision.
14340 If <sign bit of A> has wider type than A, bits outside
14341 of A's precision in <sign bit of A> need to be checked.
14342 If they are all 0, this optimization needs to be done
14343 in unsigned A's type; if they are all 1, in signed A's type;
14344 otherwise this can't be done. */
14345 if (tem
14346 && TYPE_PRECISION (TREE_TYPE (tem))
14347 < TYPE_PRECISION (TREE_TYPE (arg1))
14348 && TYPE_PRECISION (TREE_TYPE (tem))
14349 < TYPE_PRECISION (type))
14350 {
14351 unsigned HOST_WIDE_INT mask_lo;
14352 HOST_WIDE_INT mask_hi;
14353 int inner_width, outer_width;
14354 tree tem_type;
14355
14356 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14357 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14358 if (outer_width > TYPE_PRECISION (type))
14359 outer_width = TYPE_PRECISION (type);
14360
14361 if (outer_width > HOST_BITS_PER_WIDE_INT)
14362 {
14363 mask_hi = (HOST_WIDE_INT_M1U
14364 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14365 mask_lo = -1;
14366 }
14367 else
14368 {
14369 mask_hi = 0;
14370 mask_lo = (HOST_WIDE_INT_M1U
14371 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14372 }
14373 if (inner_width > HOST_BITS_PER_WIDE_INT)
14374 {
14375 mask_hi &= ~(HOST_WIDE_INT_M1U
14376 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14377 mask_lo = 0;
14378 }
14379 else
14380 mask_lo &= ~(HOST_WIDE_INT_M1U
14381 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14382
14383 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14384 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14385 {
14386 tem_type = signed_type_for (TREE_TYPE (tem));
14387 tem = fold_convert_loc (loc, tem_type, tem);
14388 }
14389 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14390 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14391 {
14392 tem_type = unsigned_type_for (TREE_TYPE (tem));
14393 tem = fold_convert_loc (loc, tem_type, tem);
14394 }
14395 else
14396 tem = NULL;
14397 }
14398
14399 if (tem)
14400 return
14401 fold_convert_loc (loc, type,
14402 fold_build2_loc (loc, BIT_AND_EXPR,
14403 TREE_TYPE (tem), tem,
14404 fold_convert_loc (loc,
14405 TREE_TYPE (tem),
14406 arg1)));
14407 }
14408
14409 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14410 already handled above. */
14411 if (TREE_CODE (arg0) == BIT_AND_EXPR
14412 && integer_onep (TREE_OPERAND (arg0, 1))
14413 && integer_zerop (op2)
14414 && integer_pow2p (arg1))
14415 {
14416 tree tem = TREE_OPERAND (arg0, 0);
14417 STRIP_NOPS (tem);
14418 if (TREE_CODE (tem) == RSHIFT_EXPR
14419 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14420 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
14421 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14422 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14423 TREE_OPERAND (tem, 0), arg1);
14424 }
14425
14426 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14427 is probably obsolete because the first operand should be a
14428 truth value (that's why we have the two cases above), but let's
14429 leave it in until we can confirm this for all front-ends. */
14430 if (integer_zerop (op2)
14431 && TREE_CODE (arg0) == NE_EXPR
14432 && integer_zerop (TREE_OPERAND (arg0, 1))
14433 && integer_pow2p (arg1)
14434 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14435 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14436 arg1, OEP_ONLY_CONST))
14437 return pedantic_non_lvalue_loc (loc,
14438 fold_convert_loc (loc, type,
14439 TREE_OPERAND (arg0, 0)));
14440
14441 /* Disable the transformations below for vectors, since
14442 fold_binary_op_with_conditional_arg may undo them immediately,
14443 yielding an infinite loop. */
14444 if (code == VEC_COND_EXPR)
14445 return NULL_TREE;
14446
14447 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14448 if (integer_zerop (op2)
14449 && truth_value_p (TREE_CODE (arg0))
14450 && truth_value_p (TREE_CODE (arg1))
14451 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14452 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14453 : TRUTH_ANDIF_EXPR,
14454 type, fold_convert_loc (loc, type, arg0), arg1);
14455
14456 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14457 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14458 && truth_value_p (TREE_CODE (arg0))
14459 && truth_value_p (TREE_CODE (arg1))
14460 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14461 {
14462 location_t loc0 = expr_location_or (arg0, loc);
14463 /* Only perform transformation if ARG0 is easily inverted. */
14464 tem = fold_invert_truthvalue (loc0, arg0);
14465 if (tem)
14466 return fold_build2_loc (loc, code == VEC_COND_EXPR
14467 ? BIT_IOR_EXPR
14468 : TRUTH_ORIF_EXPR,
14469 type, fold_convert_loc (loc, type, tem),
14470 arg1);
14471 }
14472
14473 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14474 if (integer_zerop (arg1)
14475 && truth_value_p (TREE_CODE (arg0))
14476 && truth_value_p (TREE_CODE (op2))
14477 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14478 {
14479 location_t loc0 = expr_location_or (arg0, loc);
14480 /* Only perform transformation if ARG0 is easily inverted. */
14481 tem = fold_invert_truthvalue (loc0, arg0);
14482 if (tem)
14483 return fold_build2_loc (loc, code == VEC_COND_EXPR
14484 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14485 type, fold_convert_loc (loc, type, tem),
14486 op2);
14487 }
14488
14489 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14490 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14491 && truth_value_p (TREE_CODE (arg0))
14492 && truth_value_p (TREE_CODE (op2))
14493 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14494 return fold_build2_loc (loc, code == VEC_COND_EXPR
14495 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14496 type, fold_convert_loc (loc, type, arg0), op2);
14497
14498 return NULL_TREE;
14499
14500 case CALL_EXPR:
14501 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14502 of fold_ternary on them. */
14503 gcc_unreachable ();
14504
14505 case BIT_FIELD_REF:
14506 if ((TREE_CODE (arg0) == VECTOR_CST
14507 || (TREE_CODE (arg0) == CONSTRUCTOR
14508 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14509 && (type == TREE_TYPE (TREE_TYPE (arg0))
14510 || (TREE_CODE (type) == VECTOR_TYPE
14511 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14512 {
14513 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14514 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
14515 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
14516 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
14517
14518 if (n != 0
14519 && (idx % width) == 0
14520 && (n % width) == 0
14521 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14522 {
14523 idx = idx / width;
14524 n = n / width;
14525
14526 if (TREE_CODE (arg0) == VECTOR_CST)
14527 {
14528 if (n == 1)
14529 return VECTOR_CST_ELT (arg0, idx);
14530
14531 tree *vals = XALLOCAVEC (tree, n);
14532 for (unsigned i = 0; i < n; ++i)
14533 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14534 return build_vector (type, vals);
14535 }
14536
14537 /* Constructor elements can be subvectors. */
14538 unsigned HOST_WIDE_INT k = 1;
14539 if (CONSTRUCTOR_NELTS (arg0) != 0)
14540 {
14541 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14542 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14543 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14544 }
14545
14546 /* We keep an exact subset of the constructor elements. */
14547 if ((idx % k) == 0 && (n % k) == 0)
14548 {
14549 if (CONSTRUCTOR_NELTS (arg0) == 0)
14550 return build_constructor (type, NULL);
14551 idx /= k;
14552 n /= k;
14553 if (n == 1)
14554 {
14555 if (idx < CONSTRUCTOR_NELTS (arg0))
14556 return CONSTRUCTOR_ELT (arg0, idx)->value;
14557 return build_zero_cst (type);
14558 }
14559
14560 vec<constructor_elt, va_gc> *vals;
14561 vec_alloc (vals, n);
14562 for (unsigned i = 0;
14563 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14564 ++i)
14565 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14566 CONSTRUCTOR_ELT
14567 (arg0, idx + i)->value);
14568 return build_constructor (type, vals);
14569 }
14570 /* The bitfield references a single constructor element. */
14571 else if (idx + n <= (idx / k + 1) * k)
14572 {
14573 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14574 return build_zero_cst (type);
14575 else if (n == k)
14576 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14577 else
14578 return fold_build3_loc (loc, code, type,
14579 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14580 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14581 }
14582 }
14583 }
14584
14585 /* A bit-field-ref that referenced the full argument can be stripped. */
14586 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14587 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
14588 && integer_zerop (op2))
14589 return fold_convert_loc (loc, type, arg0);
14590
14591 /* On constants we can use native encode/interpret to constant
14592 fold (nearly) all BIT_FIELD_REFs. */
14593 if (CONSTANT_CLASS_P (arg0)
14594 && can_native_interpret_type_p (type)
14595 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
14596 /* This limitation should not be necessary; we just need to
14597 round this up to mode size. */
14598 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
14599 /* Need bit-shifting of the buffer to relax the following. */
14600 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
14601 {
14602 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
14603 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
14604 unsigned HOST_WIDE_INT clen;
14605 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
14606 /* ??? We cannot tell native_encode_expr to start at
14607 some random byte only. So limit ourselves to a reasonable amount
14608 of work. */
14609 if (clen <= 4096)
14610 {
14611 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14612 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14613 if (len > 0
14614 && len * BITS_PER_UNIT >= bitpos + bitsize)
14615 {
14616 tree v = native_interpret_expr (type,
14617 b + bitpos / BITS_PER_UNIT,
14618 bitsize / BITS_PER_UNIT);
14619 if (v)
14620 return v;
14621 }
14622 }
14623 }
14624
14625 return NULL_TREE;
14626
14627 case FMA_EXPR:
14628 /* For integers we can decompose the FMA if possible. */
14629 if (TREE_CODE (arg0) == INTEGER_CST
14630 && TREE_CODE (arg1) == INTEGER_CST)
14631 return fold_build2_loc (loc, PLUS_EXPR, type,
14632 const_binop (MULT_EXPR, arg0, arg1), arg2);
14633 if (integer_zerop (arg2))
14634 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14635
14636 return fold_fma (loc, type, arg0, arg1, arg2);
14637
14638 case VEC_PERM_EXPR:
14639 if (TREE_CODE (arg2) == VECTOR_CST)
14640 {
14641 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14642 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14643 tree t;
14644 bool need_mask_canon = false;
14645 bool all_in_vec0 = true;
14646 bool all_in_vec1 = true;
14647 bool maybe_identity = true;
14648 bool single_arg = (op0 == op1);
14649 bool changed = false;
14650
14651 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14652 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14653 for (i = 0; i < nelts; i++)
14654 {
14655 tree val = VECTOR_CST_ELT (arg2, i);
14656 if (TREE_CODE (val) != INTEGER_CST)
14657 return NULL_TREE;
14658
14659 sel[i] = TREE_INT_CST_LOW (val) & mask;
14660 if (TREE_INT_CST_HIGH (val)
14661 || ((unsigned HOST_WIDE_INT)
14662 TREE_INT_CST_LOW (val) != sel[i]))
14663 need_mask_canon = true;
14664
14665 if (sel[i] < nelts)
14666 all_in_vec1 = false;
14667 else
14668 all_in_vec0 = false;
14669
14670 if ((sel[i] & (nelts-1)) != i)
14671 maybe_identity = false;
14672 }
14673
14674 if (maybe_identity)
14675 {
14676 if (all_in_vec0)
14677 return op0;
14678 if (all_in_vec1)
14679 return op1;
14680 }
14681
14682 if (all_in_vec0)
14683 op1 = op0;
14684 else if (all_in_vec1)
14685 {
14686 op0 = op1;
14687 for (i = 0; i < nelts; i++)
14688 sel[i] -= nelts;
14689 need_mask_canon = true;
14690 }
14691
14692 if ((TREE_CODE (op0) == VECTOR_CST
14693 || TREE_CODE (op0) == CONSTRUCTOR)
14694 && (TREE_CODE (op1) == VECTOR_CST
14695 || TREE_CODE (op1) == CONSTRUCTOR))
14696 {
14697 t = fold_vec_perm (type, op0, op1, sel);
14698 if (t != NULL_TREE)
14699 return t;
14700 }
14701
14702 if (op0 == op1 && !single_arg)
14703 changed = true;
14704
14705 if (need_mask_canon && arg2 == op2)
14706 {
14707 tree *tsel = XALLOCAVEC (tree, nelts);
14708 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14709 for (i = 0; i < nelts; i++)
14710 tsel[i] = build_int_cst (eltype, sel[i]);
14711 op2 = build_vector (TREE_TYPE (arg2), tsel);
14712 changed = true;
14713 }
14714
14715 if (changed)
14716 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14717 }
14718 return NULL_TREE;
14719
14720 default:
14721 return NULL_TREE;
14722 } /* switch (code) */
14723 }
14724
14725 /* Perform constant folding and related simplification of EXPR.
14726 The related simplifications include x*1 => x, x*0 => 0, etc.,
14727 and application of the associative law.
14728 NOP_EXPR conversions may be removed freely (as long as we
14729 are careful not to change the type of the overall expression).
14730 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14731 but we can constant-fold them if they have constant operands. */
14732
14733 #ifdef ENABLE_FOLD_CHECKING
14734 # define fold(x) fold_1 (x)
14735 static tree fold_1 (tree);
14736 static
14737 #endif
14738 tree
14739 fold (tree expr)
14740 {
14741 const tree t = expr;
14742 enum tree_code code = TREE_CODE (t);
14743 enum tree_code_class kind = TREE_CODE_CLASS (code);
14744 tree tem;
14745 location_t loc = EXPR_LOCATION (expr);
14746
14747 /* Return right away if a constant. */
14748 if (kind == tcc_constant)
14749 return t;
14750
14751 /* CALL_EXPR-like objects with variable numbers of operands are
14752 treated specially. */
14753 if (kind == tcc_vl_exp)
14754 {
14755 if (code == CALL_EXPR)
14756 {
14757 tem = fold_call_expr (loc, expr, false);
14758 return tem ? tem : expr;
14759 }
14760 return expr;
14761 }
14762
14763 if (IS_EXPR_CODE_CLASS (kind))
14764 {
14765 tree type = TREE_TYPE (t);
14766 tree op0, op1, op2;
14767
14768 switch (TREE_CODE_LENGTH (code))
14769 {
14770 case 1:
14771 op0 = TREE_OPERAND (t, 0);
14772 tem = fold_unary_loc (loc, code, type, op0);
14773 return tem ? tem : expr;
14774 case 2:
14775 op0 = TREE_OPERAND (t, 0);
14776 op1 = TREE_OPERAND (t, 1);
14777 tem = fold_binary_loc (loc, code, type, op0, op1);
14778 return tem ? tem : expr;
14779 case 3:
14780 op0 = TREE_OPERAND (t, 0);
14781 op1 = TREE_OPERAND (t, 1);
14782 op2 = TREE_OPERAND (t, 2);
14783 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14784 return tem ? tem : expr;
14785 default:
14786 break;
14787 }
14788 }
14789
14790 switch (code)
14791 {
14792 case ARRAY_REF:
14793 {
14794 tree op0 = TREE_OPERAND (t, 0);
14795 tree op1 = TREE_OPERAND (t, 1);
14796
14797 if (TREE_CODE (op1) == INTEGER_CST
14798 && TREE_CODE (op0) == CONSTRUCTOR
14799 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14800 {
14801 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14802 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14803 unsigned HOST_WIDE_INT begin = 0;
14804
14805 /* Find a matching index by means of a binary search. */
14806 while (begin != end)
14807 {
14808 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14809 tree index = (*elts)[middle].index;
14810
14811 if (TREE_CODE (index) == INTEGER_CST
14812 && tree_int_cst_lt (index, op1))
14813 begin = middle + 1;
14814 else if (TREE_CODE (index) == INTEGER_CST
14815 && tree_int_cst_lt (op1, index))
14816 end = middle;
14817 else if (TREE_CODE (index) == RANGE_EXPR
14818 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14819 begin = middle + 1;
14820 else if (TREE_CODE (index) == RANGE_EXPR
14821 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14822 end = middle;
14823 else
14824 return (*elts)[middle].value;
14825 }
14826 }
14827
14828 return t;
14829 }
14830
14831 /* Return a VECTOR_CST if possible. */
14832 case CONSTRUCTOR:
14833 {
14834 tree type = TREE_TYPE (t);
14835 if (TREE_CODE (type) != VECTOR_TYPE)
14836 return t;
14837
14838 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14839 unsigned HOST_WIDE_INT idx, pos = 0;
14840 tree value;
14841
14842 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14843 {
14844 if (!CONSTANT_CLASS_P (value))
14845 return t;
14846 if (TREE_CODE (value) == VECTOR_CST)
14847 {
14848 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14849 vec[pos++] = VECTOR_CST_ELT (value, i);
14850 }
14851 else
14852 vec[pos++] = value;
14853 }
14854 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14855 vec[pos] = build_zero_cst (TREE_TYPE (type));
14856
14857 return build_vector (type, vec);
14858 }
14859
14860 case CONST_DECL:
14861 return fold (DECL_INITIAL (t));
14862
14863 default:
14864 return t;
14865 } /* switch (code) */
14866 }
14867
14868 #ifdef ENABLE_FOLD_CHECKING
14869 #undef fold
14870
14871 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14872 hash_table <pointer_hash <tree_node> >);
14873 static void fold_check_failed (const_tree, const_tree);
14874 void print_fold_checksum (const_tree);
14875
14876 /* When --enable-checking=fold, compute a digest of expr before
14877 and after the actual fold call to verify that fold did not
14878 accidentally change the original expr. */
14879
14880 tree
14881 fold (tree expr)
14882 {
14883 tree ret;
14884 struct md5_ctx ctx;
14885 unsigned char checksum_before[16], checksum_after[16];
14886 hash_table <pointer_hash <tree_node> > ht;
14887
14888 ht.create (32);
14889 md5_init_ctx (&ctx);
14890 fold_checksum_tree (expr, &ctx, ht);
14891 md5_finish_ctx (&ctx, checksum_before);
14892 ht.empty ();
14893
14894 ret = fold_1 (expr);
14895
14896 md5_init_ctx (&ctx);
14897 fold_checksum_tree (expr, &ctx, ht);
14898 md5_finish_ctx (&ctx, checksum_after);
14899 ht.dispose ();
14900
14901 if (memcmp (checksum_before, checksum_after, 16))
14902 fold_check_failed (expr, ret);
14903
14904 return ret;
14905 }
14906
14907 void
14908 print_fold_checksum (const_tree expr)
14909 {
14910 struct md5_ctx ctx;
14911 unsigned char checksum[16], cnt;
14912 hash_table <pointer_hash <tree_node> > ht;
14913
14914 ht.create (32);
14915 md5_init_ctx (&ctx);
14916 fold_checksum_tree (expr, &ctx, ht);
14917 md5_finish_ctx (&ctx, checksum);
14918 ht.dispose ();
14919 for (cnt = 0; cnt < 16; ++cnt)
14920 fprintf (stderr, "%02x", checksum[cnt]);
14921 putc ('\n', stderr);
14922 }
14923
14924 static void
14925 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14926 {
14927 internal_error ("fold check: original tree changed by fold");
14928 }
14929
14930 static void
14931 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14932 hash_table <pointer_hash <tree_node> > ht)
14933 {
14934 tree_node **slot;
14935 enum tree_code code;
14936 union tree_node buf;
14937 int i, len;
14938
14939 recursive_label:
14940 if (expr == NULL)
14941 return;
14942 slot = ht.find_slot (expr, INSERT);
14943 if (*slot != NULL)
14944 return;
14945 *slot = CONST_CAST_TREE (expr);
14946 code = TREE_CODE (expr);
14947 if (TREE_CODE_CLASS (code) == tcc_declaration
14948 && DECL_ASSEMBLER_NAME_SET_P (expr))
14949 {
14950 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14951 memcpy ((char *) &buf, expr, tree_size (expr));
14952 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14953 expr = (tree) &buf;
14954 }
14955 else if (TREE_CODE_CLASS (code) == tcc_type
14956 && (TYPE_POINTER_TO (expr)
14957 || TYPE_REFERENCE_TO (expr)
14958 || TYPE_CACHED_VALUES_P (expr)
14959 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14960 || TYPE_NEXT_VARIANT (expr)))
14961 {
14962 /* Allow these fields to be modified. */
14963 tree tmp;
14964 memcpy ((char *) &buf, expr, tree_size (expr));
14965 expr = tmp = (tree) &buf;
14966 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14967 TYPE_POINTER_TO (tmp) = NULL;
14968 TYPE_REFERENCE_TO (tmp) = NULL;
14969 TYPE_NEXT_VARIANT (tmp) = NULL;
14970 if (TYPE_CACHED_VALUES_P (tmp))
14971 {
14972 TYPE_CACHED_VALUES_P (tmp) = 0;
14973 TYPE_CACHED_VALUES (tmp) = NULL;
14974 }
14975 }
14976 md5_process_bytes (expr, tree_size (expr), ctx);
14977 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14978 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14979 if (TREE_CODE_CLASS (code) != tcc_type
14980 && TREE_CODE_CLASS (code) != tcc_declaration
14981 && code != TREE_LIST
14982 && code != SSA_NAME
14983 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14984 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14985 switch (TREE_CODE_CLASS (code))
14986 {
14987 case tcc_constant:
14988 switch (code)
14989 {
14990 case STRING_CST:
14991 md5_process_bytes (TREE_STRING_POINTER (expr),
14992 TREE_STRING_LENGTH (expr), ctx);
14993 break;
14994 case COMPLEX_CST:
14995 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14996 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14997 break;
14998 case VECTOR_CST:
14999 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
15000 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
15001 break;
15002 default:
15003 break;
15004 }
15005 break;
15006 case tcc_exceptional:
15007 switch (code)
15008 {
15009 case TREE_LIST:
15010 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15011 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15012 expr = TREE_CHAIN (expr);
15013 goto recursive_label;
15014 break;
15015 case TREE_VEC:
15016 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15017 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15018 break;
15019 default:
15020 break;
15021 }
15022 break;
15023 case tcc_expression:
15024 case tcc_reference:
15025 case tcc_comparison:
15026 case tcc_unary:
15027 case tcc_binary:
15028 case tcc_statement:
15029 case tcc_vl_exp:
15030 len = TREE_OPERAND_LENGTH (expr);
15031 for (i = 0; i < len; ++i)
15032 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15033 break;
15034 case tcc_declaration:
15035 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15036 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15037 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15038 {
15039 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15040 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15041 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15042 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15043 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15044 }
15045 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15046 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15047
15048 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15049 {
15050 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15051 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15052 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15053 }
15054 break;
15055 case tcc_type:
15056 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15057 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15058 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15059 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15060 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15061 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15062 if (INTEGRAL_TYPE_P (expr)
15063 || SCALAR_FLOAT_TYPE_P (expr))
15064 {
15065 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15066 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15067 }
15068 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15069 if (TREE_CODE (expr) == RECORD_TYPE
15070 || TREE_CODE (expr) == UNION_TYPE
15071 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15072 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15073 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15074 break;
15075 default:
15076 break;
15077 }
15078 }
15079
15080 /* Helper function for outputting the checksum of a tree T. When
15081 debugging with gdb, you can "define mynext" to be "next" followed
15082 by "call debug_fold_checksum (op0)", then just trace down till the
15083 outputs differ. */
15084
15085 DEBUG_FUNCTION void
15086 debug_fold_checksum (const_tree t)
15087 {
15088 int i;
15089 unsigned char checksum[16];
15090 struct md5_ctx ctx;
15091 hash_table <pointer_hash <tree_node> > ht;
15092 ht.create (32);
15093
15094 md5_init_ctx (&ctx);
15095 fold_checksum_tree (t, &ctx, ht);
15096 md5_finish_ctx (&ctx, checksum);
15097 ht.empty ();
15098
15099 for (i = 0; i < 16; i++)
15100 fprintf (stderr, "%d ", checksum[i]);
15101
15102 fprintf (stderr, "\n");
15103 }
15104
15105 #endif
15106
15107 /* Fold a unary tree expression with code CODE of type TYPE with an
15108 operand OP0. LOC is the location of the resulting expression.
15109 Return a folded expression if successful. Otherwise, return a tree
15110 expression with code CODE of type TYPE with an operand OP0. */
15111
15112 tree
15113 fold_build1_stat_loc (location_t loc,
15114 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15115 {
15116 tree tem;
15117 #ifdef ENABLE_FOLD_CHECKING
15118 unsigned char checksum_before[16], checksum_after[16];
15119 struct md5_ctx ctx;
15120 hash_table <pointer_hash <tree_node> > ht;
15121
15122 ht.create (32);
15123 md5_init_ctx (&ctx);
15124 fold_checksum_tree (op0, &ctx, ht);
15125 md5_finish_ctx (&ctx, checksum_before);
15126 ht.empty ();
15127 #endif
15128
15129 tem = fold_unary_loc (loc, code, type, op0);
15130 if (!tem)
15131 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15132
15133 #ifdef ENABLE_FOLD_CHECKING
15134 md5_init_ctx (&ctx);
15135 fold_checksum_tree (op0, &ctx, ht);
15136 md5_finish_ctx (&ctx, checksum_after);
15137 ht.dispose ();
15138
15139 if (memcmp (checksum_before, checksum_after, 16))
15140 fold_check_failed (op0, tem);
15141 #endif
15142 return tem;
15143 }
15144
15145 /* Fold a binary tree expression with code CODE of type TYPE with
15146 operands OP0 and OP1. LOC is the location of the resulting
15147 expression. Return a folded expression if successful. Otherwise,
15148 return a tree expression with code CODE of type TYPE with operands
15149 OP0 and OP1. */
15150
15151 tree
15152 fold_build2_stat_loc (location_t loc,
15153 enum tree_code code, tree type, tree op0, tree op1
15154 MEM_STAT_DECL)
15155 {
15156 tree tem;
15157 #ifdef ENABLE_FOLD_CHECKING
15158 unsigned char checksum_before_op0[16],
15159 checksum_before_op1[16],
15160 checksum_after_op0[16],
15161 checksum_after_op1[16];
15162 struct md5_ctx ctx;
15163 hash_table <pointer_hash <tree_node> > ht;
15164
15165 ht.create (32);
15166 md5_init_ctx (&ctx);
15167 fold_checksum_tree (op0, &ctx, ht);
15168 md5_finish_ctx (&ctx, checksum_before_op0);
15169 ht.empty ();
15170
15171 md5_init_ctx (&ctx);
15172 fold_checksum_tree (op1, &ctx, ht);
15173 md5_finish_ctx (&ctx, checksum_before_op1);
15174 ht.empty ();
15175 #endif
15176
15177 tem = fold_binary_loc (loc, code, type, op0, op1);
15178 if (!tem)
15179 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15180
15181 #ifdef ENABLE_FOLD_CHECKING
15182 md5_init_ctx (&ctx);
15183 fold_checksum_tree (op0, &ctx, ht);
15184 md5_finish_ctx (&ctx, checksum_after_op0);
15185 ht.empty ();
15186
15187 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15188 fold_check_failed (op0, tem);
15189
15190 md5_init_ctx (&ctx);
15191 fold_checksum_tree (op1, &ctx, ht);
15192 md5_finish_ctx (&ctx, checksum_after_op1);
15193 ht.dispose ();
15194
15195 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15196 fold_check_failed (op1, tem);
15197 #endif
15198 return tem;
15199 }
15200
15201 /* Fold a ternary tree expression with code CODE of type TYPE with
15202 operands OP0, OP1, and OP2. Return a folded expression if
15203 successful. Otherwise, return a tree expression with code CODE of
15204 type TYPE with operands OP0, OP1, and OP2. */
15205
15206 tree
15207 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15208 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15209 {
15210 tree tem;
15211 #ifdef ENABLE_FOLD_CHECKING
15212 unsigned char checksum_before_op0[16],
15213 checksum_before_op1[16],
15214 checksum_before_op2[16],
15215 checksum_after_op0[16],
15216 checksum_after_op1[16],
15217 checksum_after_op2[16];
15218 struct md5_ctx ctx;
15219 hash_table <pointer_hash <tree_node> > ht;
15220
15221 ht.create (32);
15222 md5_init_ctx (&ctx);
15223 fold_checksum_tree (op0, &ctx, ht);
15224 md5_finish_ctx (&ctx, checksum_before_op0);
15225 ht.empty ();
15226
15227 md5_init_ctx (&ctx);
15228 fold_checksum_tree (op1, &ctx, ht);
15229 md5_finish_ctx (&ctx, checksum_before_op1);
15230 ht.empty ();
15231
15232 md5_init_ctx (&ctx);
15233 fold_checksum_tree (op2, &ctx, ht);
15234 md5_finish_ctx (&ctx, checksum_before_op2);
15235 ht.empty ();
15236 #endif
15237
15238 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15239 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15240 if (!tem)
15241 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15242
15243 #ifdef ENABLE_FOLD_CHECKING
15244 md5_init_ctx (&ctx);
15245 fold_checksum_tree (op0, &ctx, ht);
15246 md5_finish_ctx (&ctx, checksum_after_op0);
15247 ht.empty ();
15248
15249 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15250 fold_check_failed (op0, tem);
15251
15252 md5_init_ctx (&ctx);
15253 fold_checksum_tree (op1, &ctx, ht);
15254 md5_finish_ctx (&ctx, checksum_after_op1);
15255 ht.empty ();
15256
15257 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15258 fold_check_failed (op1, tem);
15259
15260 md5_init_ctx (&ctx);
15261 fold_checksum_tree (op2, &ctx, ht);
15262 md5_finish_ctx (&ctx, checksum_after_op2);
15263 ht.dispose ();
15264
15265 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15266 fold_check_failed (op2, tem);
15267 #endif
15268 return tem;
15269 }
15270
15271 /* Fold a CALL_EXPR expression of type TYPE with callee FN, the NARGS
15272 arguments in ARGARRAY, and a null static chain.
15273 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15274 of type TYPE from the given operands as constructed by build_call_array. */
15275
15276 tree
15277 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15278 int nargs, tree *argarray)
15279 {
15280 tree tem;
15281 #ifdef ENABLE_FOLD_CHECKING
15282 unsigned char checksum_before_fn[16],
15283 checksum_before_arglist[16],
15284 checksum_after_fn[16],
15285 checksum_after_arglist[16];
15286 struct md5_ctx ctx;
15287 hash_table <pointer_hash <tree_node> > ht;
15288 int i;
15289
15290 ht.create (32);
15291 md5_init_ctx (&ctx);
15292 fold_checksum_tree (fn, &ctx, ht);
15293 md5_finish_ctx (&ctx, checksum_before_fn);
15294 ht.empty ();
15295
15296 md5_init_ctx (&ctx);
15297 for (i = 0; i < nargs; i++)
15298 fold_checksum_tree (argarray[i], &ctx, ht);
15299 md5_finish_ctx (&ctx, checksum_before_arglist);
15300 ht.empty ();
15301 #endif
15302
15303 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15304
15305 #ifdef ENABLE_FOLD_CHECKING
15306 md5_init_ctx (&ctx);
15307 fold_checksum_tree (fn, &ctx, ht);
15308 md5_finish_ctx (&ctx, checksum_after_fn);
15309 ht.empty ();
15310
15311 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15312 fold_check_failed (fn, tem);
15313
15314 md5_init_ctx (&ctx);
15315 for (i = 0; i < nargs; i++)
15316 fold_checksum_tree (argarray[i], &ctx, ht);
15317 md5_finish_ctx (&ctx, checksum_after_arglist);
15318 ht.dispose ();
15319
15320 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15321 fold_check_failed (NULL_TREE, tem);
15322 #endif
15323 return tem;
15324 }
15325
15326 /* Perform constant folding and related simplification of initializer
15327 expression EXPR. These behave identically to "fold_buildN" but ignore
15328 potential run-time traps and exceptions that fold must preserve. */
15329
15330 #define START_FOLD_INIT \
15331 int saved_signaling_nans = flag_signaling_nans;\
15332 int saved_trapping_math = flag_trapping_math;\
15333 int saved_rounding_math = flag_rounding_math;\
15334 int saved_trapv = flag_trapv;\
15335 int saved_folding_initializer = folding_initializer;\
15336 flag_signaling_nans = 0;\
15337 flag_trapping_math = 0;\
15338 flag_rounding_math = 0;\
15339 flag_trapv = 0;\
15340 folding_initializer = 1;
15341
15342 #define END_FOLD_INIT \
15343 flag_signaling_nans = saved_signaling_nans;\
15344 flag_trapping_math = saved_trapping_math;\
15345 flag_rounding_math = saved_rounding_math;\
15346 flag_trapv = saved_trapv;\
15347 folding_initializer = saved_folding_initializer;
15348
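
/* Illustrative sketch only (hypothetical names, not GCC API): the macro
   pair above is a save/override/restore protocol for global flags,
   equivalent in shape to the following helper.  */

static int example_flag;	/* stands in for flag_trapping_math etc. */

static int
example_with_flag_cleared (int (*fn) (int), int arg)
{
  int saved = example_flag;	/* START_FOLD_INIT: save the old value */
  int result;
  example_flag = 0;		/* force the permissive setting */
  result = fn (arg);
  example_flag = saved;		/* END_FOLD_INIT: restore */
  return result;
}
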
15349 tree
15350 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15351 tree type, tree op)
15352 {
15353 tree result;
15354 START_FOLD_INIT;
15355
15356 result = fold_build1_loc (loc, code, type, op);
15357
15358 END_FOLD_INIT;
15359 return result;
15360 }
15361
15362 tree
15363 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15364 tree type, tree op0, tree op1)
15365 {
15366 tree result;
15367 START_FOLD_INIT;
15368
15369 result = fold_build2_loc (loc, code, type, op0, op1);
15370
15371 END_FOLD_INIT;
15372 return result;
15373 }
15374
15375 tree
15376 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15377 int nargs, tree *argarray)
15378 {
15379 tree result;
15380 START_FOLD_INIT;
15381
15382 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15383
15384 END_FOLD_INIT;
15385 return result;
15386 }
15387
15388 #undef START_FOLD_INIT
15389 #undef END_FOLD_INIT
15390
15391 /* Determine whether the first argument is a multiple of the second
15392 argument. Return 0 if it is not, or if we cannot easily determine that it is.
15393
15394 An example of the sort of thing we care about (at this point; this routine
15395 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15396 fold cases do now) is discovering that
15397
15398 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15399
15400 is a multiple of
15401
15402 SAVE_EXPR (J * 8)
15403
15404 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15405
15406 This code also handles discovering that
15407
15408 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15409
15410 is a multiple of 8 so we don't have to worry about dealing with a
15411 possible remainder.
15412
15413 Note that we *look* inside a SAVE_EXPR only to determine how it was
15414 calculated; it is not safe for fold to do much of anything else with the
15415 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15416 at run time. For example, the latter example above *cannot* be implemented
15417 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15418 evaluation time of the original SAVE_EXPR is not necessarily the same at
15419 the time the new expression is evaluated. The only optimization of this
15420 sort that would be valid is changing
15421
15422 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15423
15424 divided by 8 to
15425
15426 SAVE_EXPR (I) * SAVE_EXPR (J)
15427
15428 (where the same SAVE_EXPR (J) is used in the original and the
15429 transformed version). */
15430
15431 int
15432 multiple_of_p (tree type, const_tree top, const_tree bottom)
15433 {
15434 if (operand_equal_p (top, bottom, 0))
15435 return 1;
15436
15437 if (TREE_CODE (type) != INTEGER_TYPE)
15438 return 0;
15439
15440 switch (TREE_CODE (top))
15441 {
15442 case BIT_AND_EXPR:
15443 /* Bitwise AND only preserves multiples of a power of two: if either
15444 operand is a multiple of a power-of-two BOTTOM, then so is TOP. */
15445 if (!integer_pow2p (bottom))
15446 return 0;
15447 /* FALLTHRU */
15448
15449 case MULT_EXPR:
15450 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15451 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15452
15453 case PLUS_EXPR:
15454 case MINUS_EXPR:
15455 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15456 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15457
15458 case LSHIFT_EXPR:
15459 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15460 {
15461 tree op1, t1;
15462
15463 op1 = TREE_OPERAND (top, 1);
15464 /* const_binop may not detect overflow correctly,
15465 so check for it explicitly here. */
15466 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15467 > TREE_INT_CST_LOW (op1)
15468 && TREE_INT_CST_HIGH (op1) == 0
15469 && 0 != (t1 = fold_convert (type,
15470 const_binop (LSHIFT_EXPR,
15471 size_one_node,
15472 op1)))
15473 && !TREE_OVERFLOW (t1))
15474 return multiple_of_p (type, t1, bottom);
15475 }
15476 return 0;
15477
15478 case NOP_EXPR:
15479 /* Can't handle conversions from a non-integral or a wider integral type. */
15480 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15481 || (TYPE_PRECISION (type)
15482 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15483 return 0;
15484
15485 /* ... fall through ... */
15486
15487 case SAVE_EXPR:
15488 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15489
15490 case COND_EXPR:
15491 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15492 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15493
15494 case INTEGER_CST:
15495 if (TREE_CODE (bottom) != INTEGER_CST
15496 || integer_zerop (bottom)
15497 || (TYPE_UNSIGNED (type)
15498 && (tree_int_cst_sgn (top) < 0
15499 || tree_int_cst_sgn (bottom) < 0)))
15500 return 0;
15501 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15502 top, bottom));
15503
15504 default:
15505 return 0;
15506 }
15507 }
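
/* Illustrative sketch only (a toy type, not GCC trees): the recursion
   above, reduced to constants, PLUS and MULT.  Note the asymmetry that
   also appears in multiple_of_p: a product is a multiple of BOTTOM when
   either factor is, while a sum requires both addends to be.  */

enum example_code { EX_CST, EX_PLUS, EX_MULT };

struct example_expr
{
  enum example_code code;
  long cst;				/* valid when code == EX_CST */
  const struct example_expr *op0, *op1;	/* valid otherwise */
};

static int
example_multiple_of_p (const struct example_expr *top, long bottom)
{
  switch (top->code)
    {
    case EX_CST:
      return bottom != 0 && top->cst % bottom == 0;
    case EX_MULT:
      return (example_multiple_of_p (top->op0, bottom)
	      || example_multiple_of_p (top->op1, bottom));
    case EX_PLUS:
      return (example_multiple_of_p (top->op0, bottom)
	      && example_multiple_of_p (top->op1, bottom));
    }
  return 0;
}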
15508
15509 /* Return true if CODE or TYPE is known to be non-negative. */
15510
15511 static bool
15512 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15513 {
15514 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15515 && truth_value_p (code))
15516 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15517 have a signed:1 type (where the values are 0 and -1). */
15518 return true;
15519 return false;
15520 }
15521
15522 /* Return true if (CODE OP0) is known to be non-negative. If the return
15523 value is based on the assumption that signed overflow is undefined,
15524 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15525 *STRICT_OVERFLOW_P. */
15526
15527 bool
15528 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15529 bool *strict_overflow_p)
15530 {
15531 if (TYPE_UNSIGNED (type))
15532 return true;
15533
15534 switch (code)
15535 {
15536 case ABS_EXPR:
15537 /* We can't return true when overflow wraps (e.g. flag_wrapv),
15538 because ABS_EXPR<INT_MIN> == INT_MIN. */
15539 if (!INTEGRAL_TYPE_P (type))
15540 return true;
15541 if (TYPE_OVERFLOW_UNDEFINED (type))
15542 {
15543 *strict_overflow_p = true;
15544 return true;
15545 }
15546 break;
15547
15548 case NON_LVALUE_EXPR:
15549 case FLOAT_EXPR:
15550 case FIX_TRUNC_EXPR:
15551 return tree_expr_nonnegative_warnv_p (op0,
15552 strict_overflow_p);
15553
15554 case NOP_EXPR:
15555 {
15556 tree inner_type = TREE_TYPE (op0);
15557 tree outer_type = type;
15558
15559 if (TREE_CODE (outer_type) == REAL_TYPE)
15560 {
15561 if (TREE_CODE (inner_type) == REAL_TYPE)
15562 return tree_expr_nonnegative_warnv_p (op0,
15563 strict_overflow_p);
15564 if (INTEGRAL_TYPE_P (inner_type))
15565 {
15566 if (TYPE_UNSIGNED (inner_type))
15567 return true;
15568 return tree_expr_nonnegative_warnv_p (op0,
15569 strict_overflow_p);
15570 }
15571 }
15572 else if (INTEGRAL_TYPE_P (outer_type))
15573 {
15574 if (TREE_CODE (inner_type) == REAL_TYPE)
15575 return tree_expr_nonnegative_warnv_p (op0,
15576 strict_overflow_p);
15577 if (INTEGRAL_TYPE_P (inner_type))
15578 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15579 && TYPE_UNSIGNED (inner_type);
15580 }
15581 }
15582 break;
15583
15584 default:
15585 return tree_simple_nonnegative_warnv_p (code, type);
15586 }
15587
15588 /* We don't know the sign of `t', so be conservative and return false. */
15589 return false;
15590 }
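
/* Illustration of the NOP_EXPR rule above, in plain C: a conversion that
   widens from an unsigned type can never produce a negative value.  */

static int
example_widened_is_nonnegative (unsigned char x)
{
  int wide = x;		/* inner precision 8 < outer precision of int */
  return wide >= 0;	/* always 1 */
}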
15591
15592 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15593 value is based on the assumption that signed overflow is undefined,
15594 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15595 *STRICT_OVERFLOW_P. */
15596
15597 bool
15598 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15599 tree op1, bool *strict_overflow_p)
15600 {
15601 if (TYPE_UNSIGNED (type))
15602 return true;
15603
15604 switch (code)
15605 {
15606 case POINTER_PLUS_EXPR:
15607 case PLUS_EXPR:
15608 if (FLOAT_TYPE_P (type))
15609 return (tree_expr_nonnegative_warnv_p (op0,
15610 strict_overflow_p)
15611 && tree_expr_nonnegative_warnv_p (op1,
15612 strict_overflow_p));
15613
15614 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15615 both unsigned and at least 2 bits shorter than the result. */
15616 if (TREE_CODE (type) == INTEGER_TYPE
15617 && TREE_CODE (op0) == NOP_EXPR
15618 && TREE_CODE (op1) == NOP_EXPR)
15619 {
15620 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15621 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15622 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15623 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15624 {
15625 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15626 TYPE_PRECISION (inner2)) + 1;
15627 return prec < TYPE_PRECISION (type);
15628 }
15629 }
15630 break;
15631
15632 case MULT_EXPR:
15633 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15634 {
15635 /* x * x is always non-negative for floating-point x,
15636 or when signed overflow is undefined. */
15637 if (operand_equal_p (op0, op1, 0)
15638 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15639 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15640 {
15641 if (TYPE_OVERFLOW_UNDEFINED (type))
15642 *strict_overflow_p = true;
15643 return true;
15644 }
15645 }
15646
15647 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15648 both unsigned and the sum of their precisions is less than that of the result. */
15649 if (TREE_CODE (type) == INTEGER_TYPE
15650 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15651 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15652 {
15653 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15654 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15655 : TREE_TYPE (op0);
15656 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15657 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15658 : TREE_TYPE (op1);
15659
15660 bool unsigned0 = TYPE_UNSIGNED (inner0);
15661 bool unsigned1 = TYPE_UNSIGNED (inner1);
15662
15663 if (TREE_CODE (op0) == INTEGER_CST)
15664 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15665
15666 if (TREE_CODE (op1) == INTEGER_CST)
15667 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15668
15669 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15670 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15671 {
15672 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15673 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15674 : TYPE_PRECISION (inner0);
15675
15676 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15677 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15678 : TYPE_PRECISION (inner1);
15679
15680 return precision0 + precision1 < TYPE_PRECISION (type);
15681 }
15682 }
15683 return false;
15684
15685 case BIT_AND_EXPR:
15686 case MAX_EXPR:
15687 return (tree_expr_nonnegative_warnv_p (op0,
15688 strict_overflow_p)
15689 || tree_expr_nonnegative_warnv_p (op1,
15690 strict_overflow_p));
15691
15692 case BIT_IOR_EXPR:
15693 case BIT_XOR_EXPR:
15694 case MIN_EXPR:
15695 case RDIV_EXPR:
15696 case TRUNC_DIV_EXPR:
15697 case CEIL_DIV_EXPR:
15698 case FLOOR_DIV_EXPR:
15699 case ROUND_DIV_EXPR:
15700 return (tree_expr_nonnegative_warnv_p (op0,
15701 strict_overflow_p)
15702 && tree_expr_nonnegative_warnv_p (op1,
15703 strict_overflow_p));
15704
15705 case TRUNC_MOD_EXPR:
15706 case CEIL_MOD_EXPR:
15707 case FLOOR_MOD_EXPR:
15708 case ROUND_MOD_EXPR:
15709 return tree_expr_nonnegative_warnv_p (op0,
15710 strict_overflow_p);
15711 default:
15712 return tree_simple_nonnegative_warnv_p (code, type);
15713 }
15714
15715 /* We don't know the sign of `t', so be conservative and return false. */
15716 return false;
15717 }
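
/* Illustration of the PLUS_EXPR precision argument above, in plain C:
   two 8-bit unsigned values zero-extended into int sum to at most 510,
   which needs 9 bits.  MAX (8, 8) + 1 = 9 is less than the precision of
   int, so the sign bit can never be set.  */

static int
example_sum_is_nonnegative (unsigned char a, unsigned char b)
{
  int sum = (int) a + (int) b;
  return sum >= 0;	/* always 1 */
}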
15718
15719 /* Return true if T is known to be non-negative. If the return
15720 value is based on the assumption that signed overflow is undefined,
15721 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15722 *STRICT_OVERFLOW_P. */
15723
15724 bool
15725 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15726 {
15727 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15728 return true;
15729
15730 switch (TREE_CODE (t))
15731 {
15732 case INTEGER_CST:
15733 return tree_int_cst_sgn (t) >= 0;
15734
15735 case REAL_CST:
15736 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15737
15738 case FIXED_CST:
15739 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15740
15741 case COND_EXPR:
15742 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15743 strict_overflow_p)
15744 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15745 strict_overflow_p));
15746 default:
15747 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15748 TREE_TYPE (t));
15749 }
15750 /* We don't know the sign of `t', so be conservative and return false. */
15751 return false;
15752 }
15753
15754 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is
15755 known to be non-negative. If the return value is based on the
15756 assumption that signed overflow is undefined, set *STRICT_OVERFLOW_P
15757 to true; otherwise, don't change *STRICT_OVERFLOW_P. */
15758
15759 bool
15760 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15761 tree arg0, tree arg1, bool *strict_overflow_p)
15762 {
15763 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15764 switch (DECL_FUNCTION_CODE (fndecl))
15765 {
15766 CASE_FLT_FN (BUILT_IN_ACOS):
15767 CASE_FLT_FN (BUILT_IN_ACOSH):
15768 CASE_FLT_FN (BUILT_IN_CABS):
15769 CASE_FLT_FN (BUILT_IN_COSH):
15770 CASE_FLT_FN (BUILT_IN_ERFC):
15771 CASE_FLT_FN (BUILT_IN_EXP):
15772 CASE_FLT_FN (BUILT_IN_EXP10):
15773 CASE_FLT_FN (BUILT_IN_EXP2):
15774 CASE_FLT_FN (BUILT_IN_FABS):
15775 CASE_FLT_FN (BUILT_IN_FDIM):
15776 CASE_FLT_FN (BUILT_IN_HYPOT):
15777 CASE_FLT_FN (BUILT_IN_POW10):
15778 CASE_INT_FN (BUILT_IN_FFS):
15779 CASE_INT_FN (BUILT_IN_PARITY):
15780 CASE_INT_FN (BUILT_IN_POPCOUNT):
15781 CASE_INT_FN (BUILT_IN_CLZ):
15782 CASE_INT_FN (BUILT_IN_CLRSB):
15783 case BUILT_IN_BSWAP32:
15784 case BUILT_IN_BSWAP64:
15785 /* Always true. */
15786 return true;
15787
15788 CASE_FLT_FN (BUILT_IN_SQRT):
15789 /* sqrt(-0.0) is -0.0. */
15790 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15791 return true;
15792 return tree_expr_nonnegative_warnv_p (arg0,
15793 strict_overflow_p);
15794
15795 CASE_FLT_FN (BUILT_IN_ASINH):
15796 CASE_FLT_FN (BUILT_IN_ATAN):
15797 CASE_FLT_FN (BUILT_IN_ATANH):
15798 CASE_FLT_FN (BUILT_IN_CBRT):
15799 CASE_FLT_FN (BUILT_IN_CEIL):
15800 CASE_FLT_FN (BUILT_IN_ERF):
15801 CASE_FLT_FN (BUILT_IN_EXPM1):
15802 CASE_FLT_FN (BUILT_IN_FLOOR):
15803 CASE_FLT_FN (BUILT_IN_FMOD):
15804 CASE_FLT_FN (BUILT_IN_FREXP):
15805 CASE_FLT_FN (BUILT_IN_ICEIL):
15806 CASE_FLT_FN (BUILT_IN_IFLOOR):
15807 CASE_FLT_FN (BUILT_IN_IRINT):
15808 CASE_FLT_FN (BUILT_IN_IROUND):
15809 CASE_FLT_FN (BUILT_IN_LCEIL):
15810 CASE_FLT_FN (BUILT_IN_LDEXP):
15811 CASE_FLT_FN (BUILT_IN_LFLOOR):
15812 CASE_FLT_FN (BUILT_IN_LLCEIL):
15813 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15814 CASE_FLT_FN (BUILT_IN_LLRINT):
15815 CASE_FLT_FN (BUILT_IN_LLROUND):
15816 CASE_FLT_FN (BUILT_IN_LRINT):
15817 CASE_FLT_FN (BUILT_IN_LROUND):
15818 CASE_FLT_FN (BUILT_IN_MODF):
15819 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15820 CASE_FLT_FN (BUILT_IN_RINT):
15821 CASE_FLT_FN (BUILT_IN_ROUND):
15822 CASE_FLT_FN (BUILT_IN_SCALB):
15823 CASE_FLT_FN (BUILT_IN_SCALBLN):
15824 CASE_FLT_FN (BUILT_IN_SCALBN):
15825 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15826 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15827 CASE_FLT_FN (BUILT_IN_SINH):
15828 CASE_FLT_FN (BUILT_IN_TANH):
15829 CASE_FLT_FN (BUILT_IN_TRUNC):
15830 /* True if the 1st argument is nonnegative. */
15831 return tree_expr_nonnegative_warnv_p (arg0,
15832 strict_overflow_p);
15833
15834 CASE_FLT_FN (BUILT_IN_FMAX):
15835 /* True if the 1st OR 2nd arguments are nonnegative. */
15836 return (tree_expr_nonnegative_warnv_p (arg0,
15837 strict_overflow_p)
15838 || (tree_expr_nonnegative_warnv_p (arg1,
15839 strict_overflow_p)));
15840
15841 CASE_FLT_FN (BUILT_IN_FMIN):
15842 /* True if the 1st AND 2nd arguments are nonnegative. */
15843 return (tree_expr_nonnegative_warnv_p (arg0,
15844 strict_overflow_p)
15845 && (tree_expr_nonnegative_warnv_p (arg1,
15846 strict_overflow_p)));
15847
15848 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15849 /* True if the 2nd argument is nonnegative. */
15850 return tree_expr_nonnegative_warnv_p (arg1,
15851 strict_overflow_p);
15852
15853 CASE_FLT_FN (BUILT_IN_POWI):
15854 /* True if the 1st argument is nonnegative or the second
15855 argument is an even integer. */
15856 if (TREE_CODE (arg1) == INTEGER_CST
15857 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15858 return true;
15859 return tree_expr_nonnegative_warnv_p (arg0,
15860 strict_overflow_p);
15861
15862 CASE_FLT_FN (BUILT_IN_POW):
15863 /* True if the 1st argument is nonnegative or the second
15864 argument is an even integer valued real. */
15865 if (TREE_CODE (arg1) == REAL_CST)
15866 {
15867 REAL_VALUE_TYPE c;
15868 HOST_WIDE_INT n;
15869
15870 c = TREE_REAL_CST (arg1);
15871 n = real_to_integer (&c);
15872 if ((n & 1) == 0)
15873 {
15874 REAL_VALUE_TYPE cint;
15875 real_from_integer (&cint, VOIDmode, n,
15876 n < 0 ? -1 : 0, 0);
15877 if (real_identical (&c, &cint))
15878 return true;
15879 }
15880 }
15881 return tree_expr_nonnegative_warnv_p (arg0,
15882 strict_overflow_p);
15883
15884 default:
15885 break;
15886 }
15887 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15888 type);
15889 }
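
/* Illustration of the BUILT_IN_POW rule above: when the exponent is an
   even integer-valued constant, pow (x, n) equals pow (fabs (x), n), so
   the result is nonnegative for any non-NaN x.  */

#include <math.h>

static double
example_pow_even (double x)
{
  return pow (x, 4.0);	/* >= 0 even for negative x */
}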
15890
15891 /* Return true if T is known to be non-negative. If the return
15892 value is based on the assumption that signed overflow is undefined,
15893 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15894 *STRICT_OVERFLOW_P. */
15895
15896 static bool
15897 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15898 {
15899 enum tree_code code = TREE_CODE (t);
15900 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15901 return true;
15902
15903 switch (code)
15904 {
15905 case TARGET_EXPR:
15906 {
15907 tree temp = TARGET_EXPR_SLOT (t);
15908 t = TARGET_EXPR_INITIAL (t);
15909
15910 /* If the initializer is non-void, then it's a normal expression
15911 that will be assigned to the slot. */
15912 if (!VOID_TYPE_P (t))
15913 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15914
15915 /* Otherwise, the initializer sets the slot in some way. One common
15916 way is an assignment statement at the end of the initializer. */
15917 while (1)
15918 {
15919 if (TREE_CODE (t) == BIND_EXPR)
15920 t = expr_last (BIND_EXPR_BODY (t));
15921 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15922 || TREE_CODE (t) == TRY_CATCH_EXPR)
15923 t = expr_last (TREE_OPERAND (t, 0));
15924 else if (TREE_CODE (t) == STATEMENT_LIST)
15925 t = expr_last (t);
15926 else
15927 break;
15928 }
15929 if (TREE_CODE (t) == MODIFY_EXPR
15930 && TREE_OPERAND (t, 0) == temp)
15931 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15932 strict_overflow_p);
15933
15934 return false;
15935 }
15936
15937 case CALL_EXPR:
15938 {
15939 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15940 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15941
15942 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15943 get_callee_fndecl (t),
15944 arg0,
15945 arg1,
15946 strict_overflow_p);
15947 }
15948 case COMPOUND_EXPR:
15949 case MODIFY_EXPR:
15950 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15951 strict_overflow_p);
15952 case BIND_EXPR:
15953 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15954 strict_overflow_p);
15955 case SAVE_EXPR:
15956 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15957 strict_overflow_p);
15958
15959 default:
15960 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15961 TREE_TYPE (t));
15962 }
15963
15964 /* We don't know the sign of `t', so be conservative and return false. */
15965 return false;
15966 }
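
/* For instance (an illustrative tree, in the notation used elsewhere in
   this file): the initializer of

     TARGET_EXPR <D.1234, { ...; D.1234 = b * b; }>

   ends in an assignment to the slot, so the walk above reduces the
   question to whether b * b is nonnegative.  */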
15967
15968 /* Return true if T is known to be non-negative. If the return
15969 value is based on the assumption that signed overflow is undefined,
15970 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15971 *STRICT_OVERFLOW_P. */
15972
15973 bool
15974 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15975 {
15976 enum tree_code code;
15977 if (t == error_mark_node)
15978 return false;
15979
15980 code = TREE_CODE (t);
15981 switch (TREE_CODE_CLASS (code))
15982 {
15983 case tcc_binary:
15984 case tcc_comparison:
15985 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15986 TREE_TYPE (t),
15987 TREE_OPERAND (t, 0),
15988 TREE_OPERAND (t, 1),
15989 strict_overflow_p);
15990
15991 case tcc_unary:
15992 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15993 TREE_TYPE (t),
15994 TREE_OPERAND (t, 0),
15995 strict_overflow_p);
15996
15997 case tcc_constant:
15998 case tcc_declaration:
15999 case tcc_reference:
16000 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16001
16002 default:
16003 break;
16004 }
16005
16006 switch (code)
16007 {
16008 case TRUTH_AND_EXPR:
16009 case TRUTH_OR_EXPR:
16010 case TRUTH_XOR_EXPR:
16011 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16012 TREE_TYPE (t),
16013 TREE_OPERAND (t, 0),
16014 TREE_OPERAND (t, 1),
16015 strict_overflow_p);
16016 case TRUTH_NOT_EXPR:
16017 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16018 TREE_TYPE (t),
16019 TREE_OPERAND (t, 0),
16020 strict_overflow_p);
16021
16022 case COND_EXPR:
16023 case CONSTRUCTOR:
16024 case OBJ_TYPE_REF:
16025 case ASSERT_EXPR:
16026 case ADDR_EXPR:
16027 case WITH_SIZE_EXPR:
16028 case SSA_NAME:
16029 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16030
16031 default:
16032 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16033 }
16034 }
16035
16036 /* Return true if `t' is known to be non-negative. Handle warnings
16037 about undefined signed overflow. */
16038
16039 bool
16040 tree_expr_nonnegative_p (tree t)
16041 {
16042 bool ret, strict_overflow_p;
16043
16044 strict_overflow_p = false;
16045 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16046 if (strict_overflow_p)
16047 fold_overflow_warning (("assuming signed overflow does not occur when "
16048 "determining that expression is always "
16049 "non-negative"),
16050 WARN_STRICT_OVERFLOW_MISC);
16051 return ret;
16052 }
16053
16054
16055 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16056 For floating point we further ensure that the value is not denormal.
16057 Similar logic is present in nonzero_address_p in rtlanal.c.
16058
16059 If the return value is based on the assumption that signed overflow
16060 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16061 change *STRICT_OVERFLOW_P. */
16062
16063 bool
16064 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16065 bool *strict_overflow_p)
16066 {
16067 switch (code)
16068 {
16069 case ABS_EXPR:
16070 return tree_expr_nonzero_warnv_p (op0,
16071 strict_overflow_p);
16072
16073 case NOP_EXPR:
16074 {
16075 tree inner_type = TREE_TYPE (op0);
16076 tree outer_type = type;
16077
16078 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16079 && tree_expr_nonzero_warnv_p (op0,
16080 strict_overflow_p));
16081 }
16082 break;
16083
16084 case NON_LVALUE_EXPR:
16085 return tree_expr_nonzero_warnv_p (op0,
16086 strict_overflow_p);
16087
16088 default:
16089 break;
16090 }
16091
16092 return false;
16093 }
16094
16095 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16096 For floating point we further ensure that the value is not denormal.
16097 Similar logic is present in nonzero_address_p in rtlanal.c.
16098
16099 If the return value is based on the assumption that signed overflow
16100 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16101 change *STRICT_OVERFLOW_P. */
16102
16103 bool
16104 tree_binary_nonzero_warnv_p (enum tree_code code,
16105 tree type,
16106 tree op0,
16107 tree op1, bool *strict_overflow_p)
16108 {
16109 bool sub_strict_overflow_p;
16110 switch (code)
16111 {
16112 case POINTER_PLUS_EXPR:
16113 case PLUS_EXPR:
16114 if (TYPE_OVERFLOW_UNDEFINED (type))
16115 {
16116 /* In the presence of negative values it is hard
16117 to say anything definite. */
16118 sub_strict_overflow_p = false;
16119 if (!tree_expr_nonnegative_warnv_p (op0,
16120 &sub_strict_overflow_p)
16121 || !tree_expr_nonnegative_warnv_p (op1,
16122 &sub_strict_overflow_p))
16123 return false;
16124 /* One of the operands must be positive and the other non-negative. */
16125 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16126 overflows, on a two's-complement machine the sum of two
16127 nonnegative numbers can never be zero. */
16128 return (tree_expr_nonzero_warnv_p (op0,
16129 strict_overflow_p)
16130 || tree_expr_nonzero_warnv_p (op1,
16131 strict_overflow_p));
16132 }
16133 break;
16134
16135 case MULT_EXPR:
16136 if (TYPE_OVERFLOW_UNDEFINED (type))
16137 {
16138 if (tree_expr_nonzero_warnv_p (op0,
16139 strict_overflow_p)
16140 && tree_expr_nonzero_warnv_p (op1,
16141 strict_overflow_p))
16142 {
16143 *strict_overflow_p = true;
16144 return true;
16145 }
16146 }
16147 break;
16148
16149 case MIN_EXPR:
16150 sub_strict_overflow_p = false;
16151 if (tree_expr_nonzero_warnv_p (op0, &sub_strict_overflow_p)
16152 && tree_expr_nonzero_warnv_p (op1, &sub_strict_overflow_p))
16153 {
16154 if (sub_strict_overflow_p)
16155 *strict_overflow_p = true;
16156 /* The MIN of two nonzero operands is one of them, hence nonzero. */
16157 return true;
16158 }
16159 break;
16160
16161 case MAX_EXPR:
16162 sub_strict_overflow_p = false;
16163 if (tree_expr_nonzero_warnv_p (op0,
16164 &sub_strict_overflow_p))
16165 {
16166 if (sub_strict_overflow_p)
16167 *strict_overflow_p = true;
16168
16169 /* When both operands are nonzero, then MAX must be too. */
16170 if (tree_expr_nonzero_warnv_p (op1,
16171 strict_overflow_p))
16172 return true;
16173
16174 /* MAX where operand 0 is positive is positive. */
16175 return tree_expr_nonnegative_warnv_p (op0,
16176 strict_overflow_p);
16177 }
16178 /* MAX where operand 1 is positive is positive. */
16179 else if (tree_expr_nonzero_warnv_p (op1,
16180 &sub_strict_overflow_p)
16181 && tree_expr_nonnegative_warnv_p (op1,
16182 &sub_strict_overflow_p))
16183 {
16184 if (sub_strict_overflow_p)
16185 *strict_overflow_p = true;
16186 return true;
16187 }
16188 break;
16189
16190 case BIT_IOR_EXPR:
16191 return (tree_expr_nonzero_warnv_p (op1,
16192 strict_overflow_p)
16193 || tree_expr_nonzero_warnv_p (op0,
16194 strict_overflow_p));
16195
16196 default:
16197 break;
16198 }
16199
16200 return false;
16201 }
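
/* Illustration of the PLUS_EXPR reasoning above, in plain C: two
   nonnegative 32-bit values are each at most 2^31 - 1, so their exact
   sum is at most 2^32 - 2 and cannot wrap to 0 modulo 2^32 unless both
   inputs were 0, which the nonzero check rules out.  */

static int
example_sum_nonzero (int a, int b)	/* requires a >= 0, b >= 0, a != 0 */
{
  unsigned int sum = (unsigned int) a + (unsigned int) b;
  return sum != 0;	/* always 1 under the stated preconditions */
}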
16202
16203 /* Return true when T is an address and is known to be nonzero.
16204 For floating point we further ensure that T is not denormal.
16205 Similar logic is present in nonzero_address_p in rtlanal.c.
16206
16207 If the return value is based on the assumption that signed overflow
16208 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16209 change *STRICT_OVERFLOW_P. */
16210
16211 bool
16212 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16213 {
16214 bool sub_strict_overflow_p;
16215 switch (TREE_CODE (t))
16216 {
16217 case INTEGER_CST:
16218 return !integer_zerop (t);
16219
16220 case ADDR_EXPR:
16221 {
16222 tree base = TREE_OPERAND (t, 0);
16223 if (!DECL_P (base))
16224 base = get_base_address (base);
16225
16226 if (!base)
16227 return false;
16228
16229 /* Weak declarations may link to NULL. Other things may also be NULL,
16230 so protect with -fdelete-null-pointer-checks; variables allocated
16231 on the stack, however, are never NULL. */
16232 if (DECL_P (base)
16233 && (flag_delete_null_pointer_checks
16234 || (DECL_CONTEXT (base)
16235 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16236 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16237 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16238
16239 /* Constants are never weak. */
16240 if (CONSTANT_CLASS_P (base))
16241 return true;
16242
16243 return false;
16244 }
16245
16246 case COND_EXPR:
16247 sub_strict_overflow_p = false;
16248 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16249 &sub_strict_overflow_p)
16250 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16251 &sub_strict_overflow_p))
16252 {
16253 if (sub_strict_overflow_p)
16254 *strict_overflow_p = true;
16255 return true;
16256 }
16257 break;
16258
16259 default:
16260 break;
16261 }
16262 return false;
16263 }
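
/* Illustration of the ADDR_EXPR reasoning above: the address of an
   ordinary local or global object is provably nonzero, but a weak symbol
   may resolve to a null address, e.g.

     extern int maybe_missing __attribute__ ((weak));

   so "if (&maybe_missing)" must not be folded to "if (1)".  */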
16264
16265 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16266 attempt to fold the expression to a constant without modifying TYPE,
16267 OP0 or OP1.
16268
16269 If the expression could be simplified to a constant, then return
16270 the constant. If the expression would not be simplified to a
16271 constant, then return NULL_TREE. */
16272
16273 tree
16274 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16275 {
16276 tree tem = fold_binary (code, type, op0, op1);
16277 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16278 }
16279
16280 /* Given the components of a unary expression CODE, TYPE and OP0,
16281 attempt to fold the expression to a constant without modifying
16282 TYPE or OP0.
16283
16284 If the expression could be simplified to a constant, then return
16285 the constant. If the expression would not be simplified to a
16286 constant, then return NULL_TREE. */
16287
16288 tree
16289 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16290 {
16291 tree tem = fold_unary (code, type, op0);
16292 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16293 }
16294
16295 /* If EXP represents referencing an element in a constant string
16296 (either via pointer arithmetic or array indexing), return the
16297 tree representing the value accessed, otherwise return NULL. */
16298
16299 tree
16300 fold_read_from_constant_string (tree exp)
16301 {
16302 if ((TREE_CODE (exp) == INDIRECT_REF
16303 || TREE_CODE (exp) == ARRAY_REF)
16304 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16305 {
16306 tree exp1 = TREE_OPERAND (exp, 0);
16307 tree index;
16308 tree string;
16309 location_t loc = EXPR_LOCATION (exp);
16310
16311 if (TREE_CODE (exp) == INDIRECT_REF)
16312 string = string_constant (exp1, &index);
16313 else
16314 {
16315 tree low_bound = array_ref_low_bound (exp);
16316 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16317
16318 /* Optimize the special-case of a zero lower bound.
16319
16320 We convert the low_bound to sizetype to avoid some problems
16321 with constant folding. (E.g. suppose the lower bound is 1,
16322 and its mode is QI. Without the conversion, (ARRAY
16323 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16324 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16325 if (! integer_zerop (low_bound))
16326 index = size_diffop_loc (loc, index,
16327 fold_convert_loc (loc, sizetype, low_bound));
16328
16329 string = exp1;
16330 }
16331
16332 if (string
16333 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16334 && TREE_CODE (string) == STRING_CST
16335 && TREE_CODE (index) == INTEGER_CST
16336 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16337 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16338 == MODE_INT)
16339 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16340 return build_int_cst_type (TREE_TYPE (exp),
16341 (TREE_STRING_POINTER (string)
16342 [TREE_INT_CST_LOW (index)]));
16343 }
16344 return NULL;
16345 }
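
/* Illustration of what the routine above recognizes: both the array and
   the pointer form below read a constant character and can be folded to
   'e' at compile time.  */

static int
example_constant_string_read (void)
{
  return "hello"[1] == *("hello" + 1);	/* both sides fold to 'e' */
}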
16346
16347 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16348 an integer constant, real, or fixed-point constant.
16349
16350 TYPE is the type of the result. */
16351
16352 static tree
16353 fold_negate_const (tree arg0, tree type)
16354 {
16355 tree t = NULL_TREE;
16356
16357 switch (TREE_CODE (arg0))
16358 {
16359 case INTEGER_CST:
16360 {
16361 double_int val = tree_to_double_int (arg0);
16362 bool overflow;
16363 val = val.neg_with_overflow (&overflow);
16364 t = force_fit_type_double (type, val, 1,
16365 (overflow | TREE_OVERFLOW (arg0))
16366 && !TYPE_UNSIGNED (type));
16367 break;
16368 }
16369
16370 case REAL_CST:
16371 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16372 break;
16373
16374 case FIXED_CST:
16375 {
16376 FIXED_VALUE_TYPE f;
16377 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16378 &(TREE_FIXED_CST (arg0)), NULL,
16379 TYPE_SATURATING (type));
16380 t = build_fixed (type, f);
16381 /* Propagate overflow flags. */
16382 if (overflow_p | TREE_OVERFLOW (arg0))
16383 TREE_OVERFLOW (t) = 1;
16384 break;
16385 }
16386
16387 default:
16388 gcc_unreachable ();
16389 }
16390
16391 return t;
16392 }
16393
16394 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16395 an integer constant or real constant.
16396
16397 TYPE is the type of the result. */
16398
16399 tree
16400 fold_abs_const (tree arg0, tree type)
16401 {
16402 tree t = NULL_TREE;
16403
16404 switch (TREE_CODE (arg0))
16405 {
16406 case INTEGER_CST:
16407 {
16408 double_int val = tree_to_double_int (arg0);
16409
16410 /* If the value is unsigned or non-negative, then the absolute value
16411 is the same as the ordinary value. */
16412 if (TYPE_UNSIGNED (type)
16413 || !val.is_negative ())
16414 t = arg0;
16415
16416 /* If the value is negative, then the absolute value is
16417 its negation. */
16418 else
16419 {
16420 bool overflow;
16421 val = val.neg_with_overflow (&overflow);
16422 t = force_fit_type_double (type, val, -1,
16423 overflow | TREE_OVERFLOW (arg0));
16424 }
16425 }
16426 break;
16427
16428 case REAL_CST:
16429 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16430 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16431 else
16432 t = arg0;
16433 break;
16434
16435 default:
16436 gcc_unreachable ();
16437 }
16438
16439 return t;
16440 }
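
/* Illustration of the INTEGER_CST branch above: in a 32-bit signed type,
   |INT_MIN| = 2^31 is not representable, which is why the negation goes
   through neg_with_overflow and the overflow bit is propagated to the
   folded constant.  */

#include <limits.h>

static int
example_abs_would_overflow (int v)
{
  return v == INT_MIN;	/* the only int whose absolute value overflows */
}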
16441
16442 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16443 constant. TYPE is the type of the result. */
16444
16445 static tree
16446 fold_not_const (const_tree arg0, tree type)
16447 {
16448 double_int val;
16449
16450 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16451
16452 val = ~tree_to_double_int (arg0);
16453 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16454 }
16455
16456 /* Given CODE, a relational operator, the target type, TYPE and two
16457 constant operands OP0 and OP1, return the result of the
16458 relational operation. If the result is not a compile time
16459 constant, then return NULL_TREE. */
16460
16461 static tree
16462 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16463 {
16464 int result, invert;
16465
16466 /* From here on, the only cases we handle are when the result is
16467 known to be a constant. */
16468
16469 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16470 {
16471 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16472 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16473
16474 /* Handle the cases where either operand is a NaN. */
16475 if (real_isnan (c0) || real_isnan (c1))
16476 {
16477 switch (code)
16478 {
16479 case EQ_EXPR:
16480 case ORDERED_EXPR:
16481 result = 0;
16482 break;
16483
16484 case NE_EXPR:
16485 case UNORDERED_EXPR:
16486 case UNLT_EXPR:
16487 case UNLE_EXPR:
16488 case UNGT_EXPR:
16489 case UNGE_EXPR:
16490 case UNEQ_EXPR:
16491 result = 1;
16492 break;
16493
16494 case LT_EXPR:
16495 case LE_EXPR:
16496 case GT_EXPR:
16497 case GE_EXPR:
16498 case LTGT_EXPR:
16499 if (flag_trapping_math)
16500 return NULL_TREE;
16501 result = 0;
16502 break;
16503
16504 default:
16505 gcc_unreachable ();
16506 }
16507
16508 return constant_boolean_node (result, type);
16509 }
16510
16511 return constant_boolean_node (real_compare (code, c0, c1), type);
16512 }
16513
16514 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16515 {
16516 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16517 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16518 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16519 }
16520
16521 /* Handle equality/inequality of complex constants. */
16522 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16523 {
16524 tree rcond = fold_relational_const (code, type,
16525 TREE_REALPART (op0),
16526 TREE_REALPART (op1));
16527 tree icond = fold_relational_const (code, type,
16528 TREE_IMAGPART (op0),
16529 TREE_IMAGPART (op1));
16530 if (code == EQ_EXPR)
16531 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16532 else if (code == NE_EXPR)
16533 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16534 else
16535 return NULL_TREE;
16536 }
16537
16538 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16539 {
16540 unsigned count = VECTOR_CST_NELTS (op0);
16541 tree *elts = XALLOCAVEC (tree, count);
16542 gcc_assert (VECTOR_CST_NELTS (op1) == count
16543 && TYPE_VECTOR_SUBPARTS (type) == count);
16544
16545 for (unsigned i = 0; i < count; i++)
16546 {
16547 tree elem_type = TREE_TYPE (type);
16548 tree elem0 = VECTOR_CST_ELT (op0, i);
16549 tree elem1 = VECTOR_CST_ELT (op1, i);
16550
16551 tree tem = fold_relational_const (code, elem_type,
16552 elem0, elem1);
16553
16554 if (tem == NULL_TREE)
16555 return NULL_TREE;
16556
16557 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16558 }
16559
16560 return build_vector (type, elts);
16561 }
16562
16563 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16564
16565 To compute GT, swap the arguments and do LT.
16566 To compute GE, do LT and invert the result.
16567 To compute LE, swap the arguments, do LT and invert the result.
16568 To compute NE, do EQ and invert the result.
16569
16570 Therefore, the code below must handle only EQ and LT. */
16571
16572 if (code == LE_EXPR || code == GT_EXPR)
16573 {
16574 tree tem = op0;
16575 op0 = op1;
16576 op1 = tem;
16577 code = swap_tree_comparison (code);
16578 }
16579
16580 /* Note that it is safe to invert for real values here because we
16581 have already handled the one case where it matters. */
16582
16583 invert = 0;
16584 if (code == NE_EXPR || code == GE_EXPR)
16585 {
16586 invert = 1;
16587 code = invert_tree_comparison (code, false);
16588 }
16589
16590 /* Compute a result for LT or EQ if the arguments permit;
16591 otherwise return NULL_TREE. */
16592 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16593 {
16594 if (code == EQ_EXPR)
16595 result = tree_int_cst_equal (op0, op1);
16596 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16597 result = INT_CST_LT_UNSIGNED (op0, op1);
16598 else
16599 result = INT_CST_LT (op0, op1);
16600 }
16601 else
16602 return NULL_TREE;
16603
16604 if (invert)
16605 result ^= 1;
16606 return constant_boolean_node (result, type);
16607 }
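
/* Illustrative sketch only (hypothetical names, not GCC API): the
   canonicalization above answers all six orderings using nothing but
   "less than" and "equal", exactly as the comments describe.  */

enum example_cmp { EX_LT, EX_LE, EX_GT, EX_GE, EX_EQ, EX_NE };

static int
example_compare (enum example_cmp code, long a, long b)
{
  int swap = (code == EX_LE || code == EX_GT);
  int invert = (code == EX_NE || code == EX_GE || code == EX_LE);
  int result;

  if (swap)
    {
      long t = a;
      a = b;
      b = t;
    }
  result = (code == EX_EQ || code == EX_NE) ? a == b : a < b;
  return invert ? !result : result;
}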
16608
16609 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16610 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16611 itself. */
16612
16613 tree
16614 fold_build_cleanup_point_expr (tree type, tree expr)
16615 {
16616 /* If the expression does not have side effects then we don't have to wrap
16617 it with a cleanup point expression. */
16618 if (!TREE_SIDE_EFFECTS (expr))
16619 return expr;
16620
16621 /* If the expression is a return, check whether the expression inside the
16622 return, or the right-hand side of the MODIFY_EXPR inside the return, has
16623 side effects. If either one has none, we don't need to wrap the
16624 expression in a CLEANUP_POINT_EXPR. Note that we don't check the
16625 left-hand side of the MODIFY_EXPR because it should always be the return decl. */
16626 if (TREE_CODE (expr) == RETURN_EXPR)
16627 {
16628 tree op = TREE_OPERAND (expr, 0);
16629 if (!op || !TREE_SIDE_EFFECTS (op))
16630 return expr;
16631 op = TREE_OPERAND (op, 1);
16632 if (!TREE_SIDE_EFFECTS (op))
16633 return expr;
16634 }
16635
16636 return build1 (CLEANUP_POINT_EXPR, type, expr);
16637 }
16638
16639 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16640 of an indirection through OP0, or NULL_TREE if no simplification is
16641 possible. */
16642
16643 tree
16644 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16645 {
16646 tree sub = op0;
16647 tree subtype;
16648
16649 STRIP_NOPS (sub);
16650 subtype = TREE_TYPE (sub);
16651 if (!POINTER_TYPE_P (subtype))
16652 return NULL_TREE;
16653
16654 if (TREE_CODE (sub) == ADDR_EXPR)
16655 {
16656 tree op = TREE_OPERAND (sub, 0);
16657 tree optype = TREE_TYPE (op);
16658 /* *&CONST_DECL -> to the value of the const decl. */
16659 if (TREE_CODE (op) == CONST_DECL)
16660 return DECL_INITIAL (op);
16661 /* *&p => p; make sure to handle *&"str"[cst] here. */
16662 if (type == optype)
16663 {
16664 tree fop = fold_read_from_constant_string (op);
16665 if (fop)
16666 return fop;
16667 else
16668 return op;
16669 }
16670 /* *(foo *)&fooarray => fooarray[0] */
16671 else if (TREE_CODE (optype) == ARRAY_TYPE
16672 && type == TREE_TYPE (optype)
16673 && (!in_gimple_form
16674 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16675 {
16676 tree type_domain = TYPE_DOMAIN (optype);
16677 tree min_val = size_zero_node;
16678 if (type_domain && TYPE_MIN_VALUE (type_domain))
16679 min_val = TYPE_MIN_VALUE (type_domain);
16680 if (in_gimple_form
16681 && TREE_CODE (min_val) != INTEGER_CST)
16682 return NULL_TREE;
16683 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16684 NULL_TREE, NULL_TREE);
16685 }
16686 /* *(foo *)&complexfoo => __real__ complexfoo */
16687 else if (TREE_CODE (optype) == COMPLEX_TYPE
16688 && type == TREE_TYPE (optype))
16689 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16690 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16691 else if (TREE_CODE (optype) == VECTOR_TYPE
16692 && type == TREE_TYPE (optype))
16693 {
16694 tree part_width = TYPE_SIZE (type);
16695 tree index = bitsize_int (0);
16696 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16697 }
16698 }
16699
16700 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16701 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16702 {
16703 tree op00 = TREE_OPERAND (sub, 0);
16704 tree op01 = TREE_OPERAND (sub, 1);
16705
16706 STRIP_NOPS (op00);
16707 if (TREE_CODE (op00) == ADDR_EXPR)
16708 {
16709 tree op00type;
16710 op00 = TREE_OPERAND (op00, 0);
16711 op00type = TREE_TYPE (op00);
16712
16713 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16714 if (TREE_CODE (op00type) == VECTOR_TYPE
16715 && type == TREE_TYPE (op00type))
16716 {
16717 HOST_WIDE_INT offset = tree_to_shwi (op01);
16718 tree part_width = TYPE_SIZE (type);
16719 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
16720 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16721 tree index = bitsize_int (indexi);
16722
16723 if (offset/part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16724 return fold_build3_loc (loc,
16725 BIT_FIELD_REF, type, op00,
16726 part_width, index);
16727
16728 }
16729 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16730 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16731 && type == TREE_TYPE (op00type))
16732 {
16733 tree size = TYPE_SIZE_UNIT (type);
16734 if (tree_int_cst_equal (size, op01))
16735 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16736 }
16737 /* ((foo *)&fooarray)[1] => fooarray[1] */
16738 else if (TREE_CODE (op00type) == ARRAY_TYPE
16739 && type == TREE_TYPE (op00type))
16740 {
16741 tree type_domain = TYPE_DOMAIN (op00type);
16742 tree min_val = size_zero_node;
16743 if (type_domain && TYPE_MIN_VALUE (type_domain))
16744 min_val = TYPE_MIN_VALUE (type_domain);
16745 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16746 TYPE_SIZE_UNIT (type));
16747 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16748 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16749 NULL_TREE, NULL_TREE);
16750 }
16751 }
16752 }
16753
16754 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16755 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16756 && type == TREE_TYPE (TREE_TYPE (subtype))
16757 && (!in_gimple_form
16758 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16759 {
16760 tree type_domain;
16761 tree min_val = size_zero_node;
16762 sub = build_fold_indirect_ref_loc (loc, sub);
16763 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16764 if (type_domain && TYPE_MIN_VALUE (type_domain))
16765 min_val = TYPE_MIN_VALUE (type_domain);
16766 if (in_gimple_form
16767 && TREE_CODE (min_val) != INTEGER_CST)
16768 return NULL_TREE;
16769 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16770 NULL_TREE);
16771 }
16772
16773 return NULL_TREE;
16774 }
16775
16776 /* Builds an expression for an indirection through T, simplifying some
16777 cases. */

tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as the original expression.  */
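/* E.g. (sketch): for the COMPOUND_EXPR (x++, y + 1) the side-effect-free
   second operand is dropped and x++ is returned; an expression with no
   side effects at all, such as y + 1 on its own, is replaced by
   integer_zero_node.  */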

tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
	t = TREE_OPERAND (t, 0);
	break;

      case tcc_binary:
      case tcc_comparison:
	if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	  t = TREE_OPERAND (t, 0);
	else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
	  t = TREE_OPERAND (t, 1);
	else
	  return t;
	break;

      case tcc_expression:
	switch (TREE_CODE (t))
	  {
	  case COMPOUND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  case COND_EXPR:
	    if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
		|| TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
	      return t;
	    t = TREE_OPERAND (t, 0);
	    break;

	  default:
	    return t;
	  }
	break;

      default:
	return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */
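/* Worked example (a sketch): round_up_loc (loc, size_int (13), 8)
   yields 16.  For a non-constant VALUE and a power-of-two DIVISOR the
   result is computed as (VALUE + 7) & -8; for other divisors it is a
   CEIL_DIV_EXPR followed by a MULT_EXPR.  */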

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
	{
	  double_int val = tree_to_double_int (value);
	  bool overflow_p;

	  if ((val.low & (divisor - 1)) == 0)
	    return value;

	  overflow_p = TREE_OVERFLOW (value);
	  val.low &= ~(divisor - 1);
	  val.low += divisor;
	  if (val.low == 0)
	    {
	      val.high++;
	      if (val.high == 0)
		overflow_p = true;
	    }

	  return force_fit_type_double (TREE_TYPE (value), val,
					-1, overflow_p);
	}
      else
	{
	  tree t;

	  t = build_int_cst (TREE_TYPE (value), divisor - 1);
	  value = size_binop_loc (loc, PLUS_EXPR, value, t);
	  t = build_int_cst (TREE_TYPE (value), -divisor);
	  value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
	}
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */
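/* E.g. (sketch): rounding 13 down to a multiple of 8 gives 8; for a
   power-of-two divisor this is simply VALUE & -8.  */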

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant this check is more expensive than simply
     doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */
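/* E.g. (sketch): for EXP == &s.b, where field b sits at byte 4 of s,
   this returns &s with *PBITPOS == 32 and *POFFSET == NULL_TREE.  */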

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, E1 - E2 is stored in *DIFF.  */
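/* E.g. (sketch): for "int a[10];", E1 == &a[3] and E2 == &a[1] share
   the core &a, so *DIFF is set to 8 (two 4-byte elements) and true is
   returned.  */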

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
	 be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */
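/* E.g. (sketch): in a context such as fabs (-x * y), where the sign of
   the result is ignored, the NEGATE_EXPR is stripped and x * y is
   returned; a call copysign (x, y) is reduced to x, with y retained
   only for its possible side effects.  */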

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
				arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp),
				TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign call and return its first argument;
	       the second is kept only for its side effects.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp),
					      1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}