1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
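
/* Illustrative sketch (added annotation, not part of the original
   source): a typical use of the entry points above is folding the
   sizetype sum 4 + 8 at compile time:

     tree sum = size_binop (PLUS_EXPR, size_int (4), size_int (8));

   Because both operands are INTEGER_CSTs, SUM is the sizetype
   constant 12 rather than a PLUS_EXPR tree.  */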
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "tree-flow.h"
62
63 /* Nonzero if we are folding constants inside an initializer; zero
64 otherwise. */
65 int folding_initializer = 0;
66
67 /* The following constants represent a bit based encoding of GCC's
68 comparison operators. This encoding simplifies transformations
69 on relational comparison operators, such as AND and OR. */
70 enum comparison_code {
71 COMPCODE_FALSE = 0,
72 COMPCODE_LT = 1,
73 COMPCODE_EQ = 2,
74 COMPCODE_LE = 3,
75 COMPCODE_GT = 4,
76 COMPCODE_LTGT = 5,
77 COMPCODE_GE = 6,
78 COMPCODE_ORD = 7,
79 COMPCODE_UNORD = 8,
80 COMPCODE_UNLT = 9,
81 COMPCODE_UNEQ = 10,
82 COMPCODE_UNLE = 11,
83 COMPCODE_UNGT = 12,
84 COMPCODE_NE = 13,
85 COMPCODE_UNGE = 14,
86 COMPCODE_TRUE = 15
87 };
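
/* Added annotation (not in the original source): the encoding uses one
   bit each for "less" (1), "equal" (2), "greater" (4) and "unordered"
   (8), so compound codes are bitwise ORs of the primitive ones:

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)                   1|2 == 3
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_GT | COMPCODE_LT)

   and a comparison is negated by XOR with COMPCODE_TRUE, e.g.
   COMPCODE_LT ^ COMPCODE_TRUE == COMPCODE_UNGE (1 ^ 15 == 14).  */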
88
89 static bool negate_mathfn_p (enum built_in_function);
90 static bool negate_expr_p (tree);
91 static tree negate_expr (tree);
92 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
93 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
94 static tree const_binop (enum tree_code, tree, tree);
95 static enum comparison_code comparison_to_compcode (enum tree_code);
96 static enum tree_code compcode_to_comparison (enum comparison_code);
97 static int operand_equal_for_comparison_p (tree, tree, tree);
98 static int twoval_comparison_p (tree, tree *, tree *, int *);
99 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
100 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
101 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
102 static tree make_bit_field_ref (location_t, tree, tree,
103 HOST_WIDE_INT, HOST_WIDE_INT, int);
104 static tree optimize_bit_field_compare (location_t, enum tree_code,
105 tree, tree, tree);
106 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
107 HOST_WIDE_INT *,
108 enum machine_mode *, int *, int *,
109 tree *, tree *);
110 static int all_ones_mask_p (const_tree, int);
111 static tree sign_bit_p (tree, const_tree);
112 static int simple_operand_p (const_tree);
113 static bool simple_operand_p_2 (tree);
114 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
115 static tree range_predecessor (tree);
116 static tree range_successor (tree);
117 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
118 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
119 static tree unextend (tree, int, int, tree);
120 static tree optimize_minmax_comparison (location_t, enum tree_code,
121 tree, tree, tree);
122 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
123 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
124 static tree fold_binary_op_with_conditional_arg (location_t,
125 enum tree_code, tree,
126 tree, tree,
127 tree, tree, int);
128 static tree fold_mathfn_compare (location_t,
129 enum built_in_function, enum tree_code,
130 tree, tree, tree);
131 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
132 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
133 static bool reorder_operands_p (const_tree, const_tree);
134 static tree fold_negate_const (tree, tree);
135 static tree fold_not_const (const_tree, tree);
136 static tree fold_relational_const (enum tree_code, tree, tree, tree);
137 static tree fold_convert_const (enum tree_code, tree, tree);
138
139 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
140 Otherwise, return LOC. */
141
142 static location_t
143 expr_location_or (tree t, location_t loc)
144 {
145 location_t tloc = EXPR_LOCATION (t);
146 return tloc == UNKNOWN_LOCATION ? loc : tloc;
147 }
148
149 /* Similar to protected_set_expr_location, but never modify x in place;
150 if the location can and needs to be set, unshare x first. */
151
152 static inline tree
153 protected_set_expr_location_unshare (tree x, location_t loc)
154 {
155 if (CAN_HAVE_LOCATION_P (x)
156 && EXPR_LOCATION (x) != loc
157 && !(TREE_CODE (x) == SAVE_EXPR
158 || TREE_CODE (x) == TARGET_EXPR
159 || TREE_CODE (x) == BIND_EXPR))
160 {
161 x = copy_node (x);
162 SET_EXPR_LOCATION (x, loc);
163 }
164 return x;
165 }
166 \f
167 /* If ARG2 divides ARG1 with zero remainder, carry out the division
168 indicated by CODE and return the quotient.
169 Otherwise return NULL_TREE. */
170
171 tree
172 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
173 {
174 double_int quo, rem;
175 int uns;
176
177 /* The signedness of the division is taken from operand two, which
178 does the correct thing for POINTER_PLUS_EXPR, where we want
179 a signed division. */
180 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
181
182 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
183 uns, code, &rem);
184
185 if (rem.is_zero ())
186 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
187
188 return NULL_TREE;
189 }
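
/* Illustrative example (added, not in the original source), assuming
   INTEGER_CST operands of integer_type_node:

     div_if_zero_remainder (TRUNC_DIV_EXPR,
                            build_int_cst (integer_type_node, 12),
                            build_int_cst (integer_type_node, 4))

   returns the INTEGER_CST 3, whereas dividing 13 by 4 leaves a
   remainder of 1 and therefore returns NULL_TREE.  */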
190 \f
191 /* This is nonzero if we should defer warnings about undefined
192 overflow. This facility exists because these warnings are a
193 special case. The code to estimate loop iterations does not want
194 to issue any warnings, since it works with expressions which do not
195 occur in user code. Various bits of cleanup code call fold(), but
196 only use the result if it has certain characteristics (e.g., is a
197 constant); that code only wants to issue a warning if the result is
198 used. */
199
200 static int fold_deferring_overflow_warnings;
201
202 /* If a warning about undefined overflow is deferred, this is the
203 warning. Note that this may cause us to turn two warnings into
204 one, but that is fine since it is sufficient to only give one
205 warning per expression. */
206
207 static const char* fold_deferred_overflow_warning;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 level at which the warning should be emitted. */
211
212 static enum warn_strict_overflow_code fold_deferred_overflow_code;
213
214 /* Start deferring overflow warnings. We could use a stack here to
215 permit nested calls, but at present it is not necessary. */
216
217 void
218 fold_defer_overflow_warnings (void)
219 {
220 ++fold_deferring_overflow_warnings;
221 }
222
223 /* Stop deferring overflow warnings. If there is a pending warning,
224 and ISSUE is true, then issue the warning if appropriate. STMT is
225 the statement with which the warning should be associated (used for
226 location information); STMT may be NULL. CODE is the level of the
227 warning--a warn_strict_overflow_code value. This function will use
228 the smaller of CODE and the deferred code when deciding whether to
229 issue the warning. CODE may be zero to mean to always use the
230 deferred code. */
231
232 void
233 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
234 {
235 const char *warnmsg;
236 location_t locus;
237
238 gcc_assert (fold_deferring_overflow_warnings > 0);
239 --fold_deferring_overflow_warnings;
240 if (fold_deferring_overflow_warnings > 0)
241 {
242 if (fold_deferred_overflow_warning != NULL
243 && code != 0
244 && code < (int) fold_deferred_overflow_code)
245 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
246 return;
247 }
248
249 warnmsg = fold_deferred_overflow_warning;
250 fold_deferred_overflow_warning = NULL;
251
252 if (!issue || warnmsg == NULL)
253 return;
254
255 if (gimple_no_warning_p (stmt))
256 return;
257
258 /* Use the smallest code level when deciding to issue the
259 warning. */
260 if (code == 0 || code > (int) fold_deferred_overflow_code)
261 code = fold_deferred_overflow_code;
262
263 if (!issue_strict_overflow_warning (code))
264 return;
265
266 if (stmt == NULL)
267 locus = input_location;
268 else
269 locus = gimple_location (stmt);
270 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
271 }
272
273 /* Stop deferring overflow warnings, ignoring any deferred
274 warnings. */
275
276 void
277 fold_undefer_and_ignore_overflow_warnings (void)
278 {
279 fold_undefer_overflow_warnings (false, NULL, 0);
280 }
281
282 /* Whether we are deferring overflow warnings. */
283
284 bool
285 fold_deferring_overflow_warnings_p (void)
286 {
287 return fold_deferring_overflow_warnings > 0;
288 }
289
290 /* This is called when we fold something based on the fact that signed
291 overflow is undefined. */
292
293 static void
294 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
295 {
296 if (fold_deferring_overflow_warnings > 0)
297 {
298 if (fold_deferred_overflow_warning == NULL
299 || wc < fold_deferred_overflow_code)
300 {
301 fold_deferred_overflow_warning = gmsgid;
302 fold_deferred_overflow_code = wc;
303 }
304 }
305 else if (issue_strict_overflow_warning (wc))
306 warning (OPT_Wstrict_overflow, gmsgid);
307 }
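
/* Hedged usage sketch (added, not in the original source): a caller
   that folds speculatively and only warns when the result is actually
   used; result_used_p is a hypothetical predicate standing in for the
   caller's own test.

     fold_defer_overflow_warnings ();
     tree t = fold (expr);
     fold_undefer_overflow_warnings (t && result_used_p (t), stmt, 0);  */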
308 \f
309 /* Return true if the built-in mathematical function specified by CODE
310 is odd, i.e. -f(x) == f(-x). */
311
312 static bool
313 negate_mathfn_p (enum built_in_function code)
314 {
315 switch (code)
316 {
317 CASE_FLT_FN (BUILT_IN_ASIN):
318 CASE_FLT_FN (BUILT_IN_ASINH):
319 CASE_FLT_FN (BUILT_IN_ATAN):
320 CASE_FLT_FN (BUILT_IN_ATANH):
321 CASE_FLT_FN (BUILT_IN_CASIN):
322 CASE_FLT_FN (BUILT_IN_CASINH):
323 CASE_FLT_FN (BUILT_IN_CATAN):
324 CASE_FLT_FN (BUILT_IN_CATANH):
325 CASE_FLT_FN (BUILT_IN_CBRT):
326 CASE_FLT_FN (BUILT_IN_CPROJ):
327 CASE_FLT_FN (BUILT_IN_CSIN):
328 CASE_FLT_FN (BUILT_IN_CSINH):
329 CASE_FLT_FN (BUILT_IN_CTAN):
330 CASE_FLT_FN (BUILT_IN_CTANH):
331 CASE_FLT_FN (BUILT_IN_ERF):
332 CASE_FLT_FN (BUILT_IN_LLROUND):
333 CASE_FLT_FN (BUILT_IN_LROUND):
334 CASE_FLT_FN (BUILT_IN_ROUND):
335 CASE_FLT_FN (BUILT_IN_SIN):
336 CASE_FLT_FN (BUILT_IN_SINH):
337 CASE_FLT_FN (BUILT_IN_TAN):
338 CASE_FLT_FN (BUILT_IN_TANH):
339 CASE_FLT_FN (BUILT_IN_TRUNC):
340 return true;
341
342 CASE_FLT_FN (BUILT_IN_LLRINT):
343 CASE_FLT_FN (BUILT_IN_LRINT):
344 CASE_FLT_FN (BUILT_IN_NEARBYINT):
345 CASE_FLT_FN (BUILT_IN_RINT):
346 return !flag_rounding_math;
347
348 default:
349 break;
350 }
351 return false;
352 }
353
354 /* Check whether we may negate an integer constant T without causing
355 overflow. */
356
357 bool
358 may_negate_without_overflow_p (const_tree t)
359 {
360 unsigned HOST_WIDE_INT val;
361 unsigned int prec;
362 tree type;
363
364 gcc_assert (TREE_CODE (t) == INTEGER_CST);
365
366 type = TREE_TYPE (t);
367 if (TYPE_UNSIGNED (type))
368 return false;
369
370 prec = TYPE_PRECISION (type);
371 if (prec > HOST_BITS_PER_WIDE_INT)
372 {
373 if (TREE_INT_CST_LOW (t) != 0)
374 return true;
375 prec -= HOST_BITS_PER_WIDE_INT;
376 val = TREE_INT_CST_HIGH (t);
377 }
378 else
379 val = TREE_INT_CST_LOW (t);
380 if (prec < HOST_BITS_PER_WIDE_INT)
381 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
382 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
383 }
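
/* Illustrative example (added, not in the original source): for a
   signed 32-bit int every constant except INT_MIN may be negated,
   since -(-2147483648) is not representable:

     may_negate_without_overflow_p (INT_MIN constant)        -> false
     may_negate_without_overflow_p (integer_minus_one_node)  -> true  */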
384
385 /* Determine whether an expression T can be cheaply negated using
386 the function negate_expr without introducing undefined overflow. */
387
388 static bool
389 negate_expr_p (tree t)
390 {
391 tree type;
392
393 if (t == 0)
394 return false;
395
396 type = TREE_TYPE (t);
397
398 STRIP_SIGN_NOPS (t);
399 switch (TREE_CODE (t))
400 {
401 case INTEGER_CST:
402 if (TYPE_OVERFLOW_WRAPS (type))
403 return true;
404
405 /* Check that -CST will not overflow type. */
406 return may_negate_without_overflow_p (t);
407 case BIT_NOT_EXPR:
408 return (INTEGRAL_TYPE_P (type)
409 && TYPE_OVERFLOW_WRAPS (type));
410
411 case FIXED_CST:
412 case NEGATE_EXPR:
413 return true;
414
415 case REAL_CST:
416 /* We want to canonicalize to positive real constants. Pretend
417 that only negative ones can be easily negated. */
418 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
419
420 case COMPLEX_CST:
421 return negate_expr_p (TREE_REALPART (t))
422 && negate_expr_p (TREE_IMAGPART (t));
423
424 case VECTOR_CST:
425 {
426 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
427 return true;
428
429 int count = TYPE_VECTOR_SUBPARTS (type), i;
430
431 for (i = 0; i < count; i++)
432 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
433 return false;
434
435 return true;
436 }
437
438 case COMPLEX_EXPR:
439 return negate_expr_p (TREE_OPERAND (t, 0))
440 && negate_expr_p (TREE_OPERAND (t, 1));
441
442 case CONJ_EXPR:
443 return negate_expr_p (TREE_OPERAND (t, 0));
444
445 case PLUS_EXPR:
446 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
447 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
448 return false;
449 /* -(A + B) -> (-B) - A. */
450 if (negate_expr_p (TREE_OPERAND (t, 1))
451 && reorder_operands_p (TREE_OPERAND (t, 0),
452 TREE_OPERAND (t, 1)))
453 return true;
454 /* -(A + B) -> (-A) - B. */
455 return negate_expr_p (TREE_OPERAND (t, 0));
456
457 case MINUS_EXPR:
458 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
459 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
460 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
461 && reorder_operands_p (TREE_OPERAND (t, 0),
462 TREE_OPERAND (t, 1));
463
464 case MULT_EXPR:
465 if (TYPE_UNSIGNED (TREE_TYPE (t)))
466 break;
467
468 /* Fall through. */
469
470 case RDIV_EXPR:
471 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
472 return negate_expr_p (TREE_OPERAND (t, 1))
473 || negate_expr_p (TREE_OPERAND (t, 0));
474 break;
475
476 case TRUNC_DIV_EXPR:
477 case ROUND_DIV_EXPR:
478 case FLOOR_DIV_EXPR:
479 case CEIL_DIV_EXPR:
480 case EXACT_DIV_EXPR:
481 /* In general we can't negate A / B, because if A is INT_MIN and
482 B is 1, we may turn this into INT_MIN / -1 which is undefined
483 and actually traps on some architectures. But if overflow is
484 undefined, we can negate, because - (INT_MIN / 1) is an
485 overflow. */
486 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
487 && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
488 break;
489 return negate_expr_p (TREE_OPERAND (t, 1))
490 || negate_expr_p (TREE_OPERAND (t, 0));
491
492 case NOP_EXPR:
493 /* Negate -((double)float) as (double)(-float). */
494 if (TREE_CODE (type) == REAL_TYPE)
495 {
496 tree tem = strip_float_extensions (t);
497 if (tem != t)
498 return negate_expr_p (tem);
499 }
500 break;
501
502 case CALL_EXPR:
503 /* Negate -f(x) as f(-x). */
504 if (negate_mathfn_p (builtin_mathfn_code (t)))
505 return negate_expr_p (CALL_EXPR_ARG (t, 0));
506 break;
507
508 case RSHIFT_EXPR:
509 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
510 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
511 {
512 tree op1 = TREE_OPERAND (t, 1);
513 if (TREE_INT_CST_HIGH (op1) == 0
514 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
515 == TREE_INT_CST_LOW (op1))
516 return true;
517 }
518 break;
519
520 default:
521 break;
522 }
523 return false;
524 }
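
/* Illustrative example (added, not in the original source): for
   A + 5 with A a signed int, negate_expr_p returns true because the
   literal operand is cheap to negate, enabling the rewrite

     -(A + 5)  ->  (-5) - A

   which fold_negate_expr below carries out.  */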
525
526 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
527 simplification is possible.
528 If negate_expr_p would return true for T, NULL_TREE will never be
529 returned. */
530
531 static tree
532 fold_negate_expr (location_t loc, tree t)
533 {
534 tree type = TREE_TYPE (t);
535 tree tem;
536
537 switch (TREE_CODE (t))
538 {
539 /* Convert - (~A) to A + 1. */
540 case BIT_NOT_EXPR:
541 if (INTEGRAL_TYPE_P (type))
542 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
543 build_one_cst (type));
544 break;
545
546 case INTEGER_CST:
547 tem = fold_negate_const (t, type);
548 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
549 || !TYPE_OVERFLOW_TRAPS (type))
550 return tem;
551 break;
552
553 case REAL_CST:
554 tem = fold_negate_const (t, type);
555 /* Two's complement FP formats, such as c4x, may overflow. */
556 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
557 return tem;
558 break;
559
560 case FIXED_CST:
561 tem = fold_negate_const (t, type);
562 return tem;
563
564 case COMPLEX_CST:
565 {
566 tree rpart = negate_expr (TREE_REALPART (t));
567 tree ipart = negate_expr (TREE_IMAGPART (t));
568
569 if ((TREE_CODE (rpart) == REAL_CST
570 && TREE_CODE (ipart) == REAL_CST)
571 || (TREE_CODE (rpart) == INTEGER_CST
572 && TREE_CODE (ipart) == INTEGER_CST))
573 return build_complex (type, rpart, ipart);
574 }
575 break;
576
577 case VECTOR_CST:
578 {
579 int count = TYPE_VECTOR_SUBPARTS (type), i;
580 tree *elts = XALLOCAVEC (tree, count);
581
582 for (i = 0; i < count; i++)
583 {
584 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
585 if (elts[i] == NULL_TREE)
586 return NULL_TREE;
587 }
588
589 return build_vector (type, elts);
590 }
591
592 case COMPLEX_EXPR:
593 if (negate_expr_p (t))
594 return fold_build2_loc (loc, COMPLEX_EXPR, type,
595 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
596 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
597 break;
598
599 case CONJ_EXPR:
600 if (negate_expr_p (t))
601 return fold_build1_loc (loc, CONJ_EXPR, type,
602 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
603 break;
604
605 case NEGATE_EXPR:
606 return TREE_OPERAND (t, 0);
607
608 case PLUS_EXPR:
609 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
610 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
611 {
612 /* -(A + B) -> (-B) - A. */
613 if (negate_expr_p (TREE_OPERAND (t, 1))
614 && reorder_operands_p (TREE_OPERAND (t, 0),
615 TREE_OPERAND (t, 1)))
616 {
617 tem = negate_expr (TREE_OPERAND (t, 1));
618 return fold_build2_loc (loc, MINUS_EXPR, type,
619 tem, TREE_OPERAND (t, 0));
620 }
621
622 /* -(A + B) -> (-A) - B. */
623 if (negate_expr_p (TREE_OPERAND (t, 0)))
624 {
625 tem = negate_expr (TREE_OPERAND (t, 0));
626 return fold_build2_loc (loc, MINUS_EXPR, type,
627 tem, TREE_OPERAND (t, 1));
628 }
629 }
630 break;
631
632 case MINUS_EXPR:
633 /* - (A - B) -> B - A */
634 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
635 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
636 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
637 return fold_build2_loc (loc, MINUS_EXPR, type,
638 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
639 break;
640
641 case MULT_EXPR:
642 if (TYPE_UNSIGNED (type))
643 break;
644
645 /* Fall through. */
646
647 case RDIV_EXPR:
648 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
649 {
650 tem = TREE_OPERAND (t, 1);
651 if (negate_expr_p (tem))
652 return fold_build2_loc (loc, TREE_CODE (t), type,
653 TREE_OPERAND (t, 0), negate_expr (tem));
654 tem = TREE_OPERAND (t, 0);
655 if (negate_expr_p (tem))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 negate_expr (tem), TREE_OPERAND (t, 1));
658 }
659 break;
660
661 case TRUNC_DIV_EXPR:
662 case ROUND_DIV_EXPR:
663 case FLOOR_DIV_EXPR:
664 case CEIL_DIV_EXPR:
665 case EXACT_DIV_EXPR:
666 /* In general we can't negate A / B, because if A is INT_MIN and
667 B is 1, we may turn this into INT_MIN / -1 which is undefined
668 and actually traps on some architectures. But if overflow is
669 undefined, we can negate, because - (INT_MIN / 1) is an
670 overflow. */
671 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
672 {
673 const char * const warnmsg = G_("assuming signed overflow does not "
674 "occur when negating a division");
675 tem = TREE_OPERAND (t, 1);
676 if (negate_expr_p (tem))
677 {
678 if (INTEGRAL_TYPE_P (type)
679 && (TREE_CODE (tem) != INTEGER_CST
680 || integer_onep (tem)))
681 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
682 return fold_build2_loc (loc, TREE_CODE (t), type,
683 TREE_OPERAND (t, 0), negate_expr (tem));
684 }
685 tem = TREE_OPERAND (t, 0);
686 if (negate_expr_p (tem))
687 {
688 if (INTEGRAL_TYPE_P (type)
689 && (TREE_CODE (tem) != INTEGER_CST
690 || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
691 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
692 return fold_build2_loc (loc, TREE_CODE (t), type,
693 negate_expr (tem), TREE_OPERAND (t, 1));
694 }
695 }
696 break;
697
698 case NOP_EXPR:
699 /* Convert -((double)float) into (double)(-float). */
700 if (TREE_CODE (type) == REAL_TYPE)
701 {
702 tem = strip_float_extensions (t);
703 if (tem != t && negate_expr_p (tem))
704 return fold_convert_loc (loc, type, negate_expr (tem));
705 }
706 break;
707
708 case CALL_EXPR:
709 /* Negate -f(x) as f(-x). */
710 if (negate_mathfn_p (builtin_mathfn_code (t))
711 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
712 {
713 tree fndecl, arg;
714
715 fndecl = get_callee_fndecl (t);
716 arg = negate_expr (CALL_EXPR_ARG (t, 0));
717 return build_call_expr_loc (loc, fndecl, 1, arg);
718 }
719 break;
720
721 case RSHIFT_EXPR:
722 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
723 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
724 {
725 tree op1 = TREE_OPERAND (t, 1);
726 if (TREE_INT_CST_HIGH (op1) == 0
727 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
728 == TREE_INT_CST_LOW (op1))
729 {
730 tree ntype = TYPE_UNSIGNED (type)
731 ? signed_type_for (type)
732 : unsigned_type_for (type);
733 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
734 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
735 return fold_convert_loc (loc, type, temp);
736 }
737 }
738 break;
739
740 default:
741 break;
742 }
743
744 return NULL_TREE;
745 }
746
747 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
748 negated in a simpler way. Also allow T to be NULL_TREE, in which case
749 NULL_TREE is returned. */
750
751 static tree
752 negate_expr (tree t)
753 {
754 tree type, tem;
755 location_t loc;
756
757 if (t == NULL_TREE)
758 return NULL_TREE;
759
760 loc = EXPR_LOCATION (t);
761 type = TREE_TYPE (t);
762 STRIP_SIGN_NOPS (t);
763
764 tem = fold_negate_expr (loc, t);
765 if (!tem)
766 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
767 return fold_convert_loc (loc, type, tem);
768 }
769 \f
770 /* Split a tree IN into constant, literal and variable parts that could be
771 combined with CODE to make IN. "constant" means an expression with
772 TREE_CONSTANT but that isn't an actual constant. CODE must be a
773 commutative arithmetic operation. Store the constant part into *CONP,
774 the literal in *LITP and return the variable part. If a part isn't
775 present, set it to null. If the tree does not decompose in this way,
776 return the entire tree as the variable part and the other parts as null.
777
778 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
779 case, we negate an operand that was subtracted. Except if it is a
780 literal for which we use *MINUS_LITP instead.
781
782 If NEGATE_P is true, we are negating all of IN, again except a literal
783 for which we use *MINUS_LITP instead.
784
785 If IN is itself a literal or constant, return it as appropriate.
786
787 Note that we do not guarantee that any of the three values will be the
788 same type as IN, but they will have the same signedness and mode. */
789
790 static tree
791 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
792 tree *minus_litp, int negate_p)
793 {
794 tree var = 0;
795
796 *conp = 0;
797 *litp = 0;
798 *minus_litp = 0;
799
800 /* Strip any conversions that don't change the machine mode or signedness. */
801 STRIP_SIGN_NOPS (in);
802
803 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
804 || TREE_CODE (in) == FIXED_CST)
805 *litp = in;
806 else if (TREE_CODE (in) == code
807 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
808 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
809 /* We can associate addition and subtraction together (even
810 though the C standard doesn't say so) for integers because
811 the value is not affected. For reals, the value might be
812 affected, so we can't. */
813 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
814 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
815 {
816 tree op0 = TREE_OPERAND (in, 0);
817 tree op1 = TREE_OPERAND (in, 1);
818 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
819 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
820
821 /* First see if either of the operands is a literal, then a constant. */
822 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
823 || TREE_CODE (op0) == FIXED_CST)
824 *litp = op0, op0 = 0;
825 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
826 || TREE_CODE (op1) == FIXED_CST)
827 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
828
829 if (op0 != 0 && TREE_CONSTANT (op0))
830 *conp = op0, op0 = 0;
831 else if (op1 != 0 && TREE_CONSTANT (op1))
832 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
833
834 /* If we haven't dealt with either operand, this is not a case we can
835 decompose. Otherwise, VAR is either of the ones remaining, if any. */
836 if (op0 != 0 && op1 != 0)
837 var = in;
838 else if (op0 != 0)
839 var = op0;
840 else
841 var = op1, neg_var_p = neg1_p;
842
843 /* Now do any needed negations. */
844 if (neg_litp_p)
845 *minus_litp = *litp, *litp = 0;
846 if (neg_conp_p)
847 *conp = negate_expr (*conp);
848 if (neg_var_p)
849 var = negate_expr (var);
850 }
851 else if (TREE_CODE (in) == BIT_NOT_EXPR
852 && code == PLUS_EXPR)
853 {
854 /* -X - 1 is folded to ~X, undo that here. */
855 *minus_litp = build_one_cst (TREE_TYPE (in));
856 var = negate_expr (TREE_OPERAND (in, 0));
857 }
858 else if (TREE_CONSTANT (in))
859 *conp = in;
860 else
861 var = in;
862
863 if (negate_p)
864 {
865 if (*litp)
866 *minus_litp = *litp, *litp = 0;
867 else if (*minus_litp)
868 *litp = *minus_litp, *minus_litp = 0;
869 *conp = negate_expr (*conp);
870 var = negate_expr (var);
871 }
872
873 return var;
874 }
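
/* Worked example (added, not in the original source): splitting
   IN = X - 5 with CODE == PLUS_EXPR and NEGATE_P == 0 yields

     return value (variable part) = X
     *CONP = 0, *LITP = 0, *MINUS_LITP = 5

   i.e. the subtracted literal is reported through *MINUS_LITP instead
   of being negated.  */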
875
876 /* Re-associate trees split by the above function. T1 and T2 are
877 either expressions to associate or null. Return the new
878 expression, if any. LOC is the location of the new expression. If
879 we build an operation, do it in TYPE and with CODE. */
880
881 static tree
882 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
883 {
884 if (t1 == 0)
885 return t2;
886 else if (t2 == 0)
887 return t1;
888
889 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
890 try to fold this since we will have infinite recursion. But do
891 deal with any NEGATE_EXPRs. */
892 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
893 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
894 {
895 if (code == PLUS_EXPR)
896 {
897 if (TREE_CODE (t1) == NEGATE_EXPR)
898 return build2_loc (loc, MINUS_EXPR, type,
899 fold_convert_loc (loc, type, t2),
900 fold_convert_loc (loc, type,
901 TREE_OPERAND (t1, 0)));
902 else if (TREE_CODE (t2) == NEGATE_EXPR)
903 return build2_loc (loc, MINUS_EXPR, type,
904 fold_convert_loc (loc, type, t1),
905 fold_convert_loc (loc, type,
906 TREE_OPERAND (t2, 0)));
907 else if (integer_zerop (t2))
908 return fold_convert_loc (loc, type, t1);
909 }
910 else if (code == MINUS_EXPR)
911 {
912 if (integer_zerop (t2))
913 return fold_convert_loc (loc, type, t1);
914 }
915
916 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
917 fold_convert_loc (loc, type, t2));
918 }
919
920 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
921 fold_convert_loc (loc, type, t2));
922 }
923 \f
924 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
925 for use in int_const_binop, size_binop and size_diffop. */
926
927 static bool
928 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
929 {
930 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
931 return false;
932 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
933 return false;
934
935 switch (code)
936 {
937 case LSHIFT_EXPR:
938 case RSHIFT_EXPR:
939 case LROTATE_EXPR:
940 case RROTATE_EXPR:
941 return true;
942
943 default:
944 break;
945 }
946
947 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
948 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
949 && TYPE_MODE (type1) == TYPE_MODE (type2);
950 }
951
952
953 /* Combine two integer constants ARG1 and ARG2 under operation CODE
954 to produce a new constant. Return NULL_TREE if we don't know how
955 to evaluate CODE at compile-time. */
956
957 static tree
958 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
959 int overflowable)
960 {
961 double_int op1, op2, res, tmp;
962 tree t;
963 tree type = TREE_TYPE (arg1);
964 bool uns = TYPE_UNSIGNED (type);
965 bool overflow = false;
966
967 op1 = tree_to_double_int (arg1);
968 op2 = tree_to_double_int (arg2);
969
970 switch (code)
971 {
972 case BIT_IOR_EXPR:
973 res = op1 | op2;
974 break;
975
976 case BIT_XOR_EXPR:
977 res = op1 ^ op2;
978 break;
979
980 case BIT_AND_EXPR:
981 res = op1 & op2;
982 break;
983
984 case RSHIFT_EXPR:
985 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
986 break;
987
988 case LSHIFT_EXPR:
989 /* It's unclear from the C standard whether shifts can overflow.
990 The following code ignores overflow; perhaps a C standard
991 interpretation ruling is needed. */
992 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
993 break;
994
995 case RROTATE_EXPR:
996 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
997 break;
998
999 case LROTATE_EXPR:
1000 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1001 break;
1002
1003 case PLUS_EXPR:
1004 res = op1.add_with_sign (op2, false, &overflow);
1005 break;
1006
1007 case MINUS_EXPR:
1008 res = op1.sub_with_overflow (op2, &overflow);
1009 break;
1010
1011 case MULT_EXPR:
1012 res = op1.mul_with_sign (op2, false, &overflow);
1013 break;
1014
1015 case MULT_HIGHPART_EXPR:
1016 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1017 {
1018 bool dummy_overflow;
1019 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1020 return NULL_TREE;
1021 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1022 }
1023 else
1024 {
1025 bool dummy_overflow;
1026 /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
1027 is performed in twice the precision of arguments. */
1028 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1029 res = tmp.rshift (TYPE_PRECISION (type),
1030 2 * TYPE_PRECISION (type), !uns);
1031 }
1032 break;
1033
1034 case TRUNC_DIV_EXPR:
1035 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1036 case EXACT_DIV_EXPR:
1037 /* This is a shortcut for a common special case. */
1038 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1039 && !TREE_OVERFLOW (arg1)
1040 && !TREE_OVERFLOW (arg2)
1041 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1042 {
1043 if (code == CEIL_DIV_EXPR)
1044 op1.low += op2.low - 1;
1045
1046 res.low = op1.low / op2.low, res.high = 0;
1047 break;
1048 }
1049
1050 /* ... fall through ... */
1051
1052 case ROUND_DIV_EXPR:
1053 if (op2.is_zero ())
1054 return NULL_TREE;
1055 if (op2.is_one ())
1056 {
1057 res = op1;
1058 break;
1059 }
1060 if (op1 == op2 && !op1.is_zero ())
1061 {
1062 res = double_int_one;
1063 break;
1064 }
1065 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1066 break;
1067
1068 case TRUNC_MOD_EXPR:
1069 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1070 /* This is a shortcut for a common special case. */
1071 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1072 && !TREE_OVERFLOW (arg1)
1073 && !TREE_OVERFLOW (arg2)
1074 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1075 {
1076 if (code == CEIL_MOD_EXPR)
1077 op1.low += op2.low - 1;
1078 res.low = op1.low % op2.low, res.high = 0;
1079 break;
1080 }
1081
1082 /* ... fall through ... */
1083
1084 case ROUND_MOD_EXPR:
1085 if (op2.is_zero ())
1086 return NULL_TREE;
1087 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1088 break;
1089
1090 case MIN_EXPR:
1091 res = op1.min (op2, uns);
1092 break;
1093
1094 case MAX_EXPR:
1095 res = op1.max (op2, uns);
1096 break;
1097
1098 default:
1099 return NULL_TREE;
1100 }
1101
1102 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1103 (!uns && overflow)
1104 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1105
1106 return t;
1107 }
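
/* Illustrative example (added, not in the original source): assuming
   TWO and THREE are INTEGER_CSTs of signed 32-bit int,

     int_const_binop (PLUS_EXPR, TWO, THREE)

   (the wrapper below, which passes OVERFLOWABLE == 1) yields the
   INTEGER_CST 5, while adding 1 to INT_MAX wraps to INT_MIN and the
   result has TREE_OVERFLOW set because the type is signed.  */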
1108
1109 tree
1110 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1111 {
1112 return int_const_binop_1 (code, arg1, arg2, 1);
1113 }
1114
1115 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1116 constant. We assume ARG1 and ARG2 have the same data type, or at least
1117 are the same kind of constant and the same machine mode. Return zero if
1118 combining the constants is not allowed in the current operating mode. */
1119
1120 static tree
1121 const_binop (enum tree_code code, tree arg1, tree arg2)
1122 {
1123 /* Sanity check for the recursive cases. */
1124 if (!arg1 || !arg2)
1125 return NULL_TREE;
1126
1127 STRIP_NOPS (arg1);
1128 STRIP_NOPS (arg2);
1129
1130 if (TREE_CODE (arg1) == INTEGER_CST)
1131 return int_const_binop (code, arg1, arg2);
1132
1133 if (TREE_CODE (arg1) == REAL_CST)
1134 {
1135 enum machine_mode mode;
1136 REAL_VALUE_TYPE d1;
1137 REAL_VALUE_TYPE d2;
1138 REAL_VALUE_TYPE value;
1139 REAL_VALUE_TYPE result;
1140 bool inexact;
1141 tree t, type;
1142
1143 /* The following codes are handled by real_arithmetic. */
1144 switch (code)
1145 {
1146 case PLUS_EXPR:
1147 case MINUS_EXPR:
1148 case MULT_EXPR:
1149 case RDIV_EXPR:
1150 case MIN_EXPR:
1151 case MAX_EXPR:
1152 break;
1153
1154 default:
1155 return NULL_TREE;
1156 }
1157
1158 d1 = TREE_REAL_CST (arg1);
1159 d2 = TREE_REAL_CST (arg2);
1160
1161 type = TREE_TYPE (arg1);
1162 mode = TYPE_MODE (type);
1163
1164 /* Don't perform the operation if we honor signaling NaNs and
1165 either operand is a NaN. */
1166 if (HONOR_SNANS (mode)
1167 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1168 return NULL_TREE;
1169
1170 /* Don't perform the operation if it would raise a division
1171 by zero exception. */
1172 if (code == RDIV_EXPR
1173 && REAL_VALUES_EQUAL (d2, dconst0)
1174 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1175 return NULL_TREE;
1176
1177 /* If either operand is a NaN, just return it. Otherwise, set up
1178 for floating-point trap; we return an overflow. */
1179 if (REAL_VALUE_ISNAN (d1))
1180 return arg1;
1181 else if (REAL_VALUE_ISNAN (d2))
1182 return arg2;
1183
1184 inexact = real_arithmetic (&value, code, &d1, &d2);
1185 real_convert (&result, mode, &value);
1186
1187 /* Don't constant fold this floating point operation if
1188 the result has overflowed and flag_trapping_math is set. */
1189 if (flag_trapping_math
1190 && MODE_HAS_INFINITIES (mode)
1191 && REAL_VALUE_ISINF (result)
1192 && !REAL_VALUE_ISINF (d1)
1193 && !REAL_VALUE_ISINF (d2))
1194 return NULL_TREE;
1195
1196 /* Don't constant fold this floating point operation if the
1197 result may depend upon the run-time rounding mode and
1198 flag_rounding_math is set, or if GCC's software emulation
1199 is unable to accurately represent the result. */
1200 if ((flag_rounding_math
1201 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1202 && (inexact || !real_identical (&result, &value)))
1203 return NULL_TREE;
1204
1205 t = build_real (type, result);
1206
1207 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1208 return t;
1209 }
1210
1211 if (TREE_CODE (arg1) == FIXED_CST)
1212 {
1213 FIXED_VALUE_TYPE f1;
1214 FIXED_VALUE_TYPE f2;
1215 FIXED_VALUE_TYPE result;
1216 tree t, type;
1217 int sat_p;
1218 bool overflow_p;
1219
1220 /* The following codes are handled by fixed_arithmetic. */
1221 switch (code)
1222 {
1223 case PLUS_EXPR:
1224 case MINUS_EXPR:
1225 case MULT_EXPR:
1226 case TRUNC_DIV_EXPR:
1227 f2 = TREE_FIXED_CST (arg2);
1228 break;
1229
1230 case LSHIFT_EXPR:
1231 case RSHIFT_EXPR:
1232 f2.data.high = TREE_INT_CST_HIGH (arg2);
1233 f2.data.low = TREE_INT_CST_LOW (arg2);
1234 f2.mode = SImode;
1235 break;
1236
1237 default:
1238 return NULL_TREE;
1239 }
1240
1241 f1 = TREE_FIXED_CST (arg1);
1242 type = TREE_TYPE (arg1);
1243 sat_p = TYPE_SATURATING (type);
1244 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1245 t = build_fixed (type, result);
1246 /* Propagate overflow flags. */
1247 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1248 TREE_OVERFLOW (t) = 1;
1249 return t;
1250 }
1251
1252 if (TREE_CODE (arg1) == COMPLEX_CST)
1253 {
1254 tree type = TREE_TYPE (arg1);
1255 tree r1 = TREE_REALPART (arg1);
1256 tree i1 = TREE_IMAGPART (arg1);
1257 tree r2 = TREE_REALPART (arg2);
1258 tree i2 = TREE_IMAGPART (arg2);
1259 tree real, imag;
1260
1261 switch (code)
1262 {
1263 case PLUS_EXPR:
1264 case MINUS_EXPR:
1265 real = const_binop (code, r1, r2);
1266 imag = const_binop (code, i1, i2);
1267 break;
1268
1269 case MULT_EXPR:
1270 if (COMPLEX_FLOAT_TYPE_P (type))
1271 return do_mpc_arg2 (arg1, arg2, type,
1272 /* do_nonfinite= */ folding_initializer,
1273 mpc_mul);
1274
1275 real = const_binop (MINUS_EXPR,
1276 const_binop (MULT_EXPR, r1, r2),
1277 const_binop (MULT_EXPR, i1, i2));
1278 imag = const_binop (PLUS_EXPR,
1279 const_binop (MULT_EXPR, r1, i2),
1280 const_binop (MULT_EXPR, i1, r2));
1281 break;
1282
1283 case RDIV_EXPR:
1284 if (COMPLEX_FLOAT_TYPE_P (type))
1285 return do_mpc_arg2 (arg1, arg2, type,
1286 /* do_nonfinite= */ folding_initializer,
1287 mpc_div);
1288 /* Fallthru ... */
1289 case TRUNC_DIV_EXPR:
1290 case CEIL_DIV_EXPR:
1291 case FLOOR_DIV_EXPR:
1292 case ROUND_DIV_EXPR:
1293 if (flag_complex_method == 0)
1294 {
1295 /* Keep this algorithm in sync with
1296 tree-complex.c:expand_complex_div_straight().
1297
1298 Expand complex division to scalars, straightforward algorithm.
1299 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1300 t = br*br + bi*bi
1301 */
1302 tree magsquared
1303 = const_binop (PLUS_EXPR,
1304 const_binop (MULT_EXPR, r2, r2),
1305 const_binop (MULT_EXPR, i2, i2));
1306 tree t1
1307 = const_binop (PLUS_EXPR,
1308 const_binop (MULT_EXPR, r1, r2),
1309 const_binop (MULT_EXPR, i1, i2));
1310 tree t2
1311 = const_binop (MINUS_EXPR,
1312 const_binop (MULT_EXPR, i1, r2),
1313 const_binop (MULT_EXPR, r1, i2));
1314
1315 real = const_binop (code, t1, magsquared);
1316 imag = const_binop (code, t2, magsquared);
1317 }
1318 else
1319 {
1320 /* Keep this algorithm in sync with
1321 tree-complex.c:expand_complex_div_wide().
1322
1323 Expand complex division to scalars, modified algorithm to minimize
1324 overflow with wide input ranges. */
1325 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1326 fold_abs_const (r2, TREE_TYPE (type)),
1327 fold_abs_const (i2, TREE_TYPE (type)));
1328
1329 if (integer_nonzerop (compare))
1330 {
1331 /* In the TRUE branch, we compute
1332 ratio = br/bi;
1333 div = (br * ratio) + bi;
1334 tr = (ar * ratio) + ai;
1335 ti = (ai * ratio) - ar;
1336 tr = tr / div;
1337 ti = ti / div; */
1338 tree ratio = const_binop (code, r2, i2);
1339 tree div = const_binop (PLUS_EXPR, i2,
1340 const_binop (MULT_EXPR, r2, ratio));
1341 real = const_binop (MULT_EXPR, r1, ratio);
1342 real = const_binop (PLUS_EXPR, real, i1);
1343 real = const_binop (code, real, div);
1344
1345 imag = const_binop (MULT_EXPR, i1, ratio);
1346 imag = const_binop (MINUS_EXPR, imag, r1);
1347 imag = const_binop (code, imag, div);
1348 }
1349 else
1350 {
1351 /* In the FALSE branch, we compute
1352 ratio = d/c;
1353 div = (d * ratio) + c;
1354 tr = (b * ratio) + a;
1355 ti = b - (a * ratio);
1356 tr = tr / div;
1357 ti = ti / div; */
1358 tree ratio = const_binop (code, i2, r2);
1359 tree div = const_binop (PLUS_EXPR, r2,
1360 const_binop (MULT_EXPR, i2, ratio));
1361
1362 real = const_binop (MULT_EXPR, i1, ratio);
1363 real = const_binop (PLUS_EXPR, real, r1);
1364 real = const_binop (code, real, div);
1365
1366 imag = const_binop (MULT_EXPR, r1, ratio);
1367 imag = const_binop (MINUS_EXPR, i1, imag);
1368 imag = const_binop (code, imag, div);
1369 }
1370 }
1371 break;
1372
1373 default:
1374 return NULL_TREE;
1375 }
1376
1377 if (real && imag)
1378 return build_complex (type, real, imag);
1379 }
1380
1381 if (TREE_CODE (arg1) == VECTOR_CST
1382 && TREE_CODE (arg2) == VECTOR_CST)
1383 {
1384 tree type = TREE_TYPE (arg1);
1385 int count = TYPE_VECTOR_SUBPARTS (type), i;
1386 tree *elts = XALLOCAVEC (tree, count);
1387
1388 for (i = 0; i < count; i++)
1389 {
1390 tree elem1 = VECTOR_CST_ELT (arg1, i);
1391 tree elem2 = VECTOR_CST_ELT (arg2, i);
1392
1393 elts[i] = const_binop (code, elem1, elem2);
1394
1395 /* It is possible that const_binop cannot handle the given
1396 code and returns NULL_TREE. */
1397 if (elts[i] == NULL_TREE)
1398 return NULL_TREE;
1399 }
1400
1401 return build_vector (type, elts);
1402 }
1403
1404 /* Shifts allow a scalar offset for a vector. */
1405 if (TREE_CODE (arg1) == VECTOR_CST
1406 && TREE_CODE (arg2) == INTEGER_CST)
1407 {
1408 tree type = TREE_TYPE (arg1);
1409 int count = TYPE_VECTOR_SUBPARTS (type), i;
1410 tree *elts = XALLOCAVEC (tree, count);
1411
1412 if (code == VEC_LSHIFT_EXPR
1413 || code == VEC_RSHIFT_EXPR)
1414 {
1415 if (!host_integerp (arg2, 1))
1416 return NULL_TREE;
1417
1418 unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
1419 unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
1420 unsigned HOST_WIDE_INT innerc
1421 = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
1422 if (shiftc >= outerc || (shiftc % innerc) != 0)
1423 return NULL_TREE;
1424 int offset = shiftc / innerc;
1425 /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
1426 For reductions, the compiler always emits VEC_RSHIFT_EXPR;
1427 for !BYTES_BIG_ENDIAN it picks the first vector element, but
1428 for BYTES_BIG_ENDIAN the last element of the vector. */
1429 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1430 offset = -offset;
1431 tree zero = build_zero_cst (TREE_TYPE (type));
1432 for (i = 0; i < count; i++)
1433 {
1434 if (i + offset < 0 || i + offset >= count)
1435 elts[i] = zero;
1436 else
1437 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1438 }
1439 }
1440 else
1441 for (i = 0; i < count; i++)
1442 {
1443 tree elem1 = VECTOR_CST_ELT (arg1, i);
1444
1445 elts[i] = const_binop (code, elem1, arg2);
1446
1447 /* It is possible that const_binop cannot handle the given
1448 code and returns NULL_TREE. */
1449 if (elts[i] == NULL_TREE)
1450 return NULL_TREE;
1451 }
1452
1453 return build_vector (type, elts);
1454 }
1455 return NULL_TREE;
1456 }
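
/* Worked example (added, not in the original source): for a complex
   integer type, the MULT_EXPR branch above computes

     (1 + 2i) * (3 + 4i):
       real = 1*3 - 2*4 = -5
       imag = 1*4 + 2*3 = 10

   i.e. build_complex (type, -5, 10); complex float constants are
   handed to mpc_mul via do_mpc_arg2 instead.  */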
1457
1458 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1459 indicates which particular sizetype to create. */
1460
1461 tree
1462 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1463 {
1464 return build_int_cst (sizetype_tab[(int) kind], number);
1465 }
1466 \f
1467 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1468 is a tree code. The type of the result is taken from the operands.
1469 Both must be equivalent integer types, ala int_binop_types_match_p.
1470 If the operands are constant, so is the result. */
1471
1472 tree
1473 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1474 {
1475 tree type = TREE_TYPE (arg0);
1476
1477 if (arg0 == error_mark_node || arg1 == error_mark_node)
1478 return error_mark_node;
1479
1480 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1481 TREE_TYPE (arg1)));
1482
1483 /* Handle the special case of two integer constants faster. */
1484 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1485 {
1486 /* And some specific cases even faster than that. */
1487 if (code == PLUS_EXPR)
1488 {
1489 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1490 return arg1;
1491 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1492 return arg0;
1493 }
1494 else if (code == MINUS_EXPR)
1495 {
1496 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1497 return arg0;
1498 }
1499 else if (code == MULT_EXPR)
1500 {
1501 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1502 return arg1;
1503 }
1504
1505 /* Handle general case of two integer constants. For sizetype
1506 constant calculations we always want to know about overflow,
1507 even in the unsigned case. */
1508 return int_const_binop_1 (code, arg0, arg1, -1);
1509 }
1510
1511 return fold_build2_loc (loc, code, type, arg0, arg1);
1512 }
1513
1514 /* Given two values, either both of sizetype or both of bitsizetype,
1515 compute the difference between the two values. Return the value
1516 in signed type corresponding to the type of the operands. */
1517
1518 tree
1519 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1520 {
1521 tree type = TREE_TYPE (arg0);
1522 tree ctype;
1523
1524 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1525 TREE_TYPE (arg1)));
1526
1527 /* If the type is already signed, just do the simple thing. */
1528 if (!TYPE_UNSIGNED (type))
1529 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1530
1531 if (type == sizetype)
1532 ctype = ssizetype;
1533 else if (type == bitsizetype)
1534 ctype = sbitsizetype;
1535 else
1536 ctype = signed_type_for (type);
1537
1538 /* If either operand is not a constant, do the conversions to the signed
1539 type and subtract. The hardware will do the right thing with any
1540 overflow in the subtraction. */
1541 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1542 return size_binop_loc (loc, MINUS_EXPR,
1543 fold_convert_loc (loc, ctype, arg0),
1544 fold_convert_loc (loc, ctype, arg1));
1545
1546 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1547 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1548 overflow) and negate (which can't either). Special-case a result
1549 of zero while we're here. */
1550 if (tree_int_cst_equal (arg0, arg1))
1551 return build_int_cst (ctype, 0);
1552 else if (tree_int_cst_lt (arg1, arg0))
1553 return fold_convert_loc (loc, ctype,
1554 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1555 else
1556 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1557 fold_convert_loc (loc, ctype,
1558 size_binop_loc (loc,
1559 MINUS_EXPR,
1560 arg1, arg0)));
1561 }
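
/* Illustrative example (added, not in the original source): since
   sizetype is unsigned,

     size_diffop_loc (loc, size_int (3), size_int (5))

   subtracts the other way around and negates, returning the ssizetype
   constant -2 rather than a huge wrapped value.  */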
1562 \f
1563 /* A subroutine of fold_convert_const handling conversions of an
1564 INTEGER_CST to another integer type. */
1565
1566 static tree
1567 fold_convert_const_int_from_int (tree type, const_tree arg1)
1568 {
1569 tree t;
1570
1571 /* Given an integer constant, make new constant with new type,
1572 appropriately sign-extended or truncated. */
1573 t = force_fit_type_double (type, tree_to_double_int (arg1),
1574 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1575 (TREE_INT_CST_HIGH (arg1) < 0
1576 && (TYPE_UNSIGNED (type)
1577 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1578 | TREE_OVERFLOW (arg1));
1579
1580 return t;
1581 }
1582
1583 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1584 to an integer type. */
1585
1586 static tree
1587 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1588 {
1589 int overflow = 0;
1590 tree t;
1591
1592 /* The following code implements the floating point to integer
1593 conversion rules required by the Java Language Specification,
1594 that IEEE NaNs are mapped to zero and values that overflow
1595 the target precision saturate, i.e. values greater than
1596 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1597 are mapped to INT_MIN. These semantics are allowed by the
1598 C and C++ standards that simply state that the behavior of
1599 FP-to-integer conversion is unspecified upon overflow. */
1600
1601 double_int val;
1602 REAL_VALUE_TYPE r;
1603 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1604
1605 switch (code)
1606 {
1607 case FIX_TRUNC_EXPR:
1608 real_trunc (&r, VOIDmode, &x);
1609 break;
1610
1611 default:
1612 gcc_unreachable ();
1613 }
1614
1615 /* If R is NaN, return zero and show we have an overflow. */
1616 if (REAL_VALUE_ISNAN (r))
1617 {
1618 overflow = 1;
1619 val = double_int_zero;
1620 }
1621
1622 /* See if R is less than the lower bound or greater than the
1623 upper bound. */
1624
1625 if (! overflow)
1626 {
1627 tree lt = TYPE_MIN_VALUE (type);
1628 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1629 if (REAL_VALUES_LESS (r, l))
1630 {
1631 overflow = 1;
1632 val = tree_to_double_int (lt);
1633 }
1634 }
1635
1636 if (! overflow)
1637 {
1638 tree ut = TYPE_MAX_VALUE (type);
1639 if (ut)
1640 {
1641 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1642 if (REAL_VALUES_LESS (u, r))
1643 {
1644 overflow = 1;
1645 val = tree_to_double_int (ut);
1646 }
1647 }
1648 }
1649
1650 if (! overflow)
1651 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1652
1653 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1654 return t;
1655 }
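
/* Illustrative example (added, not in the original source): converting
   REAL_CSTs to 32-bit int with FIX_TRUNC_EXPR follows the saturating
   rules described above:

     3.9  ->  3
     1e30 ->  2147483647 (INT_MAX), with TREE_OVERFLOW set
     NaN  ->  0, with TREE_OVERFLOW set  */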
1656
1657 /* A subroutine of fold_convert_const handling conversions of a
1658 FIXED_CST to an integer type. */
1659
1660 static tree
1661 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1662 {
1663 tree t;
1664 double_int temp, temp_trunc;
1665 unsigned int mode;
1666
1667 /* Right shift FIXED_CST to temp by fbit. */
1668 temp = TREE_FIXED_CST (arg1).data;
1669 mode = TREE_FIXED_CST (arg1).mode;
1670 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1671 {
1672 temp = temp.rshift (GET_MODE_FBIT (mode),
1673 HOST_BITS_PER_DOUBLE_INT,
1674 SIGNED_FIXED_POINT_MODE_P (mode));
1675
1676 /* Left shift temp to temp_trunc by fbit. */
1677 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1678 HOST_BITS_PER_DOUBLE_INT,
1679 SIGNED_FIXED_POINT_MODE_P (mode));
1680 }
1681 else
1682 {
1683 temp = double_int_zero;
1684 temp_trunc = double_int_zero;
1685 }
1686
1687 /* If FIXED_CST is negative, we need to round the value toward 0:
1688 if any fractional bits are nonzero, add 1 to temp. */
1689 if (SIGNED_FIXED_POINT_MODE_P (mode)
1690 && temp_trunc.is_negative ()
1691 && TREE_FIXED_CST (arg1).data != temp_trunc)
1692 temp += double_int_one;
1693
1694 /* Given a fixed-point constant, make new constant with new type,
1695 appropriately sign-extended or truncated. */
1696 t = force_fit_type_double (type, temp, -1,
1697 (temp.is_negative ()
1698 && (TYPE_UNSIGNED (type)
1699 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1700 | TREE_OVERFLOW (arg1));
1701
1702 return t;
1703 }
1704
1705 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1706 to another floating point type. */
1707
1708 static tree
1709 fold_convert_const_real_from_real (tree type, const_tree arg1)
1710 {
1711 REAL_VALUE_TYPE value;
1712 tree t;
1713
1714 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1715 t = build_real (type, value);
1716
1717 /* If converting an infinity or NAN to a representation that doesn't
1718 have one, set the overflow bit so that we can produce some kind of
1719 error message at the appropriate point if necessary. It's not the
1720 most user-friendly message, but it's better than nothing. */
1721 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1722 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1723 TREE_OVERFLOW (t) = 1;
1724 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1725 && !MODE_HAS_NANS (TYPE_MODE (type)))
1726 TREE_OVERFLOW (t) = 1;
1727 /* Regular overflow, conversion produced an infinity in a mode that
1728 can't represent them. */
1729 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1730 && REAL_VALUE_ISINF (value)
1731 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1732 TREE_OVERFLOW (t) = 1;
1733 else
1734 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1735 return t;
1736 }
1737
1738 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1739 to a floating point type. */
1740
1741 static tree
1742 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1743 {
1744 REAL_VALUE_TYPE value;
1745 tree t;
1746
1747 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1748 t = build_real (type, value);
1749
1750 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1751 return t;
1752 }
1753
1754 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1755 to another fixed-point type. */
1756
1757 static tree
1758 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1759 {
1760 FIXED_VALUE_TYPE value;
1761 tree t;
1762 bool overflow_p;
1763
1764 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1765 TYPE_SATURATING (type));
1766 t = build_fixed (type, value);
1767
1768 /* Propagate overflow flags. */
1769 if (overflow_p | TREE_OVERFLOW (arg1))
1770 TREE_OVERFLOW (t) = 1;
1771 return t;
1772 }
1773
1774 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1775 to a fixed-point type. */
1776
1777 static tree
1778 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1779 {
1780 FIXED_VALUE_TYPE value;
1781 tree t;
1782 bool overflow_p;
1783
1784 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1785 TREE_INT_CST (arg1),
1786 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1787 TYPE_SATURATING (type));
1788 t = build_fixed (type, value);
1789
1790 /* Propagate overflow flags. */
1791 if (overflow_p | TREE_OVERFLOW (arg1))
1792 TREE_OVERFLOW (t) = 1;
1793 return t;
1794 }
1795
1796 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1797 to a fixed-point type. */
1798
1799 static tree
1800 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1801 {
1802 FIXED_VALUE_TYPE value;
1803 tree t;
1804 bool overflow_p;
1805
1806 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1807 &TREE_REAL_CST (arg1),
1808 TYPE_SATURATING (type));
1809 t = build_fixed (type, value);
1810
1811 /* Propagate overflow flags. */
1812 if (overflow_p | TREE_OVERFLOW (arg1))
1813 TREE_OVERFLOW (t) = 1;
1814 return t;
1815 }
1816
1817 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1818 type TYPE. If no simplification can be done return NULL_TREE. */
1819
1820 static tree
1821 fold_convert_const (enum tree_code code, tree type, tree arg1)
1822 {
1823 if (TREE_TYPE (arg1) == type)
1824 return arg1;
1825
1826 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1827 || TREE_CODE (type) == OFFSET_TYPE)
1828 {
1829 if (TREE_CODE (arg1) == INTEGER_CST)
1830 return fold_convert_const_int_from_int (type, arg1);
1831 else if (TREE_CODE (arg1) == REAL_CST)
1832 return fold_convert_const_int_from_real (code, type, arg1);
1833 else if (TREE_CODE (arg1) == FIXED_CST)
1834 return fold_convert_const_int_from_fixed (type, arg1);
1835 }
1836 else if (TREE_CODE (type) == REAL_TYPE)
1837 {
1838 if (TREE_CODE (arg1) == INTEGER_CST)
1839 return build_real_from_int_cst (type, arg1);
1840 else if (TREE_CODE (arg1) == REAL_CST)
1841 return fold_convert_const_real_from_real (type, arg1);
1842 else if (TREE_CODE (arg1) == FIXED_CST)
1843 return fold_convert_const_real_from_fixed (type, arg1);
1844 }
1845 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1846 {
1847 if (TREE_CODE (arg1) == FIXED_CST)
1848 return fold_convert_const_fixed_from_fixed (type, arg1);
1849 else if (TREE_CODE (arg1) == INTEGER_CST)
1850 return fold_convert_const_fixed_from_int (type, arg1);
1851 else if (TREE_CODE (arg1) == REAL_CST)
1852 return fold_convert_const_fixed_from_real (type, arg1);
1853 }
1854 return NULL_TREE;
1855 }
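
/* As an illustration (hypothetical C input, not from the sources here):
   folding (unsigned char) 260 reaches fold_convert_const with an
   INTEGER_CST and is reduced modulo 256 to 4 by
   fold_convert_const_int_from_int, while (int) 2.5 goes through
   fold_convert_const_int_from_real and truncates toward zero to 2.  */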
1856
1857 /* Construct a vector of zero elements of vector type TYPE. */
1858
1859 static tree
1860 build_zero_vector (tree type)
1861 {
1862 tree t;
1863
1864 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1865 return build_vector_from_val (type, t);
1866 }
1867
1868 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1869
1870 bool
1871 fold_convertible_p (const_tree type, const_tree arg)
1872 {
1873 tree orig = TREE_TYPE (arg);
1874
1875 if (type == orig)
1876 return true;
1877
1878 if (TREE_CODE (arg) == ERROR_MARK
1879 || TREE_CODE (type) == ERROR_MARK
1880 || TREE_CODE (orig) == ERROR_MARK)
1881 return false;
1882
1883 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1884 return true;
1885
1886 switch (TREE_CODE (type))
1887 {
1888 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1889 case POINTER_TYPE: case REFERENCE_TYPE:
1890 case OFFSET_TYPE:
1891 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1892 || TREE_CODE (orig) == OFFSET_TYPE)
1893 return true;
1894 return (TREE_CODE (orig) == VECTOR_TYPE
1895 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1896
1897 case REAL_TYPE:
1898 case FIXED_POINT_TYPE:
1899 case COMPLEX_TYPE:
1900 case VECTOR_TYPE:
1901 case VOID_TYPE:
1902 return TREE_CODE (type) == TREE_CODE (orig);
1903
1904 default:
1905 return false;
1906 }
1907 }
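
/* For example, an int source is convertible to long, to an enum or to a
   pointer type under this predicate, but int to a COMPLEX_TYPE or
   VECTOR_TYPE is not: for those target codes the source must already
   have the same code (and a vector source feeding an integral target
   must additionally match in bit size).  */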
1908
1909 /* Convert expression ARG to type TYPE. Used by the middle-end for
1910 simple conversions in preference to calling the front-end's convert. */
1911
1912 tree
1913 fold_convert_loc (location_t loc, tree type, tree arg)
1914 {
1915 tree orig = TREE_TYPE (arg);
1916 tree tem;
1917
1918 if (type == orig)
1919 return arg;
1920
1921 if (TREE_CODE (arg) == ERROR_MARK
1922 || TREE_CODE (type) == ERROR_MARK
1923 || TREE_CODE (orig) == ERROR_MARK)
1924 return error_mark_node;
1925
1926 switch (TREE_CODE (type))
1927 {
1928 case POINTER_TYPE:
1929 case REFERENCE_TYPE:
1930 /* Handle conversions between pointers to different address spaces. */
1931 if (POINTER_TYPE_P (orig)
1932 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1933 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1934 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1935 /* fall through */
1936
1937 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1938 case OFFSET_TYPE:
1939 if (TREE_CODE (arg) == INTEGER_CST)
1940 {
1941 tem = fold_convert_const (NOP_EXPR, type, arg);
1942 if (tem != NULL_TREE)
1943 return tem;
1944 }
1945 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1946 || TREE_CODE (orig) == OFFSET_TYPE)
1947 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1948 if (TREE_CODE (orig) == COMPLEX_TYPE)
1949 return fold_convert_loc (loc, type,
1950 fold_build1_loc (loc, REALPART_EXPR,
1951 TREE_TYPE (orig), arg));
1952 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1953 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1954 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1955
1956 case REAL_TYPE:
1957 if (TREE_CODE (arg) == INTEGER_CST)
1958 {
1959 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1962 }
1963 else if (TREE_CODE (arg) == REAL_CST)
1964 {
1965 tem = fold_convert_const (NOP_EXPR, type, arg);
1966 if (tem != NULL_TREE)
1967 return tem;
1968 }
1969 else if (TREE_CODE (arg) == FIXED_CST)
1970 {
1971 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1972 if (tem != NULL_TREE)
1973 return tem;
1974 }
1975
1976 switch (TREE_CODE (orig))
1977 {
1978 case INTEGER_TYPE:
1979 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1980 case POINTER_TYPE: case REFERENCE_TYPE:
1981 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1982
1983 case REAL_TYPE:
1984 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1985
1986 case FIXED_POINT_TYPE:
1987 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1988
1989 case COMPLEX_TYPE:
1990 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1991 return fold_convert_loc (loc, type, tem);
1992
1993 default:
1994 gcc_unreachable ();
1995 }
1996
1997 case FIXED_POINT_TYPE:
1998 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1999 || TREE_CODE (arg) == REAL_CST)
2000 {
2001 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2002 if (tem != NULL_TREE)
2003 goto fold_convert_exit;
2004 }
2005
2006 switch (TREE_CODE (orig))
2007 {
2008 case FIXED_POINT_TYPE:
2009 case INTEGER_TYPE:
2010 case ENUMERAL_TYPE:
2011 case BOOLEAN_TYPE:
2012 case REAL_TYPE:
2013 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2014
2015 case COMPLEX_TYPE:
2016 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2017 return fold_convert_loc (loc, type, tem);
2018
2019 default:
2020 gcc_unreachable ();
2021 }
2022
2023 case COMPLEX_TYPE:
2024 switch (TREE_CODE (orig))
2025 {
2026 case INTEGER_TYPE:
2027 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2028 case POINTER_TYPE: case REFERENCE_TYPE:
2029 case REAL_TYPE:
2030 case FIXED_POINT_TYPE:
2031 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2032 fold_convert_loc (loc, TREE_TYPE (type), arg),
2033 fold_convert_loc (loc, TREE_TYPE (type),
2034 integer_zero_node));
2035 case COMPLEX_TYPE:
2036 {
2037 tree rpart, ipart;
2038
2039 if (TREE_CODE (arg) == COMPLEX_EXPR)
2040 {
2041 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2042 TREE_OPERAND (arg, 0));
2043 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2044 TREE_OPERAND (arg, 1));
2045 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2046 }
2047
2048 arg = save_expr (arg);
2049 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2050 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2051 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2052 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2053 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2054 }
2055
2056 default:
2057 gcc_unreachable ();
2058 }
2059
2060 case VECTOR_TYPE:
2061 if (integer_zerop (arg))
2062 return build_zero_vector (type);
2063 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2064 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2065 || TREE_CODE (orig) == VECTOR_TYPE);
2066 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2067
2068 case VOID_TYPE:
2069 tem = fold_ignored_result (arg);
2070 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2071
2072 default:
2073 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2074 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2075 gcc_unreachable ();
2076 }
2077 fold_convert_exit:
2078 protected_set_expr_location_unshare (tem, loc);
2079 return tem;
2080 }
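
/* E.g. converting a scalar ARG to a COMPLEX_TYPE builds
   COMPLEX_EXPR <(T) arg, (T) 0> with T the element type, converting a
   COMPLEX_EXPR to another complex type converts the two parts
   separately, and a constant ARG is folded up front via
   fold_convert_const where possible.  */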
2081 \f
2082 /* Return false if X can be assumed not to be an lvalue, true
2083 otherwise. */
2084
2085 static bool
2086 maybe_lvalue_p (const_tree x)
2087 {
2088 /* We only need to wrap lvalue tree codes. */
2089 switch (TREE_CODE (x))
2090 {
2091 case VAR_DECL:
2092 case PARM_DECL:
2093 case RESULT_DECL:
2094 case LABEL_DECL:
2095 case FUNCTION_DECL:
2096 case SSA_NAME:
2097
2098 case COMPONENT_REF:
2099 case MEM_REF:
2100 case INDIRECT_REF:
2101 case ARRAY_REF:
2102 case ARRAY_RANGE_REF:
2103 case BIT_FIELD_REF:
2104 case OBJ_TYPE_REF:
2105
2106 case REALPART_EXPR:
2107 case IMAGPART_EXPR:
2108 case PREINCREMENT_EXPR:
2109 case PREDECREMENT_EXPR:
2110 case SAVE_EXPR:
2111 case TRY_CATCH_EXPR:
2112 case WITH_CLEANUP_EXPR:
2113 case COMPOUND_EXPR:
2114 case MODIFY_EXPR:
2115 case TARGET_EXPR:
2116 case COND_EXPR:
2117 case BIND_EXPR:
2118 break;
2119
2120 default:
2121 /* Assume the worst for front-end tree codes. */
2122 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2123 break;
2124 return false;
2125 }
2126
2127 return true;
2128 }
2129
2130 /* Return an expr equal to X but certainly not valid as an lvalue. */
2131
2132 tree
2133 non_lvalue_loc (location_t loc, tree x)
2134 {
2135 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2136 us. */
2137 if (in_gimple_form)
2138 return x;
2139
2140 if (! maybe_lvalue_p (x))
2141 return x;
2142 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2143 }
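
/* For instance, when fold turns x + 0 into x it wraps the result as
   NON_LVALUE_EXPR <x> via this function, so the simplified expression
   cannot accidentally be used as an assignment target; a constant
   argument is returned unchanged because maybe_lvalue_p already
   rejects it.  */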
2144
2145 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2146 Zero means allow extended lvalues. */
2147
2148 int pedantic_lvalues;
2149
2150 /* When pedantic, return an expr equal to X but certainly not valid as a
2151 pedantic lvalue. Otherwise, return X. */
2152
2153 static tree
2154 pedantic_non_lvalue_loc (location_t loc, tree x)
2155 {
2156 if (pedantic_lvalues)
2157 return non_lvalue_loc (loc, x);
2158
2159 return protected_set_expr_location_unshare (x, loc);
2160 }
2161 \f
2162 /* Given a tree comparison code, return the code that is the logical inverse.
2163 It is generally not safe to do this for floating-point comparisons, except
2164 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2165 ERROR_MARK in this case. */
2166
2167 enum tree_code
2168 invert_tree_comparison (enum tree_code code, bool honor_nans)
2169 {
2170 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2171 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2172 return ERROR_MARK;
2173
2174 switch (code)
2175 {
2176 case EQ_EXPR:
2177 return NE_EXPR;
2178 case NE_EXPR:
2179 return EQ_EXPR;
2180 case GT_EXPR:
2181 return honor_nans ? UNLE_EXPR : LE_EXPR;
2182 case GE_EXPR:
2183 return honor_nans ? UNLT_EXPR : LT_EXPR;
2184 case LT_EXPR:
2185 return honor_nans ? UNGE_EXPR : GE_EXPR;
2186 case LE_EXPR:
2187 return honor_nans ? UNGT_EXPR : GT_EXPR;
2188 case LTGT_EXPR:
2189 return UNEQ_EXPR;
2190 case UNEQ_EXPR:
2191 return LTGT_EXPR;
2192 case UNGT_EXPR:
2193 return LE_EXPR;
2194 case UNGE_EXPR:
2195 return LT_EXPR;
2196 case UNLT_EXPR:
2197 return GE_EXPR;
2198 case UNLE_EXPR:
2199 return GT_EXPR;
2200 case ORDERED_EXPR:
2201 return UNORDERED_EXPR;
2202 case UNORDERED_EXPR:
2203 return ORDERED_EXPR;
2204 default:
2205 gcc_unreachable ();
2206 }
2207 }
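
/* Example: without NaNs, the inverse of a < b is a >= b.  When NaNs are
   honored the inverse is a UNGE b, since !(a < b) must also hold for
   unordered operands; and if flag_trapping_math is on as well,
   ERROR_MARK is returned because LT may trap on unordered operands
   while its inverse must not.  */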
2208
2209 /* Similar, but return the comparison that results if the operands are
2210 swapped. This is safe for floating-point. */
2211
2212 enum tree_code
2213 swap_tree_comparison (enum tree_code code)
2214 {
2215 switch (code)
2216 {
2217 case EQ_EXPR:
2218 case NE_EXPR:
2219 case ORDERED_EXPR:
2220 case UNORDERED_EXPR:
2221 case LTGT_EXPR:
2222 case UNEQ_EXPR:
2223 return code;
2224 case GT_EXPR:
2225 return LT_EXPR;
2226 case GE_EXPR:
2227 return LE_EXPR;
2228 case LT_EXPR:
2229 return GT_EXPR;
2230 case LE_EXPR:
2231 return GE_EXPR;
2232 case UNGT_EXPR:
2233 return UNLT_EXPR;
2234 case UNGE_EXPR:
2235 return UNLE_EXPR;
2236 case UNLT_EXPR:
2237 return UNGT_EXPR;
2238 case UNLE_EXPR:
2239 return UNGE_EXPR;
2240 default:
2241 gcc_unreachable ();
2242 }
2243 }
2244
2245
2246 /* Convert a comparison tree code from an enum tree_code representation
2247 into a compcode bit-based encoding. This function is the inverse of
2248 compcode_to_comparison. */
2249
2250 static enum comparison_code
2251 comparison_to_compcode (enum tree_code code)
2252 {
2253 switch (code)
2254 {
2255 case LT_EXPR:
2256 return COMPCODE_LT;
2257 case EQ_EXPR:
2258 return COMPCODE_EQ;
2259 case LE_EXPR:
2260 return COMPCODE_LE;
2261 case GT_EXPR:
2262 return COMPCODE_GT;
2263 case NE_EXPR:
2264 return COMPCODE_NE;
2265 case GE_EXPR:
2266 return COMPCODE_GE;
2267 case ORDERED_EXPR:
2268 return COMPCODE_ORD;
2269 case UNORDERED_EXPR:
2270 return COMPCODE_UNORD;
2271 case UNLT_EXPR:
2272 return COMPCODE_UNLT;
2273 case UNEQ_EXPR:
2274 return COMPCODE_UNEQ;
2275 case UNLE_EXPR:
2276 return COMPCODE_UNLE;
2277 case UNGT_EXPR:
2278 return COMPCODE_UNGT;
2279 case LTGT_EXPR:
2280 return COMPCODE_LTGT;
2281 case UNGE_EXPR:
2282 return COMPCODE_UNGE;
2283 default:
2284 gcc_unreachable ();
2285 }
2286 }
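
/* The point of the bit encoding: the LT, EQ, GT and UNORD bits are
   independent, so COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ),
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD), and
   combine_comparisons below can merge two codes with plain & and |.  */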
2287
2288 /* Convert a compcode bit-based encoding of a comparison operator back
2289 to GCC's enum tree_code representation. This function is the
2290 inverse of comparison_to_compcode. */
2291
2292 static enum tree_code
2293 compcode_to_comparison (enum comparison_code code)
2294 {
2295 switch (code)
2296 {
2297 case COMPCODE_LT:
2298 return LT_EXPR;
2299 case COMPCODE_EQ:
2300 return EQ_EXPR;
2301 case COMPCODE_LE:
2302 return LE_EXPR;
2303 case COMPCODE_GT:
2304 return GT_EXPR;
2305 case COMPCODE_NE:
2306 return NE_EXPR;
2307 case COMPCODE_GE:
2308 return GE_EXPR;
2309 case COMPCODE_ORD:
2310 return ORDERED_EXPR;
2311 case COMPCODE_UNORD:
2312 return UNORDERED_EXPR;
2313 case COMPCODE_UNLT:
2314 return UNLT_EXPR;
2315 case COMPCODE_UNEQ:
2316 return UNEQ_EXPR;
2317 case COMPCODE_UNLE:
2318 return UNLE_EXPR;
2319 case COMPCODE_UNGT:
2320 return UNGT_EXPR;
2321 case COMPCODE_LTGT:
2322 return LTGT_EXPR;
2323 case COMPCODE_UNGE:
2324 return UNGE_EXPR;
2325 default:
2326 gcc_unreachable ();
2327 }
2328 }
2329
2330 /* Return a tree for the comparison which is the combination of
2331 doing the AND or OR (depending on CODE) of the two operations LCODE
2332 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2333 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2334 if this makes the transformation invalid. */
2335
2336 tree
2337 combine_comparisons (location_t loc,
2338 enum tree_code code, enum tree_code lcode,
2339 enum tree_code rcode, tree truth_type,
2340 tree ll_arg, tree lr_arg)
2341 {
2342 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2343 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2344 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2345 int compcode;
2346
2347 switch (code)
2348 {
2349 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2350 compcode = lcompcode & rcompcode;
2351 break;
2352
2353 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2354 compcode = lcompcode | rcompcode;
2355 break;
2356
2357 default:
2358 return NULL_TREE;
2359 }
2360
2361 if (!honor_nans)
2362 {
2363 /* Eliminate unordered comparisons, as well as LTGT and ORD
2364 which are not used unless the mode has NaNs. */
2365 compcode &= ~COMPCODE_UNORD;
2366 if (compcode == COMPCODE_LTGT)
2367 compcode = COMPCODE_NE;
2368 else if (compcode == COMPCODE_ORD)
2369 compcode = COMPCODE_TRUE;
2370 }
2371 else if (flag_trapping_math)
2372 {
2373 /* Check that the original operation and the optimized ones will trap
2374 under the same condition. */
2375 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2376 && (lcompcode != COMPCODE_EQ)
2377 && (lcompcode != COMPCODE_ORD);
2378 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2379 && (rcompcode != COMPCODE_EQ)
2380 && (rcompcode != COMPCODE_ORD);
2381 bool trap = (compcode & COMPCODE_UNORD) == 0
2382 && (compcode != COMPCODE_EQ)
2383 && (compcode != COMPCODE_ORD);
2384
2385 /* In a short-circuited boolean expression the LHS might be
2386 such that the RHS, if evaluated, will never trap. For
2387 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2388 if neither x nor y is NaN. (This is a mixed blessing: for
2389 example, the expression above will never trap, hence
2390 optimizing it to x < y would be invalid). */
2391 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2392 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2393 rtrap = false;
2394
2395 /* If the comparison was short-circuited, and only the RHS
2396 trapped, we may now generate a spurious trap. */
2397 if (rtrap && !ltrap
2398 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2399 return NULL_TREE;
2400
2401 /* If we changed the conditions that cause a trap, we lose. */
2402 if ((ltrap || rtrap) != trap)
2403 return NULL_TREE;
2404 }
2405
2406 if (compcode == COMPCODE_TRUE)
2407 return constant_boolean_node (true, truth_type);
2408 else if (compcode == COMPCODE_FALSE)
2409 return constant_boolean_node (false, truth_type);
2410 else
2411 {
2412 enum tree_code tcode;
2413
2414 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2415 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2416 }
2417 }
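
/* Worked example: (a < b) || (a == b) combines as
   COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE and folds to a <= b, while
   (a < b) && (a == b) yields COMPCODE_FALSE and folds to a constant
   false -- in both cases subject to the NaN and trapping-math checks
   above.  */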
2418 \f
2419 /* Return nonzero if two operands (typically of the same tree node)
2420 are necessarily equal. If either argument has side-effects this
2421 function returns zero. FLAGS modifies behavior as follows:
2422
2423 If OEP_ONLY_CONST is set, only return nonzero for constants.
2424 This function tests whether the operands are indistinguishable;
2425 it does not test whether they are equal using C's == operation.
2426 The distinction is important for IEEE floating point, because
2427 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2428 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2429
2430 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2431 even though it may hold multiple values during a function.
2432 This is because a GCC tree node guarantees that nothing else is
2433 executed between the evaluation of its "operands" (which may often
2434 be evaluated in arbitrary order). Hence if the operands themselves
2435 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2436 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2437 unset means assuming isochronic (or instantaneous) tree equivalence.
2438 Unless comparing arbitrary expression trees, such as from different
2439 statements, this flag can usually be left unset.
2440
2441 If OEP_PURE_SAME is set, then pure functions with identical arguments
2442 are considered the same. It is used when the caller has other ways
2443 to ensure that global memory is unchanged in between. */
2444
2445 int
2446 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2447 {
2448 /* If either is ERROR_MARK, they aren't equal. */
2449 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2450 || TREE_TYPE (arg0) == error_mark_node
2451 || TREE_TYPE (arg1) == error_mark_node)
2452 return 0;
2453
2454 /* Similar, if either does not have a type (like a released SSA name),
2455 they aren't equal. */
2456 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2457 return 0;
2458
2459 /* Check equality of integer constants before bailing out due to
2460 precision differences. */
2461 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2462 return tree_int_cst_equal (arg0, arg1);
2463
2464 /* If the two types don't have the same signedness, then we can't consider
2465 them equal. We must check this before the STRIP_NOPS calls
2466 because they may change the signedness of the arguments. As pointers
2467 strictly don't have a signedness, require either two pointers or
2468 two non-pointers as well. */
2469 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2470 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2471 return 0;
2472
2473 /* We cannot consider pointers to different address spaces equal. */
2474 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2475 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2476 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2477 return 0;
2478
2479 /* If the two types don't have the same precision, then it is not safe
2480 to strip NOPs. */
2481 if (element_precision (TREE_TYPE (arg0))
2482 != element_precision (TREE_TYPE (arg1)))
2483 return 0;
2484
2485 STRIP_NOPS (arg0);
2486 STRIP_NOPS (arg1);
2487
2488 /* In case both args are comparisons but with different comparison
2489 code, try to swap the comparison operands of one arg to produce
2490 a match and compare that variant. */
2491 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2492 && COMPARISON_CLASS_P (arg0)
2493 && COMPARISON_CLASS_P (arg1))
2494 {
2495 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2496
2497 if (TREE_CODE (arg0) == swap_code)
2498 return operand_equal_p (TREE_OPERAND (arg0, 0),
2499 TREE_OPERAND (arg1, 1), flags)
2500 && operand_equal_p (TREE_OPERAND (arg0, 1),
2501 TREE_OPERAND (arg1, 0), flags);
2502 }
2503
2504 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2505 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2506 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2507 return 0;
2508
2509 /* This is needed for conversions and for COMPONENT_REF.
2510 Might as well play it safe and always test this. */
2511 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2512 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2513 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2514 return 0;
2515
2516 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2517 We don't care about side effects in that case because the SAVE_EXPR
2518 takes care of that for us. In all other cases, two expressions are
2519 equal if they have no side effects. If we have two identical
2520 expressions with side effects that should be treated the same due
2521 to the only side effects being identical SAVE_EXPR's, that will
2522 be detected in the recursive calls below.
2523 If we are taking an invariant address of two identical objects
2524 they are necessarily equal as well. */
2525 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2526 && (TREE_CODE (arg0) == SAVE_EXPR
2527 || (flags & OEP_CONSTANT_ADDRESS_OF)
2528 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2529 return 1;
2530
2531 /* Next handle constant cases, those for which we can return 1 even
2532 if ONLY_CONST is set. */
2533 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2534 switch (TREE_CODE (arg0))
2535 {
2536 case INTEGER_CST:
2537 return tree_int_cst_equal (arg0, arg1);
2538
2539 case FIXED_CST:
2540 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2541 TREE_FIXED_CST (arg1));
2542
2543 case REAL_CST:
2544 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2545 TREE_REAL_CST (arg1)))
2546 return 1;
2547
2548
2549 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2550 {
2551 /* If we do not distinguish between signed and unsigned zero,
2552 consider them equal. */
2553 if (real_zerop (arg0) && real_zerop (arg1))
2554 return 1;
2555 }
2556 return 0;
2557
2558 case VECTOR_CST:
2559 {
2560 unsigned i;
2561
2562 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2563 return 0;
2564
2565 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2566 {
2567 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2568 VECTOR_CST_ELT (arg1, i), flags))
2569 return 0;
2570 }
2571 return 1;
2572 }
2573
2574 case COMPLEX_CST:
2575 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2576 flags)
2577 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2578 flags));
2579
2580 case STRING_CST:
2581 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2582 && ! memcmp (TREE_STRING_POINTER (arg0),
2583 TREE_STRING_POINTER (arg1),
2584 TREE_STRING_LENGTH (arg0)));
2585
2586 case ADDR_EXPR:
2587 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2588 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2589 ? OEP_CONSTANT_ADDRESS_OF : 0);
2590 default:
2591 break;
2592 }
2593
2594 if (flags & OEP_ONLY_CONST)
2595 return 0;
2596
2597 /* Define macros to test an operand from arg0 and arg1 for equality and a
2598 variant that allows null and views null as being different from any
2599 non-null value. In the latter case, if either is null, then both
2600 must be; otherwise, do the normal comparison. */
2601 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2602 TREE_OPERAND (arg1, N), flags)
2603
2604 #define OP_SAME_WITH_NULL(N) \
2605 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2606 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2607
2608 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2609 {
2610 case tcc_unary:
2611 /* Two conversions are equal only if signedness and modes match. */
2612 switch (TREE_CODE (arg0))
2613 {
2614 CASE_CONVERT:
2615 case FIX_TRUNC_EXPR:
2616 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2617 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2618 return 0;
2619 break;
2620 default:
2621 break;
2622 }
2623
2624 return OP_SAME (0);
2625
2626
2627 case tcc_comparison:
2628 case tcc_binary:
2629 if (OP_SAME (0) && OP_SAME (1))
2630 return 1;
2631
2632 /* For commutative ops, allow the other order. */
2633 return (commutative_tree_code (TREE_CODE (arg0))
2634 && operand_equal_p (TREE_OPERAND (arg0, 0),
2635 TREE_OPERAND (arg1, 1), flags)
2636 && operand_equal_p (TREE_OPERAND (arg0, 1),
2637 TREE_OPERAND (arg1, 0), flags));
2638
2639 case tcc_reference:
2640 /* If either of the pointer (or reference) expressions we are
2641 dereferencing contain a side effect, these cannot be equal,
2642 but their addresses can be. */
2643 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2644 && (TREE_SIDE_EFFECTS (arg0)
2645 || TREE_SIDE_EFFECTS (arg1)))
2646 return 0;
2647
2648 switch (TREE_CODE (arg0))
2649 {
2650 case INDIRECT_REF:
2651 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2652 return OP_SAME (0);
2653
2654 case REALPART_EXPR:
2655 case IMAGPART_EXPR:
2656 return OP_SAME (0);
2657
2658 case TARGET_MEM_REF:
2659 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2660 /* Require equal extra operands and then fall through to MEM_REF
2661 handling of the two common operands. */
2662 if (!OP_SAME_WITH_NULL (2)
2663 || !OP_SAME_WITH_NULL (3)
2664 || !OP_SAME_WITH_NULL (4))
2665 return 0;
2666 /* Fallthru. */
2667 case MEM_REF:
2668 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2669 /* Require equal access sizes, and similar pointer types.
2670 We can have incomplete types for array references of
2671 variable-sized arrays from the Fortran frontend
2672 though. Also verify the types are compatible. */
2673 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2674 || (TYPE_SIZE (TREE_TYPE (arg0))
2675 && TYPE_SIZE (TREE_TYPE (arg1))
2676 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2677 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2678 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2679 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2680 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2681 && OP_SAME (0) && OP_SAME (1));
2682
2683 case ARRAY_REF:
2684 case ARRAY_RANGE_REF:
2685 /* Operands 2 and 3 may be null.
2686 Compare the array index by value if it is constant first as we
2687 may have different types but same value here. */
2688 if (!OP_SAME (0))
2689 return 0;
2690 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2691 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2692 TREE_OPERAND (arg1, 1))
2693 || OP_SAME (1))
2694 && OP_SAME_WITH_NULL (2)
2695 && OP_SAME_WITH_NULL (3));
2696
2697 case COMPONENT_REF:
2698 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2699 may be NULL when we're called to compare MEM_EXPRs. */
2700 if (!OP_SAME_WITH_NULL (0))
2701 return 0;
2702 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2703 return OP_SAME (1) && OP_SAME_WITH_NULL (2);
2704
2705 case BIT_FIELD_REF:
2706 if (!OP_SAME (0))
2707 return 0;
2708 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2709 return OP_SAME (1) && OP_SAME (2);
2710
2711 default:
2712 return 0;
2713 }
2714
2715 case tcc_expression:
2716 switch (TREE_CODE (arg0))
2717 {
2718 case ADDR_EXPR:
2719 case TRUTH_NOT_EXPR:
2720 return OP_SAME (0);
2721
2722 case TRUTH_ANDIF_EXPR:
2723 case TRUTH_ORIF_EXPR:
2724 return OP_SAME (0) && OP_SAME (1);
2725
2726 case FMA_EXPR:
2727 case WIDEN_MULT_PLUS_EXPR:
2728 case WIDEN_MULT_MINUS_EXPR:
2729 if (!OP_SAME (2))
2730 return 0;
2731 /* The multiplication operands are commutative. */
2732 /* FALLTHRU */
2733
2734 case TRUTH_AND_EXPR:
2735 case TRUTH_OR_EXPR:
2736 case TRUTH_XOR_EXPR:
2737 if (OP_SAME (0) && OP_SAME (1))
2738 return 1;
2739
2740 /* Otherwise take into account this is a commutative operation. */
2741 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2742 TREE_OPERAND (arg1, 1), flags)
2743 && operand_equal_p (TREE_OPERAND (arg0, 1),
2744 TREE_OPERAND (arg1, 0), flags));
2745
2746 case COND_EXPR:
2747 case VEC_COND_EXPR:
2748 case DOT_PROD_EXPR:
2749 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2750
2751 default:
2752 return 0;
2753 }
2754
2755 case tcc_vl_exp:
2756 switch (TREE_CODE (arg0))
2757 {
2758 case CALL_EXPR:
2759 /* If the CALL_EXPRs call different functions, then they
2760 clearly cannot be equal. */
2761 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2762 flags))
2763 return 0;
2764
2765 {
2766 unsigned int cef = call_expr_flags (arg0);
2767 if (flags & OEP_PURE_SAME)
2768 cef &= ECF_CONST | ECF_PURE;
2769 else
2770 cef &= ECF_CONST;
2771 if (!cef)
2772 return 0;
2773 }
2774
2775 /* Now see if all the arguments are the same. */
2776 {
2777 const_call_expr_arg_iterator iter0, iter1;
2778 const_tree a0, a1;
2779 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2780 a1 = first_const_call_expr_arg (arg1, &iter1);
2781 a0 && a1;
2782 a0 = next_const_call_expr_arg (&iter0),
2783 a1 = next_const_call_expr_arg (&iter1))
2784 if (! operand_equal_p (a0, a1, flags))
2785 return 0;
2786
2787 /* If we get here and both argument lists are exhausted
2788 then the CALL_EXPRs are equal. */
2789 return ! (a0 || a1);
2790 }
2791 default:
2792 return 0;
2793 }
2794
2795 case tcc_declaration:
2796 /* Consider __builtin_sqrt equal to sqrt. */
2797 return (TREE_CODE (arg0) == FUNCTION_DECL
2798 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2799 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2800 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2801
2802 default:
2803 return 0;
2804 }
2805
2806 #undef OP_SAME
2807 #undef OP_SAME_WITH_NULL
2808 }
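
/* A few consequences of the rules above (with hypothetical operands):
   a + b is considered equal to b + a because PLUS_EXPR is commutative;
   the REAL_CSTs 0.0 and -0.0 compare unequal while signed zeros are
   honored; and f () is never equal to f () because of side effects,
   unless both occurrences are the very same SAVE_EXPR.  */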
2809 \f
2810 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2811 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2812
2813 When in doubt, return 0. */
2814
2815 static int
2816 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2817 {
2818 int unsignedp1, unsignedpo;
2819 tree primarg0, primarg1, primother;
2820 unsigned int correct_width;
2821
2822 if (operand_equal_p (arg0, arg1, 0))
2823 return 1;
2824
2825 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2826 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2827 return 0;
2828
2829 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2830 and see if the inner values are the same. This removes any
2831 signedness comparison, which doesn't matter here. */
2832 primarg0 = arg0, primarg1 = arg1;
2833 STRIP_NOPS (primarg0);
2834 STRIP_NOPS (primarg1);
2835 if (operand_equal_p (primarg0, primarg1, 0))
2836 return 1;
2837
2838 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2839 actual comparison operand, ARG0.
2840
2841 First throw away any conversions to wider types
2842 already present in the operands. */
2843
2844 primarg1 = get_narrower (arg1, &unsignedp1);
2845 primother = get_narrower (other, &unsignedpo);
2846
2847 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2848 if (unsignedp1 == unsignedpo
2849 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2850 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2851 {
2852 tree type = TREE_TYPE (arg0);
2853
2854 /* Make sure shorter operand is extended the right way
2855 to match the longer operand. */
2856 primarg1 = fold_convert (signed_or_unsigned_type_for
2857 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2858
2859 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2860 return 1;
2861 }
2862
2863 return 0;
2864 }
2865 \f
2866 /* See if ARG is an expression that is either a comparison or is performing
2867 arithmetic on comparisons. The comparisons must only be comparing
2868 two different values, which will be stored in *CVAL1 and *CVAL2; if
2869 they are nonzero it means that some operands have already been found.
2870 No variables may be used anywhere else in the expression except in the
2871 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2872 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2873
2874 If this is true, return 1. Otherwise, return zero. */
2875
2876 static int
2877 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2878 {
2879 enum tree_code code = TREE_CODE (arg);
2880 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2881
2882 /* We can handle some of the tcc_expression cases here. */
2883 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2884 tclass = tcc_unary;
2885 else if (tclass == tcc_expression
2886 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2887 || code == COMPOUND_EXPR))
2888 tclass = tcc_binary;
2889
2890 else if (tclass == tcc_expression && code == SAVE_EXPR
2891 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2892 {
2893 /* If we've already found a CVAL1 or CVAL2, this expression is
2894 too complex to handle. */
2895 if (*cval1 || *cval2)
2896 return 0;
2897
2898 tclass = tcc_unary;
2899 *save_p = 1;
2900 }
2901
2902 switch (tclass)
2903 {
2904 case tcc_unary:
2905 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2906
2907 case tcc_binary:
2908 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2909 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2910 cval1, cval2, save_p));
2911
2912 case tcc_constant:
2913 return 1;
2914
2915 case tcc_expression:
2916 if (code == COND_EXPR)
2917 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2918 cval1, cval2, save_p)
2919 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2920 cval1, cval2, save_p)
2921 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2922 cval1, cval2, save_p));
2923 return 0;
2924
2925 case tcc_comparison:
2926 /* First see if we can handle the first operand, then the second. For
2927 the second operand, we know *CVAL1 can't be zero. Each of the values
2928 must appear on one side of the comparison; test for the
2929 case where this isn't true by failing if the two operands
2930 are the same. */
2931
2932 if (operand_equal_p (TREE_OPERAND (arg, 0),
2933 TREE_OPERAND (arg, 1), 0))
2934 return 0;
2935
2936 if (*cval1 == 0)
2937 *cval1 = TREE_OPERAND (arg, 0);
2938 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2939 ;
2940 else if (*cval2 == 0)
2941 *cval2 = TREE_OPERAND (arg, 0);
2942 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2943 ;
2944 else
2945 return 0;
2946
2947 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2948 ;
2949 else if (*cval2 == 0)
2950 *cval2 = TREE_OPERAND (arg, 1);
2951 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2952 ;
2953 else
2954 return 0;
2955
2956 return 1;
2957
2958 default:
2959 return 0;
2960 }
2961 }
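
/* For instance, (x < y) && (x == y ? 1 : x > y) qualifies: every leaf
   comparison mentions only x and y, so the walk ends with *CVAL1 and
   *CVAL2 set to x and y.  A comparison against a third variable z
   anywhere in the tree makes the function return 0.  */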
2962 \f
2963 /* ARG is a tree that is known to contain just arithmetic operations and
2964 comparisons. Evaluate the operations in the tree substituting NEW0 for
2965 any occurrence of OLD0 as an operand of a comparison and likewise for
2966 NEW1 and OLD1. */
2967
2968 static tree
2969 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2970 tree old1, tree new1)
2971 {
2972 tree type = TREE_TYPE (arg);
2973 enum tree_code code = TREE_CODE (arg);
2974 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2975
2976 /* We can handle some of the tcc_expression cases here. */
2977 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2978 tclass = tcc_unary;
2979 else if (tclass == tcc_expression
2980 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2981 tclass = tcc_binary;
2982
2983 switch (tclass)
2984 {
2985 case tcc_unary:
2986 return fold_build1_loc (loc, code, type,
2987 eval_subst (loc, TREE_OPERAND (arg, 0),
2988 old0, new0, old1, new1));
2989
2990 case tcc_binary:
2991 return fold_build2_loc (loc, code, type,
2992 eval_subst (loc, TREE_OPERAND (arg, 0),
2993 old0, new0, old1, new1),
2994 eval_subst (loc, TREE_OPERAND (arg, 1),
2995 old0, new0, old1, new1));
2996
2997 case tcc_expression:
2998 switch (code)
2999 {
3000 case SAVE_EXPR:
3001 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3002 old1, new1);
3003
3004 case COMPOUND_EXPR:
3005 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3006 old1, new1);
3007
3008 case COND_EXPR:
3009 return fold_build3_loc (loc, code, type,
3010 eval_subst (loc, TREE_OPERAND (arg, 0),
3011 old0, new0, old1, new1),
3012 eval_subst (loc, TREE_OPERAND (arg, 1),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 2),
3015 old0, new0, old1, new1));
3016 default:
3017 break;
3018 }
3019 /* Fall through - ??? */
3020
3021 case tcc_comparison:
3022 {
3023 tree arg0 = TREE_OPERAND (arg, 0);
3024 tree arg1 = TREE_OPERAND (arg, 1);
3025
3026 /* We need to check both for exact equality and tree equality. The
3027 former will be true if the operand has a side-effect. In that
3028 case, we know the operand occurred exactly once. */
3029
3030 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3031 arg0 = new0;
3032 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3033 arg0 = new1;
3034
3035 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3036 arg1 = new0;
3037 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3038 arg1 = new1;
3039
3040 return fold_build2_loc (loc, code, type, arg0, arg1);
3041 }
3042
3043 default:
3044 return arg;
3045 }
3046 }
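
/* Example: eval_subst on (x < y) || (x == y) with OLD0 = x, NEW0 = a,
   OLD1 = y, NEW1 = b rebuilds (a < b) || (a == b); each comparison
   operand is substituted independently, so x < x would become a < a.  */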
3047 \f
3048 /* Return a tree for the case when the result of an expression is RESULT
3049 converted to TYPE and OMITTED was previously an operand of the expression
3050 but is now not needed (e.g., we folded OMITTED * 0).
3051
3052 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3053 the conversion of RESULT to TYPE. */
3054
3055 tree
3056 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3057 {
3058 tree t = fold_convert_loc (loc, type, result);
3059
3060 /* If the resulting operand is an empty statement, just return the omitted
3061 statement cast to void. */
3062 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3063 return build1_loc (loc, NOP_EXPR, void_type_node,
3064 fold_ignored_result (omitted));
3065
3066 if (TREE_SIDE_EFFECTS (omitted))
3067 return build2_loc (loc, COMPOUND_EXPR, type,
3068 fold_ignored_result (omitted), t);
3069
3070 return non_lvalue_loc (loc, t);
3071 }
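
/* E.g. when fold simplifies f () * 0, RESULT is 0 and OMITTED is the
   call; the call has side effects, so COMPOUND_EXPR <f (), 0> is built
   to keep the evaluation.  With a side-effect-free OMITTED the plain
   converted RESULT is returned instead.  */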
3072
3073 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3074
3075 static tree
3076 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3077 tree omitted)
3078 {
3079 tree t = fold_convert_loc (loc, type, result);
3080
3081 /* If the resulting operand is an empty statement, just return the omitted
3082 statement casted to void. */
3083 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3084 return build1_loc (loc, NOP_EXPR, void_type_node,
3085 fold_ignored_result (omitted));
3086
3087 if (TREE_SIDE_EFFECTS (omitted))
3088 return build2_loc (loc, COMPOUND_EXPR, type,
3089 fold_ignored_result (omitted), t);
3090
3091 return pedantic_non_lvalue_loc (loc, t);
3092 }
3093
3094 /* Return a tree for the case when the result of an expression is RESULT
3095 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3096 of the expression but are now not needed.
3097
3098 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3099 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3100 evaluated before OMITTED2. Otherwise, if neither has side effects,
3101 just do the conversion of RESULT to TYPE. */
3102
3103 tree
3104 omit_two_operands_loc (location_t loc, tree type, tree result,
3105 tree omitted1, tree omitted2)
3106 {
3107 tree t = fold_convert_loc (loc, type, result);
3108
3109 if (TREE_SIDE_EFFECTS (omitted2))
3110 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3111 if (TREE_SIDE_EFFECTS (omitted1))
3112 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3113
3114 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3115 }
3116
3117 \f
3118 /* Return a simplified tree node for the truth-negation of ARG. This
3119 never alters ARG itself. We assume that ARG is an operation that
3120 returns a truth value (0 or 1).
3121
3122 FIXME: one would think we would fold the result, but it causes
3123 problems with the dominator optimizer. */
3124
3125 static tree
3126 fold_truth_not_expr (location_t loc, tree arg)
3127 {
3128 tree type = TREE_TYPE (arg);
3129 enum tree_code code = TREE_CODE (arg);
3130 location_t loc1, loc2;
3131
3132 /* If this is a comparison, we can simply invert it, except for
3133 floating-point non-equality comparisons, in which case we just
3134 enclose a TRUTH_NOT_EXPR around what we have. */
3135
3136 if (TREE_CODE_CLASS (code) == tcc_comparison)
3137 {
3138 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3139 if (FLOAT_TYPE_P (op_type)
3140 && flag_trapping_math
3141 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3142 && code != NE_EXPR && code != EQ_EXPR)
3143 return NULL_TREE;
3144
3145 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3146 if (code == ERROR_MARK)
3147 return NULL_TREE;
3148
3149 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3150 TREE_OPERAND (arg, 1));
3151 }
3152
3153 switch (code)
3154 {
3155 case INTEGER_CST:
3156 return constant_boolean_node (integer_zerop (arg), type);
3157
3158 case TRUTH_AND_EXPR:
3159 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3160 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3161 return build2_loc (loc, TRUTH_OR_EXPR, type,
3162 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3163 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3164
3165 case TRUTH_OR_EXPR:
3166 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3167 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3168 return build2_loc (loc, TRUTH_AND_EXPR, type,
3169 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3170 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3171
3172 case TRUTH_XOR_EXPR:
3173 /* Here we can invert either operand. We invert the first operand
3174 unless the second operand is a TRUTH_NOT_EXPR in which case our
3175 result is the XOR of the first operand with the inside of the
3176 negation of the second operand. */
3177
3178 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3179 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3180 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3181 else
3182 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3183 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3184 TREE_OPERAND (arg, 1));
3185
3186 case TRUTH_ANDIF_EXPR:
3187 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3188 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3189 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3190 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3191 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3192
3193 case TRUTH_ORIF_EXPR:
3194 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3195 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3196 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3197 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3198 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3199
3200 case TRUTH_NOT_EXPR:
3201 return TREE_OPERAND (arg, 0);
3202
3203 case COND_EXPR:
3204 {
3205 tree arg1 = TREE_OPERAND (arg, 1);
3206 tree arg2 = TREE_OPERAND (arg, 2);
3207
3208 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3210
3211 /* A COND_EXPR may have a throw as one operand, which
3212 then has void type. Just leave void operands
3213 as they are. */
3214 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3215 VOID_TYPE_P (TREE_TYPE (arg1))
3216 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3217 VOID_TYPE_P (TREE_TYPE (arg2))
3218 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3219 }
3220
3221 case COMPOUND_EXPR:
3222 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3223 return build2_loc (loc, COMPOUND_EXPR, type,
3224 TREE_OPERAND (arg, 0),
3225 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3226
3227 case NON_LVALUE_EXPR:
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3229 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3230
3231 CASE_CONVERT:
3232 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3233 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3234
3235 /* ... fall through ... */
3236
3237 case FLOAT_EXPR:
3238 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3239 return build1_loc (loc, TREE_CODE (arg), type,
3240 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3241
3242 case BIT_AND_EXPR:
3243 if (!integer_onep (TREE_OPERAND (arg, 1)))
3244 return NULL_TREE;
3245 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3246
3247 case SAVE_EXPR:
3248 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3249
3250 case CLEANUP_POINT_EXPR:
3251 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3252 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3253 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3254
3255 default:
3256 return NULL_TREE;
3257 }
3258 }
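
/* Examples of the cases above: !(a && b) rewrites to !a || !b,
   !(a ? b : c) pushes the negation into both arms of the COND_EXPR,
   and !(f1 < f2) on floats under flag_trapping_math returns NULL_TREE
   so the caller keeps the TRUTH_NOT_EXPR wrapper.  */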
3259
3260 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3261 assume that ARG is an operation that returns a truth value (0 or 1
3262 for scalars, 0 or -1 for vectors). Return the folded expression if
3263 folding is successful. Otherwise, return NULL_TREE. */
3264
3265 static tree
3266 fold_invert_truthvalue (location_t loc, tree arg)
3267 {
3268 tree type = TREE_TYPE (arg);
3269 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3270 ? BIT_NOT_EXPR
3271 : TRUTH_NOT_EXPR,
3272 type, arg);
3273 }
3274
3275 /* Return a simplified tree node for the truth-negation of ARG. This
3276 never alters ARG itself. We assume that ARG is an operation that
3277 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3278
3279 tree
3280 invert_truthvalue_loc (location_t loc, tree arg)
3281 {
3282 if (TREE_CODE (arg) == ERROR_MARK)
3283 return arg;
3284
3285 tree type = TREE_TYPE (arg);
3286 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3287 ? BIT_NOT_EXPR
3288 : TRUTH_NOT_EXPR,
3289 type, arg);
3290 }
3291
3292 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3293 operands are another bit-wise operation with a common input. If so,
3294 distribute the bit operations to save an operation and possibly two if
3295 constants are involved. For example, convert
3296 (A | B) & (A | C) into A | (B & C)
3297 Further simplification will occur if B and C are constants.
3298
3299 If this optimization cannot be done, 0 will be returned. */
3300
3301 static tree
3302 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3303 tree arg0, tree arg1)
3304 {
3305 tree common;
3306 tree left, right;
3307
3308 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3309 || TREE_CODE (arg0) == code
3310 || (TREE_CODE (arg0) != BIT_AND_EXPR
3311 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3312 return 0;
3313
3314 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3315 {
3316 common = TREE_OPERAND (arg0, 0);
3317 left = TREE_OPERAND (arg0, 1);
3318 right = TREE_OPERAND (arg1, 1);
3319 }
3320 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3321 {
3322 common = TREE_OPERAND (arg0, 0);
3323 left = TREE_OPERAND (arg0, 1);
3324 right = TREE_OPERAND (arg1, 0);
3325 }
3326 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3327 {
3328 common = TREE_OPERAND (arg0, 1);
3329 left = TREE_OPERAND (arg0, 0);
3330 right = TREE_OPERAND (arg1, 1);
3331 }
3332 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3333 {
3334 common = TREE_OPERAND (arg0, 1);
3335 left = TREE_OPERAND (arg0, 0);
3336 right = TREE_OPERAND (arg1, 0);
3337 }
3338 else
3339 return 0;
3340
3341 common = fold_convert_loc (loc, type, common);
3342 left = fold_convert_loc (loc, type, left);
3343 right = fold_convert_loc (loc, type, right);
3344 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3345 fold_build2_loc (loc, code, type, left, right));
3346 }
3347
3348 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3349 with code CODE. This optimization is unsafe: it reassociates floating-point divisions and can change rounding, so callers only apply it under -funsafe-math-optimizations. */
3350 static tree
3351 distribute_real_division (location_t loc, enum tree_code code, tree type,
3352 tree arg0, tree arg1)
3353 {
3354 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3355 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3356
3357 /* (A / C) +- (B / C) -> (A +- B) / C. */
3358 if (mul0 == mul1
3359 && operand_equal_p (TREE_OPERAND (arg0, 1),
3360 TREE_OPERAND (arg1, 1), 0))
3361 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3362 fold_build2_loc (loc, code, type,
3363 TREE_OPERAND (arg0, 0),
3364 TREE_OPERAND (arg1, 0)),
3365 TREE_OPERAND (arg0, 1));
3366
3367 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3368 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3369 TREE_OPERAND (arg1, 0), 0)
3370 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3371 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3372 {
3373 REAL_VALUE_TYPE r0, r1;
3374 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3375 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3376 if (!mul0)
3377 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3378 if (!mul1)
3379 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3380 real_arithmetic (&r0, code, &r0, &r1);
3381 return fold_build2_loc (loc, MULT_EXPR, type,
3382 TREE_OPERAND (arg0, 0),
3383 build_real (type, r0));
3384 }
3385
3386 return NULL_TREE;
3387 }
3388 \f
3389 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3390 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3391
3392 static tree
3393 make_bit_field_ref (location_t loc, tree inner, tree type,
3394 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3395 {
3396 tree result, bftype;
3397
3398 if (bitpos == 0)
3399 {
3400 tree size = TYPE_SIZE (TREE_TYPE (inner));
3401 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3402 || POINTER_TYPE_P (TREE_TYPE (inner)))
3403 && host_integerp (size, 0)
3404 && tree_low_cst (size, 0) == bitsize)
3405 return fold_convert_loc (loc, type, inner);
3406 }
3407
3408 bftype = type;
3409 if (TYPE_PRECISION (bftype) != bitsize
3410 || TYPE_UNSIGNED (bftype) == !unsignedp)
3411 bftype = build_nonstandard_integer_type (bitsize, 0);
3412
3413 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3414 size_int (bitsize), bitsize_int (bitpos));
3415
3416 if (bftype != type)
3417 result = fold_convert_loc (loc, type, result);
3418
3419 return result;
3420 }
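
/* E.g. a request for 8 bits at bit position 16 yields
   BIT_FIELD_REF <inner, 8, 16>, built in an integer type of exactly
   8 bits when TYPE does not already have matching precision and
   signedness; a full-width reference at bit 0 of an integral or
   pointer INNER degenerates into a plain conversion.  */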
3421
3422 /* Optimize a bit-field compare.
3423
3424 There are two cases: First is a compare against a constant and the
3425 second is a comparison of two items where the fields are at the same
3426 bit position relative to the start of a chunk (byte, halfword, word)
3427 large enough to contain it. In these cases we can avoid the shift
3428 implicit in bitfield extractions.
3429
3430 For constants, we emit a compare of the shifted constant with the
3431 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3432 compared. For two fields at the same position, we do the ANDs with the
3433 similar mask and compare the result of the ANDs.
3434
3435 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3436 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3437 are the left and right operands of the comparison, respectively.
3438
3439 If the optimization described above can be done, we return the resulting
3440 tree. Otherwise we return zero. */
3441
3442 static tree
3443 optimize_bit_field_compare (location_t loc, enum tree_code code,
3444 tree compare_type, tree lhs, tree rhs)
3445 {
3446 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3447 tree type = TREE_TYPE (lhs);
3448 tree signed_type, unsigned_type;
3449 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3450 enum machine_mode lmode, rmode, nmode;
3451 int lunsignedp, runsignedp;
3452 int lvolatilep = 0, rvolatilep = 0;
3453 tree linner, rinner = NULL_TREE;
3454 tree mask;
3455 tree offset;
3456
3457 /* In the strict volatile bitfields case, doing code changes here may prevent
3458 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3459 if (flag_strict_volatile_bitfields > 0)
3460 return 0;
3461
3462 /* Get all the information about the extractions being done. If the bit size
3463 is the same as the size of the underlying object, we aren't doing an
3464 extraction at all and so can do nothing. We also don't want to
3465 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3466 then will no longer be able to replace it. */
3467 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3468 &lunsignedp, &lvolatilep, false);
3469 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3470 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3471 return 0;
3472
3473 if (!const_p)
3474 {
3475 /* If this is not a constant, we can only do something if bit positions,
3476 sizes, and signedness are the same. */
3477 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3478 &runsignedp, &rvolatilep, false);
3479
3480 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3481 || lunsignedp != runsignedp || offset != 0
3482 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3483 return 0;
3484 }
3485
3486 /* See if we can find a mode to refer to this field. We should be able to,
3487 but fail if we can't. */
3488 if (lvolatilep
3489 && GET_MODE_BITSIZE (lmode) > 0
3490 && flag_strict_volatile_bitfields > 0)
3491 nmode = lmode;
3492 else
3493 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3494 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3495 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3496 TYPE_ALIGN (TREE_TYPE (rinner))),
3497 word_mode, lvolatilep || rvolatilep);
3498 if (nmode == VOIDmode)
3499 return 0;
3500
3501 /* Set signed and unsigned types of the precision of this mode for the
3502 shifts below. */
3503 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3504 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3505
3506 /* Compute the bit position and size for the new reference and our offset
3507 within it. If the new reference is the same size as the original, we
3508 won't optimize anything, so return zero. */
3509 nbitsize = GET_MODE_BITSIZE (nmode);
3510 nbitpos = lbitpos & ~ (nbitsize - 1);
3511 lbitpos -= nbitpos;
3512 if (nbitsize == lbitsize)
3513 return 0;
3514
3515 if (BYTES_BIG_ENDIAN)
3516 lbitpos = nbitsize - lbitsize - lbitpos;
3517
3518 /* Make the mask to be used against the extracted field. */
3519 mask = build_int_cst_type (unsigned_type, -1);
3520 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3521 mask = const_binop (RSHIFT_EXPR, mask,
3522 size_int (nbitsize - lbitsize - lbitpos));
3523
3524 if (! const_p)
3525 /* If not comparing with constant, just rework the comparison
3526 and return. */
3527 return fold_build2_loc (loc, code, compare_type,
3528 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3529 make_bit_field_ref (loc, linner,
3530 unsigned_type,
3531 nbitsize, nbitpos,
3532 1),
3533 mask),
3534 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3535 make_bit_field_ref (loc, rinner,
3536 unsigned_type,
3537 nbitsize, nbitpos,
3538 1),
3539 mask));
3540
3541 /* Otherwise, we are handling the constant case. See if the constant is too
3542 big for the field. Warn and return a tree for 0 (false) if so. We do
3543 this not only for its own sake, but to avoid having to test for this
3544 error case below. If we didn't, we might generate wrong code.
3545
3546 For unsigned fields, the constant shifted right by the field length should
3547 be all zero. For signed fields, the high-order bits should agree with
3548 the sign bit. */
3549
3550 if (lunsignedp)
3551 {
3552 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3553 fold_convert_loc (loc,
3554 unsigned_type, rhs),
3555 size_int (lbitsize))))
3556 {
3557 warning (0, "comparison is always %d due to width of bit-field",
3558 code == NE_EXPR);
3559 return constant_boolean_node (code == NE_EXPR, compare_type);
3560 }
3561 }
3562 else
3563 {
3564 tree tem = const_binop (RSHIFT_EXPR,
3565 fold_convert_loc (loc, signed_type, rhs),
3566 size_int (lbitsize - 1));
3567 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3568 {
3569 warning (0, "comparison is always %d due to width of bit-field",
3570 code == NE_EXPR);
3571 return constant_boolean_node (code == NE_EXPR, compare_type);
3572 }
3573 }
3574
3575 /* Single-bit compares should always be against zero. */
3576 if (lbitsize == 1 && ! integer_zerop (rhs))
3577 {
3578 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3579 rhs = build_int_cst (type, 0);
3580 }
3581
3582 /* Make a new bitfield reference, shift the constant over the
3583 appropriate number of bits and mask it with the computed mask
3584 (in case this was a signed field). If we changed it, make a new one. */
3585 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3586 if (lvolatilep)
3587 {
3588 TREE_SIDE_EFFECTS (lhs) = 1;
3589 TREE_THIS_VOLATILE (lhs) = 1;
3590 }
3591
3592 rhs = const_binop (BIT_AND_EXPR,
3593 const_binop (LSHIFT_EXPR,
3594 fold_convert_loc (loc, unsigned_type, rhs),
3595 size_int (lbitpos)),
3596 mask);
3597
3598 lhs = build2_loc (loc, code, compare_type,
3599 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3600 return lhs;
3601 }
3602 \f
3603 /* Subroutine for fold_truth_andor_1: decode a field reference.
3604
3605 If EXP is a comparison reference, we return the innermost reference.
3606
3607 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3608 set to the starting bit number.
3609
3610 If the innermost field can be completely contained in a mode-sized
3611 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3612
3613 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3614 otherwise it is not changed.
3615
3616 *PUNSIGNEDP is set to the signedness of the field.
3617
3618 *PMASK is set to the mask used. This is either contained in a
3619 BIT_AND_EXPR or derived from the width of the field.
3620
3621 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3622
3623 Return 0 if this is not a component reference or is one that we can't
3624 do anything with. */
3625
3626 static tree
3627 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3628 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3629 int *punsignedp, int *pvolatilep,
3630 tree *pmask, tree *pand_mask)
3631 {
3632 tree outer_type = 0;
3633 tree and_mask = 0;
3634 tree mask, inner, offset;
3635 tree unsigned_type;
3636 unsigned int precision;
3637
3638 /* All the optimizations using this function assume integer fields.
3639 There are problems with FP fields since the type_for_size call
3640 below can fail for, e.g., XFmode. */
3641 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3642 return 0;
3643
3644 /* We are interested in the bare arrangement of bits, so strip everything
3645 that doesn't affect the machine mode. However, record the type of the
3646 outermost expression if it may matter below. */
3647 if (CONVERT_EXPR_P (exp)
3648 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3649 outer_type = TREE_TYPE (exp);
3650 STRIP_NOPS (exp);
3651
3652 if (TREE_CODE (exp) == BIT_AND_EXPR)
3653 {
3654 and_mask = TREE_OPERAND (exp, 1);
3655 exp = TREE_OPERAND (exp, 0);
3656 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3657 if (TREE_CODE (and_mask) != INTEGER_CST)
3658 return 0;
3659 }
3660
3661 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3662 punsignedp, pvolatilep, false);
3663 if ((inner == exp && and_mask == 0)
3664 || *pbitsize < 0 || offset != 0
3665 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3666 return 0;
3667
3668 /* If the number of bits in the reference is the same as the bitsize of
3669 the outer type, then the outer type gives the signedness. Otherwise
3670 (in case of a small bitfield) the signedness is unchanged. */
3671 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3672 *punsignedp = TYPE_UNSIGNED (outer_type);
3673
3674 /* Compute the mask to access the bitfield. */
3675 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3676 precision = TYPE_PRECISION (unsigned_type);
3677
3678 mask = build_int_cst_type (unsigned_type, -1);
3679
3680 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3681 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3682
3683 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3684 if (and_mask != 0)
3685 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3686 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3687
3688 *pmask = mask;
3689 *pand_mask = and_mask;
3690 return inner;
3691 }
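 /* An illustrative sketch (exact layout is target-dependent): for
    "(s.b & 3) == 1" where B is a 6-bit bitfield, decoding the
    left-hand side returns the containing object S, sets *PBITSIZE
    to 6, *PAND_MASK to 3, and *PMASK to the 6-bit all-ones mask
    ANDed with 3, i.e. 3. */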
3692
3693 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3694 bit positions. */
3695
3696 static int
3697 all_ones_mask_p (const_tree mask, int size)
3698 {
3699 tree type = TREE_TYPE (mask);
3700 unsigned int precision = TYPE_PRECISION (type);
3701 tree tmask;
3702
3703 tmask = build_int_cst_type (signed_type_for (type), -1);
3704
3705 return
3706 tree_int_cst_equal (mask,
3707 const_binop (RSHIFT_EXPR,
3708 const_binop (LSHIFT_EXPR, tmask,
3709 size_int (precision - size)),
3710 size_int (precision - size)));
3711 }
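 /* For example, with a 32-bit mask type and SIZE == 8, the only mask
    accepted above is 0xff: ones in the low-order 8 bits, zeros
    elsewhere. */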
3712
3713 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3714 represents the sign bit of EXP's type. If EXP represents a sign
3715 or zero extension, also test VAL against the unextended type.
3716 The return value is the (sub)expression whose sign bit is VAL,
3717 or NULL_TREE otherwise. */
3718
3719 static tree
3720 sign_bit_p (tree exp, const_tree val)
3721 {
3722 unsigned HOST_WIDE_INT mask_lo, lo;
3723 HOST_WIDE_INT mask_hi, hi;
3724 int width;
3725 tree t;
3726
3727 /* Tree EXP must have an integral type. */
3728 t = TREE_TYPE (exp);
3729 if (! INTEGRAL_TYPE_P (t))
3730 return NULL_TREE;
3731
3732 /* Tree VAL must be an integer constant. */
3733 if (TREE_CODE (val) != INTEGER_CST
3734 || TREE_OVERFLOW (val))
3735 return NULL_TREE;
3736
3737 width = TYPE_PRECISION (t);
3738 if (width > HOST_BITS_PER_WIDE_INT)
3739 {
3740 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3741 lo = 0;
3742
3743 mask_hi = ((unsigned HOST_WIDE_INT) -1
3744 >> (HOST_BITS_PER_DOUBLE_INT - width));
3745 mask_lo = -1;
3746 }
3747 else
3748 {
3749 hi = 0;
3750 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3751
3752 mask_hi = 0;
3753 mask_lo = ((unsigned HOST_WIDE_INT) -1
3754 >> (HOST_BITS_PER_WIDE_INT - width));
3755 }
3756
3757 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3758 treat VAL as if it were unsigned. */
3759 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3760 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3761 return exp;
3762
3763 /* Handle extension from a narrower type. */
3764 if (TREE_CODE (exp) == NOP_EXPR
3765 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3766 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3767
3768 return NULL_TREE;
3769 }
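 /* For example, for a 32-bit signed EXP, sign_bit_p returns EXP when
    VAL is 0x80000000 and NULL_TREE for any other constant. */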
3770
3771 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3772 to be evaluated unconditionally. */
3773
3774 static int
3775 simple_operand_p (const_tree exp)
3776 {
3777 /* Strip any conversions that don't change the machine mode. */
3778 STRIP_NOPS (exp);
3779
3780 return (CONSTANT_CLASS_P (exp)
3781 || TREE_CODE (exp) == SSA_NAME
3782 || (DECL_P (exp)
3783 && ! TREE_ADDRESSABLE (exp)
3784 && ! TREE_THIS_VOLATILE (exp)
3785 && ! DECL_NONLOCAL (exp)
3786 /* Don't regard global variables as simple. They may be
3787 allocated in ways unknown to the compiler (shared memory,
3788 #pragma weak, etc). */
3789 && ! TREE_PUBLIC (exp)
3790 && ! DECL_EXTERNAL (exp)
3791 /* Weakrefs are not safe to be read, since they can be NULL.
3792 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3793 have DECL_WEAK flag set. */
3794 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3795 /* Loading a static variable is unduly expensive, but global
3796 registers aren't expensive. */
3797 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3798 }
3799
3800 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3801 to be evaluated unconditionally.
3804 3802 In addition to simple_operand_p, we assume that comparisons, conversions,
3805 3803 and logic-not operations are simple if their operands are simple, too. */
3804
3805 static bool
3806 simple_operand_p_2 (tree exp)
3807 {
3808 enum tree_code code;
3809
3810 if (TREE_SIDE_EFFECTS (exp)
3811 || tree_could_trap_p (exp))
3812 return false;
3813
3814 while (CONVERT_EXPR_P (exp))
3815 exp = TREE_OPERAND (exp, 0);
3816
3817 code = TREE_CODE (exp);
3818
3819 if (TREE_CODE_CLASS (code) == tcc_comparison)
3820 return (simple_operand_p (TREE_OPERAND (exp, 0))
3821 && simple_operand_p (TREE_OPERAND (exp, 1)));
3822
3823 if (code == TRUTH_NOT_EXPR)
3824 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3825
3826 return simple_operand_p (exp);
3827 }
3828
3829 \f
3830 /* The following functions are subroutines to fold_range_test and allow it to
3831 try to change a logical combination of comparisons into a range test.
3832
3833 For example, both
3834 X == 2 || X == 3 || X == 4 || X == 5
3835 and
3836 X >= 2 && X <= 5
3837 are converted to
3838 (unsigned) (X - 2) <= 3
3839
3840 We describe each set of comparisons as being either inside or outside
3841 a range, using a variable named like IN_P, and then describe the
3842 range with a lower and upper bound. If one of the bounds is omitted,
3843 it represents either the highest or lowest value of the type.
3844
3845 In the comments below, we represent a range by two numbers in brackets
3846 preceded by a "+" to designate being inside that range, or a "-" to
3847 designate being outside that range, so the condition can be inverted by
3848 flipping the prefix. An omitted bound is represented by a "-". For
3849 example, "- [-, 10]" means being outside the range starting at the lowest
3850 possible value and ending at 10, in other words, being greater than 10.
3851 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3852 always false.
3853
3854 We set up things so that the missing bounds are handled in a consistent
3855 manner so neither a missing bound nor "true" and "false" need to be
3856 handled using a special case. */
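 /* To make the notation concrete: "X >= 2 && X <= 5" is written
    "+ [2, 5]", its inverse "X < 2 || X > 5" is "- [2, 5]", and
    "X > 10" is "- [-, 10]". A sketch of the test ultimately built
    for "+ [2, 5]" (see build_range_check below):

      in_p = (unsigned) (X - 2) <= 5 - 2;

    Values below 2 wrap around to large numbers and fail the test. */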
3857
3858 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3859 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3860 and UPPER1_P are nonzero if the respective argument is an upper bound
3861 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3862 must be specified for a comparison. ARG1 will be converted to ARG0's
3863 type if both are specified. */
3864
3865 static tree
3866 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3867 tree arg1, int upper1_p)
3868 {
3869 tree tem;
3870 int result;
3871 int sgn0, sgn1;
3872
3873 /* If neither arg represents infinity, do the normal operation.
3874 Else, if not a comparison, return infinity. Else handle the special
3875 comparison rules. Note that most of the cases below won't occur, but
3876 are handled for consistency. */
3877
3878 if (arg0 != 0 && arg1 != 0)
3879 {
3880 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3881 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3882 STRIP_NOPS (tem);
3883 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3884 }
3885
3886 if (TREE_CODE_CLASS (code) != tcc_comparison)
3887 return 0;
3888
3891 3889 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3892 3890 for neither. In real mathematics, we cannot assume open-ended ranges
3893 3891 are the same. But this is computer arithmetic, where numbers are finite.
3894 3892 We can therefore replace any missing bound with a value Z, Z being
3895 3893 greater than any representable number. This permits
3896 3894 us to treat unbounded ranges as equal. */
3895 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3896 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3897 switch (code)
3898 {
3899 case EQ_EXPR:
3900 result = sgn0 == sgn1;
3901 break;
3902 case NE_EXPR:
3903 result = sgn0 != sgn1;
3904 break;
3905 case LT_EXPR:
3906 result = sgn0 < sgn1;
3907 break;
3908 case LE_EXPR:
3909 result = sgn0 <= sgn1;
3910 break;
3911 case GT_EXPR:
3912 result = sgn0 > sgn1;
3913 break;
3914 case GE_EXPR:
3915 result = sgn0 >= sgn1;
3916 break;
3917 default:
3918 gcc_unreachable ();
3919 }
3920
3921 return constant_boolean_node (result, type);
3922 }
3923 \f
3924 /* Helper routine for make_range. Perform one step for it, return
3925 new expression if the loop should continue or NULL_TREE if it should
3926 stop. */
3927
3928 tree
3929 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3930 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3931 bool *strict_overflow_p)
3932 {
3933 tree arg0_type = TREE_TYPE (arg0);
3934 tree n_low, n_high, low = *p_low, high = *p_high;
3935 int in_p = *p_in_p, n_in_p;
3936
3937 switch (code)
3938 {
3939 case TRUTH_NOT_EXPR:
3940 /* We can only do something if the range is testing for zero. */
3941 if (low == NULL_TREE || high == NULL_TREE
3942 || ! integer_zerop (low) || ! integer_zerop (high))
3943 return NULL_TREE;
3944 *p_in_p = ! in_p;
3945 return arg0;
3946
3947 case EQ_EXPR: case NE_EXPR:
3948 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3949 /* We can only do something if the range is testing for zero
3950 and if the second operand is an integer constant. Note that
3953 3951 saying something is "in" the range we make is done by
3954 3952 complementing IN_P, since the range is initially set up for the
3955 3953 case of being not equal to zero; "out" is leaving IN_P alone. */
3954 if (low == NULL_TREE || high == NULL_TREE
3955 || ! integer_zerop (low) || ! integer_zerop (high)
3956 || TREE_CODE (arg1) != INTEGER_CST)
3957 return NULL_TREE;
3958
3959 switch (code)
3960 {
3961 case NE_EXPR: /* - [c, c] */
3962 low = high = arg1;
3963 break;
3964 case EQ_EXPR: /* + [c, c] */
3965 in_p = ! in_p, low = high = arg1;
3966 break;
3967 case GT_EXPR: /* - [-, c] */
3968 low = 0, high = arg1;
3969 break;
3970 case GE_EXPR: /* + [c, -] */
3971 in_p = ! in_p, low = arg1, high = 0;
3972 break;
3973 case LT_EXPR: /* - [c, -] */
3974 low = arg1, high = 0;
3975 break;
3976 case LE_EXPR: /* + [-, c] */
3977 in_p = ! in_p, low = 0, high = arg1;
3978 break;
3979 default:
3980 gcc_unreachable ();
3981 }
3982
3983 /* If this is an unsigned comparison, we also know that EXP is
3984 greater than or equal to zero. We base the range tests we make
3985 on that fact, so we record it here so we can parse existing
3986 range tests. We test arg0_type since often the return type
3987 of, e.g. EQ_EXPR, is boolean. */
3988 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3989 {
3990 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3991 in_p, low, high, 1,
3992 build_int_cst (arg0_type, 0),
3993 NULL_TREE))
3994 return NULL_TREE;
3995
3996 in_p = n_in_p, low = n_low, high = n_high;
3997
3998 /* If the high bound is missing, but we have a nonzero low
3999 bound, reverse the range so it goes from zero to the low bound
4000 minus 1. */
4001 if (high == 0 && low && ! integer_zerop (low))
4002 {
4003 in_p = ! in_p;
4004 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4005 integer_one_node, 0);
4006 low = build_int_cst (arg0_type, 0);
4007 }
4008 }
4009
4010 *p_low = low;
4011 *p_high = high;
4012 *p_in_p = in_p;
4013 return arg0;
4014
4015 case NEGATE_EXPR:
4016 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4017 low and high are non-NULL, then normalize will DTRT. */
4018 if (!TYPE_UNSIGNED (arg0_type)
4019 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4020 {
4021 if (low == NULL_TREE)
4022 low = TYPE_MIN_VALUE (arg0_type);
4023 if (high == NULL_TREE)
4024 high = TYPE_MAX_VALUE (arg0_type);
4025 }
4026
4027 /* (-x) IN [a,b] -> x in [-b, -a] */
4028 n_low = range_binop (MINUS_EXPR, exp_type,
4029 build_int_cst (exp_type, 0),
4030 0, high, 1);
4031 n_high = range_binop (MINUS_EXPR, exp_type,
4032 build_int_cst (exp_type, 0),
4033 0, low, 0);
4034 if (n_high != 0 && TREE_OVERFLOW (n_high))
4035 return NULL_TREE;
4036 goto normalize;
4037
4038 case BIT_NOT_EXPR:
4039 /* ~ X -> -X - 1 */
4040 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4041 build_int_cst (exp_type, 1));
4042
4043 case PLUS_EXPR:
4044 case MINUS_EXPR:
4045 if (TREE_CODE (arg1) != INTEGER_CST)
4046 return NULL_TREE;
4047
4048 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4049 move a constant to the other side. */
4050 if (!TYPE_UNSIGNED (arg0_type)
4051 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4052 return NULL_TREE;
4053
4054 /* If EXP is signed, any overflow in the computation is undefined,
4055 so we don't worry about it so long as our computations on
4056 the bounds don't overflow. For unsigned, overflow is defined
4057 and this is exactly the right thing. */
4058 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4059 arg0_type, low, 0, arg1, 0);
4060 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4061 arg0_type, high, 1, arg1, 0);
4062 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4063 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4064 return NULL_TREE;
4065
4066 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4067 *strict_overflow_p = true;
4068
4069 normalize:
4070 /* Check for an unsigned range which has wrapped around the maximum
4071 value thus making n_high < n_low, and normalize it. */
4072 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4073 {
4074 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4075 integer_one_node, 0);
4076 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4077 integer_one_node, 0);
4078
4079 /* If the range is of the form +/- [ x+1, x ], we won't
4080 be able to normalize it. But then, it represents the
4081 whole range or the empty set, so make it
4082 +/- [ -, - ]. */
4083 if (tree_int_cst_equal (n_low, low)
4084 && tree_int_cst_equal (n_high, high))
4085 low = high = 0;
4086 else
4087 in_p = ! in_p;
4088 }
4089 else
4090 low = n_low, high = n_high;
4091
4092 *p_low = low;
4093 *p_high = high;
4094 *p_in_p = in_p;
4095 return arg0;
4096
4097 CASE_CONVERT:
4098 case NON_LVALUE_EXPR:
4099 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4100 return NULL_TREE;
4101
4102 if (! INTEGRAL_TYPE_P (arg0_type)
4103 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4104 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4105 return NULL_TREE;
4106
4107 n_low = low, n_high = high;
4108
4109 if (n_low != 0)
4110 n_low = fold_convert_loc (loc, arg0_type, n_low);
4111
4112 if (n_high != 0)
4113 n_high = fold_convert_loc (loc, arg0_type, n_high);
4114
4117 4115 /* If we're converting arg0 from an unsigned type to exp's
4118 4116 signed type, we will be doing the comparison as unsigned.
4117 The tests above have already verified that LOW and HIGH
4118 are both positive.
4119
4120 So we have to ensure that we will handle large unsigned
4121 values the same way that the current signed bounds treat
4122 negative values. */
4123
4124 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4125 {
4126 tree high_positive;
4127 tree equiv_type;
4128 /* For fixed-point modes, we need to pass the saturating flag
4129 as the 2nd parameter. */
4130 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4131 equiv_type
4132 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4133 TYPE_SATURATING (arg0_type));
4134 else
4135 equiv_type
4136 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4137
4138 /* A range without an upper bound is, naturally, unbounded.
4139 Since convert would have cropped a very large value, use
4140 the max value for the destination type. */
4141 high_positive
4142 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4143 : TYPE_MAX_VALUE (arg0_type);
4144
4145 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4146 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4147 fold_convert_loc (loc, arg0_type,
4148 high_positive),
4149 build_int_cst (arg0_type, 1));
4150
4151 /* If the low bound is specified, "and" the range with the
4152 range for which the original unsigned value will be
4153 positive. */
4154 if (low != 0)
4155 {
4156 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4157 1, fold_convert_loc (loc, arg0_type,
4158 integer_zero_node),
4159 high_positive))
4160 return NULL_TREE;
4161
4162 in_p = (n_in_p == in_p);
4163 }
4164 else
4165 {
4166 /* Otherwise, "or" the range with the range of the input
4167 that will be interpreted as negative. */
4168 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4169 1, fold_convert_loc (loc, arg0_type,
4170 integer_zero_node),
4171 high_positive))
4172 return NULL_TREE;
4173
4174 in_p = (in_p != n_in_p);
4175 }
4176 }
4177
4178 *p_low = n_low;
4179 *p_high = n_high;
4180 *p_in_p = in_p;
4181 return arg0;
4182
4183 default:
4184 return NULL_TREE;
4185 }
4186 }
4187
4188 /* Given EXP, a logical expression, set the range it is testing into
4189 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4190 actually being tested. *PLOW and *PHIGH will be made of the same
4191 type as the returned expression. If EXP is not a comparison, we
4192 will most likely not be returning a useful value and range. Set
4193 *STRICT_OVERFLOW_P to true if the return value is only valid
4194 because signed overflow is undefined; otherwise, do not change
4195 *STRICT_OVERFLOW_P. */
4196
4197 tree
4198 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4199 bool *strict_overflow_p)
4200 {
4201 enum tree_code code;
4202 tree arg0, arg1 = NULL_TREE;
4203 tree exp_type, nexp;
4204 int in_p;
4205 tree low, high;
4206 location_t loc = EXPR_LOCATION (exp);
4207
4208 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4209 and see if we can refine the range. Some of the cases below may not
4210 happen, but it doesn't seem worth worrying about this. We "continue"
4211 the outer loop when we've changed something; otherwise we "break"
4212 the switch, which will "break" the while. */
4213
4214 in_p = 0;
4215 low = high = build_int_cst (TREE_TYPE (exp), 0);
4216
4217 while (1)
4218 {
4219 code = TREE_CODE (exp);
4220 exp_type = TREE_TYPE (exp);
4221 arg0 = NULL_TREE;
4222
4223 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4224 {
4225 if (TREE_OPERAND_LENGTH (exp) > 0)
4226 arg0 = TREE_OPERAND (exp, 0);
4227 if (TREE_CODE_CLASS (code) == tcc_binary
4228 || TREE_CODE_CLASS (code) == tcc_comparison
4229 || (TREE_CODE_CLASS (code) == tcc_expression
4230 && TREE_OPERAND_LENGTH (exp) > 1))
4231 arg1 = TREE_OPERAND (exp, 1);
4232 }
4233 if (arg0 == NULL_TREE)
4234 break;
4235
4236 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4237 &high, &in_p, strict_overflow_p);
4238 if (nexp == NULL_TREE)
4239 break;
4240 exp = nexp;
4241 }
4242
4243 /* If EXP is a constant, we can evaluate whether this is true or false. */
4244 if (TREE_CODE (exp) == INTEGER_CST)
4245 {
4246 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4247 exp, 0, low, 0))
4248 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4249 exp, 1, high, 1)));
4250 low = high = 0;
4251 exp = 0;
4252 }
4253
4254 *pin_p = in_p, *plow = low, *phigh = high;
4255 return exp;
4256 }
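 /* For example, applied to EXP = "x > 10" with signed X, make_range
    returns "x" with *PIN_P == 0 and the range [-, 10], i.e. the
    "- [-, 10]" form described before range_binop above. Unsigned
    operands are additionally merged with the range [0, -]. */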
4257 \f
4258 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4259 type, TYPE, return an expression to test if EXP is in (or out of, depending
4260 on IN_P) the range. Return 0 if the test couldn't be created. */
4261
4262 tree
4263 build_range_check (location_t loc, tree type, tree exp, int in_p,
4264 tree low, tree high)
4265 {
4266 tree etype = TREE_TYPE (exp), value;
4267
4268 #ifdef HAVE_canonicalize_funcptr_for_compare
4269 /* Disable this optimization for function pointer expressions
4270 on targets that require function pointer canonicalization. */
4271 if (HAVE_canonicalize_funcptr_for_compare
4272 && TREE_CODE (etype) == POINTER_TYPE
4273 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4274 return NULL_TREE;
4275 #endif
4276
4277 if (! in_p)
4278 {
4279 value = build_range_check (loc, type, exp, 1, low, high);
4280 if (value != 0)
4281 return invert_truthvalue_loc (loc, value);
4282
4283 return 0;
4284 }
4285
4286 if (low == 0 && high == 0)
4287 return build_int_cst (type, 1);
4288
4289 if (low == 0)
4290 return fold_build2_loc (loc, LE_EXPR, type, exp,
4291 fold_convert_loc (loc, etype, high));
4292
4293 if (high == 0)
4294 return fold_build2_loc (loc, GE_EXPR, type, exp,
4295 fold_convert_loc (loc, etype, low));
4296
4297 if (operand_equal_p (low, high, 0))
4298 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4299 fold_convert_loc (loc, etype, low));
4300
4301 if (integer_zerop (low))
4302 {
4303 if (! TYPE_UNSIGNED (etype))
4304 {
4305 etype = unsigned_type_for (etype);
4306 high = fold_convert_loc (loc, etype, high);
4307 exp = fold_convert_loc (loc, etype, exp);
4308 }
4309 return build_range_check (loc, type, exp, 1, 0, high);
4310 }
4311
4312 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4313 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4314 {
4315 unsigned HOST_WIDE_INT lo;
4316 HOST_WIDE_INT hi;
4317 int prec;
4318
4319 prec = TYPE_PRECISION (etype);
4320 if (prec <= HOST_BITS_PER_WIDE_INT)
4321 {
4322 hi = 0;
4323 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4324 }
4325 else
4326 {
4327 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4328 lo = (unsigned HOST_WIDE_INT) -1;
4329 }
4330
4331 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4332 {
4333 if (TYPE_UNSIGNED (etype))
4334 {
4335 tree signed_etype = signed_type_for (etype);
4336 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4337 etype
4338 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4339 else
4340 etype = signed_etype;
4341 exp = fold_convert_loc (loc, etype, exp);
4342 }
4343 return fold_build2_loc (loc, GT_EXPR, type, exp,
4344 build_int_cst (etype, 0));
4345 }
4346 }
4347
4348 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4351 4349 This requires wrap-around arithmetic for the type of the expression.
4352 4350 First make sure that arithmetic in this type is valid, then make sure
4353 4351 that it wraps around. */
4352 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4353 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4354 TYPE_UNSIGNED (etype));
4355
4356 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4357 {
4358 tree utype, minv, maxv;
4359
4360 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4361 for the type in question, as we rely on this here. */
4362 utype = unsigned_type_for (etype);
4363 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4364 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4365 integer_one_node, 1);
4366 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4367
4368 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4369 minv, 1, maxv, 1)))
4370 etype = utype;
4371 else
4372 return 0;
4373 }
4374
4375 high = fold_convert_loc (loc, etype, high);
4376 low = fold_convert_loc (loc, etype, low);
4377 exp = fold_convert_loc (loc, etype, exp);
4378
4379 value = const_binop (MINUS_EXPR, high, low);
4380
4381
4382 if (POINTER_TYPE_P (etype))
4383 {
4384 if (value != 0 && !TREE_OVERFLOW (value))
4385 {
4386 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4387 return build_range_check (loc, type,
4388 fold_build_pointer_plus_loc (loc, exp, low),
4389 1, build_int_cst (etype, 0), value);
4390 }
4391 return 0;
4392 }
4393
4394 if (value != 0 && !TREE_OVERFLOW (value))
4395 return build_range_check (loc, type,
4396 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4397 1, build_int_cst (etype, 0), value);
4398
4399 return 0;
4400 }
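 /* A sketch of the common output: for a check that EXP is in [2, 5]
    over an unsigned ETYPE, the final MINUS_EXPR branch above yields

      (exp - 2) <= 3

    in unsigned arithmetic, so one comparison covers both bounds. */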
4401 \f
4402 /* Return the predecessor of VAL in its type, handling the infinite case. */
4403
4404 static tree
4405 range_predecessor (tree val)
4406 {
4407 tree type = TREE_TYPE (val);
4408
4409 if (INTEGRAL_TYPE_P (type)
4410 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4411 return 0;
4412 else
4413 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4414 }
4415
4416 /* Return the successor of VAL in its type, handling the infinite case. */
4417
4418 static tree
4419 range_successor (tree val)
4420 {
4421 tree type = TREE_TYPE (val);
4422
4423 if (INTEGRAL_TYPE_P (type)
4424 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4425 return 0;
4426 else
4427 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4428 }
4429
4430 /* Given two ranges, see if we can merge them into one. Return 1 if we
4431 can, 0 if we can't. Set the output range into the specified parameters. */
4432
4433 bool
4434 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4435 tree high0, int in1_p, tree low1, tree high1)
4436 {
4437 int no_overlap;
4438 int subset;
4439 int temp;
4440 tree tem;
4441 int in_p;
4442 tree low, high;
4443 int lowequal = ((low0 == 0 && low1 == 0)
4444 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4445 low0, 0, low1, 0)));
4446 int highequal = ((high0 == 0 && high1 == 0)
4447 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4448 high0, 1, high1, 1)));
4449
4450 /* Make range 0 be the range that starts first, or ends last if they
4451 start at the same value. Swap them if that isn't the case. */
4452 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4453 low0, 0, low1, 0))
4454 || (lowequal
4455 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4456 high1, 1, high0, 1))))
4457 {
4458 temp = in0_p, in0_p = in1_p, in1_p = temp;
4459 tem = low0, low0 = low1, low1 = tem;
4460 tem = high0, high0 = high1, high1 = tem;
4461 }
4462
4463 /* Now flag two cases, whether the ranges are disjoint or whether the
4464 second range is totally subsumed in the first. Note that the tests
4465 below are simplified by the ones above. */
4466 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4467 high0, 1, low1, 0));
4468 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4469 high1, 1, high0, 1));
4470
4471 /* We now have four cases, depending on whether we are including or
4472 excluding the two ranges. */
4473 if (in0_p && in1_p)
4474 {
4475 /* If they don't overlap, the result is false. If the second range
4476 is a subset it is the result. Otherwise, the range is from the start
4477 of the second to the end of the first. */
4478 if (no_overlap)
4479 in_p = 0, low = high = 0;
4480 else if (subset)
4481 in_p = 1, low = low1, high = high1;
4482 else
4483 in_p = 1, low = low1, high = high0;
4484 }
4485
4486 else if (in0_p && ! in1_p)
4487 {
4488 /* If they don't overlap, the result is the first range. If they are
4489 equal, the result is false. If the second range is a subset of the
4490 first, and the ranges begin at the same place, we go from just after
4491 the end of the second range to the end of the first. If the second
4492 range is not a subset of the first, or if it is a subset and both
4493 ranges end at the same place, the range starts at the start of the
4494 first range and ends just before the second range.
4495 Otherwise, we can't describe this as a single range. */
4496 if (no_overlap)
4497 in_p = 1, low = low0, high = high0;
4498 else if (lowequal && highequal)
4499 in_p = 0, low = high = 0;
4500 else if (subset && lowequal)
4501 {
4502 low = range_successor (high1);
4503 high = high0;
4504 in_p = 1;
4505 if (low == 0)
4506 {
4507 /* We are in the weird situation where high0 > high1 but
4508 high1 has no successor. Punt. */
4509 return 0;
4510 }
4511 }
4512 else if (! subset || highequal)
4513 {
4514 low = low0;
4515 high = range_predecessor (low1);
4516 in_p = 1;
4517 if (high == 0)
4518 {
4519 /* low0 < low1 but low1 has no predecessor. Punt. */
4520 return 0;
4521 }
4522 }
4523 else
4524 return 0;
4525 }
4526
4527 else if (! in0_p && in1_p)
4528 {
4529 /* If they don't overlap, the result is the second range. If the second
4530 is a subset of the first, the result is false. Otherwise,
4531 the range starts just after the first range and ends at the
4532 end of the second. */
4533 if (no_overlap)
4534 in_p = 1, low = low1, high = high1;
4535 else if (subset || highequal)
4536 in_p = 0, low = high = 0;
4537 else
4538 {
4539 low = range_successor (high0);
4540 high = high1;
4541 in_p = 1;
4542 if (low == 0)
4543 {
4544 /* high1 > high0 but high0 has no successor. Punt. */
4545 return 0;
4546 }
4547 }
4548 }
4549
4550 else
4551 {
4552 /* The case where we are excluding both ranges. Here the complex case
4553 is if they don't overlap. In that case, the only time we have a
4554 range is if they are adjacent. If the second is a subset of the
4555 first, the result is the first. Otherwise, the range to exclude
4556 starts at the beginning of the first range and ends at the end of the
4557 second. */
4558 if (no_overlap)
4559 {
4560 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4561 range_successor (high0),
4562 1, low1, 0)))
4563 in_p = 0, low = low0, high = high1;
4564 else
4565 {
4566 /* Canonicalize - [min, x] into - [-, x]. */
4567 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4568 switch (TREE_CODE (TREE_TYPE (low0)))
4569 {
4570 case ENUMERAL_TYPE:
4571 if (TYPE_PRECISION (TREE_TYPE (low0))
4572 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4573 break;
4574 /* FALLTHROUGH */
4575 case INTEGER_TYPE:
4576 if (tree_int_cst_equal (low0,
4577 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4578 low0 = 0;
4579 break;
4580 case POINTER_TYPE:
4581 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4582 && integer_zerop (low0))
4583 low0 = 0;
4584 break;
4585 default:
4586 break;
4587 }
4588
4589 /* Canonicalize - [x, max] into - [x, -]. */
4590 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4591 switch (TREE_CODE (TREE_TYPE (high1)))
4592 {
4593 case ENUMERAL_TYPE:
4594 if (TYPE_PRECISION (TREE_TYPE (high1))
4595 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4596 break;
4597 /* FALLTHROUGH */
4598 case INTEGER_TYPE:
4599 if (tree_int_cst_equal (high1,
4600 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4601 high1 = 0;
4602 break;
4603 case POINTER_TYPE:
4604 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4605 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4606 high1, 1,
4607 integer_one_node, 1)))
4608 high1 = 0;
4609 break;
4610 default:
4611 break;
4612 }
4613
4614 /* The ranges might also be adjacent between the maximum and
4615 minimum values of the given type. For
4616 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4617 return + [x + 1, y - 1]. */
4618 if (low0 == 0 && high1 == 0)
4619 {
4620 low = range_successor (high0);
4621 high = range_predecessor (low1);
4622 if (low == 0 || high == 0)
4623 return 0;
4624
4625 in_p = 1;
4626 }
4627 else
4628 return 0;
4629 }
4630 }
4631 else if (subset)
4632 in_p = 0, low = low0, high = high0;
4633 else
4634 in_p = 0, low = low0, high = high1;
4635 }
4636
4637 *pin_p = in_p, *plow = low, *phigh = high;
4638 return 1;
4639 }
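 /* For example, merging "+ [2, 5]" with "+ [4, 9]" (both included, as
    for the AND of two range tests) yields "+ [4, 5]": the ranges
    overlap, neither is a subset of the other, so the result runs from
    the start of the second range to the end of the first. */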
4640 \f
4641
4642 /* Subroutine of fold, looking inside expressions of the form
4643 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4644 of the COND_EXPR. This function is being used also to optimize
4645 A op B ? C : A, by reversing the comparison first.
4646
4647 Return a folded expression whose code is not a COND_EXPR
4648 anymore, or NULL_TREE if no folding opportunity is found. */
4649
4650 static tree
4651 fold_cond_expr_with_comparison (location_t loc, tree type,
4652 tree arg0, tree arg1, tree arg2)
4653 {
4654 enum tree_code comp_code = TREE_CODE (arg0);
4655 tree arg00 = TREE_OPERAND (arg0, 0);
4656 tree arg01 = TREE_OPERAND (arg0, 1);
4657 tree arg1_type = TREE_TYPE (arg1);
4658 tree tem;
4659
4660 STRIP_NOPS (arg1);
4661 STRIP_NOPS (arg2);
4662
4663 /* If we have A op 0 ? A : -A, consider applying the following
4664 transformations:
4665
4666 A == 0? A : -A same as -A
4667 A != 0? A : -A same as A
4668 A >= 0? A : -A same as abs (A)
4669 A > 0? A : -A same as abs (A)
4670 A <= 0? A : -A same as -abs (A)
4671 A < 0? A : -A same as -abs (A)
4672
4673 None of these transformations work for modes with signed
4674 zeros. If A is +/-0, the first two transformations will
4675 change the sign of the result (from +0 to -0, or vice
4676 versa). The last four will fix the sign of the result,
4677 even though the original expressions could be positive or
4678 negative, depending on the sign of A.
4679
4680 Note that all these transformations are correct if A is
4681 NaN, since the two alternatives (A and -A) are also NaNs. */
4682 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4683 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4684 ? real_zerop (arg01)
4685 : integer_zerop (arg01))
4686 && ((TREE_CODE (arg2) == NEGATE_EXPR
4687 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4688 /* In the case that A is of the form X-Y, '-A' (arg2) may
4689 have already been folded to Y-X, check for that. */
4690 || (TREE_CODE (arg1) == MINUS_EXPR
4691 && TREE_CODE (arg2) == MINUS_EXPR
4692 && operand_equal_p (TREE_OPERAND (arg1, 0),
4693 TREE_OPERAND (arg2, 1), 0)
4694 && operand_equal_p (TREE_OPERAND (arg1, 1),
4695 TREE_OPERAND (arg2, 0), 0))))
4696 switch (comp_code)
4697 {
4698 case EQ_EXPR:
4699 case UNEQ_EXPR:
4700 tem = fold_convert_loc (loc, arg1_type, arg1);
4701 return pedantic_non_lvalue_loc (loc,
4702 fold_convert_loc (loc, type,
4703 negate_expr (tem)));
4704 case NE_EXPR:
4705 case LTGT_EXPR:
4706 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4707 case UNGE_EXPR:
4708 case UNGT_EXPR:
4709 if (flag_trapping_math)
4710 break;
4711 /* Fall through. */
4712 case GE_EXPR:
4713 case GT_EXPR:
4714 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4715 arg1 = fold_convert_loc (loc, signed_type_for
4716 (TREE_TYPE (arg1)), arg1);
4717 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4718 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4719 case UNLE_EXPR:
4720 case UNLT_EXPR:
4721 if (flag_trapping_math)
4722 break;
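 /* Fall through. */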
4723 case LE_EXPR:
4724 case LT_EXPR:
4725 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4726 arg1 = fold_convert_loc (loc, signed_type_for
4727 (TREE_TYPE (arg1)), arg1);
4728 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4729 return negate_expr (fold_convert_loc (loc, type, tem));
4730 default:
4731 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4732 break;
4733 }
4734
4735 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4736 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4737 both transformations are correct when A is NaN: A != 0
4738 is then true, and A == 0 is false. */
4739
4740 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4741 && integer_zerop (arg01) && integer_zerop (arg2))
4742 {
4743 if (comp_code == NE_EXPR)
4744 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4745 else if (comp_code == EQ_EXPR)
4746 return build_zero_cst (type);
4747 }
4748
4749 /* Try some transformations of A op B ? A : B.
4750
4751 A == B? A : B same as B
4752 A != B? A : B same as A
4753 A >= B? A : B same as max (A, B)
4754 A > B? A : B same as max (B, A)
4755 A <= B? A : B same as min (A, B)
4756 A < B? A : B same as min (B, A)
4757
4758 As above, these transformations don't work in the presence
4759 of signed zeros. For example, if A and B are zeros of
4760 opposite sign, the first two transformations will change
4761 the sign of the result. In the last four, the original
4762 expressions give different results for (A=+0, B=-0) and
4763 (A=-0, B=+0), but the transformed expressions do not.
4764
4765 The first two transformations are correct if either A or B
4766 is a NaN. In the first transformation, the condition will
4767 be false, and B will indeed be chosen. In the case of the
4768 second transformation, the condition A != B will be true,
4769 and A will be chosen.
4770
4771 The conversions to max() and min() are not correct if B is
4772 a number and A is not. The conditions in the original
4773 expressions will be false, so all four give B. The min()
4774 and max() versions would give a NaN instead. */
4775 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4776 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4777 /* Avoid these transformations if the COND_EXPR may be used
4778 as an lvalue in the C++ front-end. PR c++/19199. */
4779 && (in_gimple_form
4780 || VECTOR_TYPE_P (type)
4781 || (strcmp (lang_hooks.name, "GNU C++") != 0
4782 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4783 || ! maybe_lvalue_p (arg1)
4784 || ! maybe_lvalue_p (arg2)))
4785 {
4786 tree comp_op0 = arg00;
4787 tree comp_op1 = arg01;
4788 tree comp_type = TREE_TYPE (comp_op0);
4789
4790 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4791 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4792 {
4793 comp_type = type;
4794 comp_op0 = arg1;
4795 comp_op1 = arg2;
4796 }
4797
4798 switch (comp_code)
4799 {
4800 case EQ_EXPR:
4801 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4802 case NE_EXPR:
4803 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4804 case LE_EXPR:
4805 case LT_EXPR:
4806 case UNLE_EXPR:
4807 case UNLT_EXPR:
4808 /* In C++ a ?: expression can be an lvalue, so put the
4809 operand which will be used if they are equal first
4810 so that we can convert this back to the
4811 corresponding COND_EXPR. */
4812 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4813 {
4814 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4815 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4816 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4817 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4818 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4819 comp_op1, comp_op0);
4820 return pedantic_non_lvalue_loc (loc,
4821 fold_convert_loc (loc, type, tem));
4822 }
4823 break;
4824 case GE_EXPR:
4825 case GT_EXPR:
4826 case UNGE_EXPR:
4827 case UNGT_EXPR:
4828 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4829 {
4830 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4831 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4832 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4833 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4834 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4835 comp_op1, comp_op0);
4836 return pedantic_non_lvalue_loc (loc,
4837 fold_convert_loc (loc, type, tem));
4838 }
4839 break;
4840 case UNEQ_EXPR:
4841 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4842 return pedantic_non_lvalue_loc (loc,
4843 fold_convert_loc (loc, type, arg2));
4844 break;
4845 case LTGT_EXPR:
4846 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4847 return pedantic_non_lvalue_loc (loc,
4848 fold_convert_loc (loc, type, arg1));
4849 break;
4850 default:
4851 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4852 break;
4853 }
4854 }
4855
4856 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4857 we might still be able to simplify this. For example,
4858 if C1 is one less or one more than C2, this might have started
4859 out as a MIN or MAX and been transformed by this function.
4860 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4861
4862 if (INTEGRAL_TYPE_P (type)
4863 && TREE_CODE (arg01) == INTEGER_CST
4864 && TREE_CODE (arg2) == INTEGER_CST)
4865 switch (comp_code)
4866 {
4867 case EQ_EXPR:
4868 if (TREE_CODE (arg1) == INTEGER_CST)
4869 break;
4870 /* We can replace A with C1 in this case. */
4871 arg1 = fold_convert_loc (loc, type, arg01);
4872 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4873
4874 case LT_EXPR:
4875 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4876 MIN_EXPR, to preserve the signedness of the comparison. */
4877 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4878 OEP_ONLY_CONST)
4879 && operand_equal_p (arg01,
4880 const_binop (PLUS_EXPR, arg2,
4881 build_int_cst (type, 1)),
4882 OEP_ONLY_CONST))
4883 {
4884 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4885 fold_convert_loc (loc, TREE_TYPE (arg00),
4886 arg2));
4887 return pedantic_non_lvalue_loc (loc,
4888 fold_convert_loc (loc, type, tem));
4889 }
4890 break;
4891
4892 case LE_EXPR:
4893 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4894 as above. */
4895 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4896 OEP_ONLY_CONST)
4897 && operand_equal_p (arg01,
4898 const_binop (MINUS_EXPR, arg2,
4899 build_int_cst (type, 1)),
4900 OEP_ONLY_CONST))
4901 {
4902 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4903 fold_convert_loc (loc, TREE_TYPE (arg00),
4904 arg2));
4905 return pedantic_non_lvalue_loc (loc,
4906 fold_convert_loc (loc, type, tem));
4907 }
4908 break;
4909
4910 case GT_EXPR:
4911 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4912 MAX_EXPR, to preserve the signedness of the comparison. */
4913 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4914 OEP_ONLY_CONST)
4915 && operand_equal_p (arg01,
4916 const_binop (MINUS_EXPR, arg2,
4917 build_int_cst (type, 1)),
4918 OEP_ONLY_CONST))
4919 {
4920 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4921 fold_convert_loc (loc, TREE_TYPE (arg00),
4922 arg2));
4923 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4924 }
4925 break;
4926
4927 case GE_EXPR:
4928 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4929 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4930 OEP_ONLY_CONST)
4931 && operand_equal_p (arg01,
4932 const_binop (PLUS_EXPR, arg2,
4933 build_int_cst (type, 1)),
4934 OEP_ONLY_CONST))
4935 {
4936 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4937 fold_convert_loc (loc, TREE_TYPE (arg00),
4938 arg2));
4939 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4940 }
4941 break;
4942 case NE_EXPR:
4943 break;
4944 default:
4945 gcc_unreachable ();
4946 }
4947
4948 return NULL_TREE;
4949 }
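 /* For example, "a < b ? a : b" folds via the LT_EXPR case above to
    MIN_EXPR (a, b) when NaNs need not be honored, and "a > 0 ? a : -a"
    folds to ABS_EXPR (a) when signed zeros need not be honored. */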
4950
4951
4952 \f
4953 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4954 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4955 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4956 false) >= 2)
4957 #endif
4958
4959 /* EXP is some logical combination of boolean tests. See if we can
4960 merge it into some range test. Return the new tree if so. */
4961
4962 static tree
4963 fold_range_test (location_t loc, enum tree_code code, tree type,
4964 tree op0, tree op1)
4965 {
4966 int or_op = (code == TRUTH_ORIF_EXPR
4967 || code == TRUTH_OR_EXPR);
4968 int in0_p, in1_p, in_p;
4969 tree low0, low1, low, high0, high1, high;
4970 bool strict_overflow_p = false;
4971 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4972 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4973 tree tem;
4974 const char * const warnmsg = G_("assuming signed overflow does not occur "
4975 "when simplifying range test");
4976
4977 /* If this is an OR operation, invert both sides; we will invert
4978 again at the end. */
4979 if (or_op)
4980 in0_p = ! in0_p, in1_p = ! in1_p;
4981
4982 /* If both expressions are the same, if we can merge the ranges, and we
4983 can build the range test, return it or it inverted. If one of the
4984 ranges is always true or always false, consider it to be the same
4985 expression as the other. */
4986 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4987 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4988 in1_p, low1, high1)
4989 && 0 != (tem = (build_range_check (loc, type,
4990 lhs != 0 ? lhs
4991 : rhs != 0 ? rhs : integer_zero_node,
4992 in_p, low, high))))
4993 {
4994 if (strict_overflow_p)
4995 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4996 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4997 }
4998
4999 /* On machines where the branch cost is expensive, if this is a
5000 short-circuited branch and the underlying object on both sides
5001 is the same, make a non-short-circuit operation. */
5002 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5003 && lhs != 0 && rhs != 0
5004 && (code == TRUTH_ANDIF_EXPR
5005 || code == TRUTH_ORIF_EXPR)
5006 && operand_equal_p (lhs, rhs, 0))
5007 {
5008 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5009 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5010 which cases we can't do this. */
5011 if (simple_operand_p (lhs))
5012 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5013 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5014 type, op0, op1);
5015
5016 else if (!lang_hooks.decls.global_bindings_p ()
5017 && !CONTAINS_PLACEHOLDER_P (lhs))
5018 {
5019 tree common = save_expr (lhs);
5020
5021 if (0 != (lhs = build_range_check (loc, type, common,
5022 or_op ? ! in0_p : in0_p,
5023 low0, high0))
5024 && (0 != (rhs = build_range_check (loc, type, common,
5025 or_op ? ! in1_p : in1_p,
5026 low1, high1))))
5027 {
5028 if (strict_overflow_p)
5029 fold_overflow_warning (warnmsg,
5030 WARN_STRICT_OVERFLOW_COMPARISON);
5031 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5032 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5033 type, lhs, rhs);
5034 }
5035 }
5036 }
5037
5038 return 0;
5039 }
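 /* For example, "ch >= '0' && ch <= '9'" merges into the single
    range "+ ['0', '9']" and, via build_range_check above, becomes

      (unsigned) (ch - '0') <= 9

    replacing two branches with one comparison. */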
5040 \f
5041 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5042 bit value. Arrange things so the extra bits will be set to zero if and
5043 only if C is sign-extended to its full width. If MASK is nonzero,
5044 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5045
5046 static tree
5047 unextend (tree c, int p, int unsignedp, tree mask)
5048 {
5049 tree type = TREE_TYPE (c);
5050 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5051 tree temp;
5052
5053 if (p == modesize || unsignedp)
5054 return c;
5055
5056 /* We work by getting just the sign bit into the low-order bit, then
5057 into the high-order bit, then sign-extend. We then XOR that value
5058 with C. */
5059 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5060 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5061
5062 /* We must use a signed type in order to get an arithmetic right shift.
5063 However, we must also avoid introducing accidental overflows, so that
5064 a subsequent call to integer_zerop will work. Hence we must
5065 do the type conversion here. At this point, the constant is either
5066 zero or one, and the conversion to a signed type can never overflow.
5067 We could get an overflow if this conversion is done anywhere else. */
5068 if (TYPE_UNSIGNED (type))
5069 temp = fold_convert (signed_type_for (type), temp);
5070
5071 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5072 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5073 if (mask != 0)
5074 temp = const_binop (BIT_AND_EXPR, temp,
5075 fold_convert (TREE_TYPE (c), mask));
5076 /* If necessary, convert the type back to match the type of C. */
5077 if (TYPE_UNSIGNED (type))
5078 temp = fold_convert (type, temp);
5079
5080 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5081 }
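 /* A worked example, assuming a 32-bit mode and P == 4: for the
    signed 4-bit constant C = 0b1010, the steps above compute

      temp = (c >> 3) & 1;          now 1, the sign bit of the field
      temp = (temp << 31) >> 27;    sign bit smeared over bits 4..31
      c = c ^ temp;                 extra bits zero iff C was
                                    sign-extended to the full width

    so a subsequent integer_zerop test sees the same value whether or
    not C arrived with copies of its sign bit in the upper bits. */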
5082 \f
5083 /* For an expression that has the form
5084 (A && B) || ~B
5085 or
5086 (A || B) && ~B,
5087 we can drop one of the inner expressions and simplify to
5088 A || ~B
5089 or
5090 A && ~B
5091 LOC is the location of the resulting expression. OP is the inner
5092 logical operation; the left-hand side in the examples above, while CMPOP
5093 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5094 removing a condition that guards another, as in
5095 (A != NULL && A->...) || A == NULL
5096 which we must not transform. If RHS_ONLY is true, only eliminate the
5097 right-most operand of the inner logical operation. */
5098
5099 static tree
5100 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5101 bool rhs_only)
5102 {
5103 tree type = TREE_TYPE (cmpop);
5104 enum tree_code code = TREE_CODE (cmpop);
5105 enum tree_code truthop_code = TREE_CODE (op);
5106 tree lhs = TREE_OPERAND (op, 0);
5107 tree rhs = TREE_OPERAND (op, 1);
5108 tree orig_lhs = lhs, orig_rhs = rhs;
5109 enum tree_code rhs_code = TREE_CODE (rhs);
5110 enum tree_code lhs_code = TREE_CODE (lhs);
5111 enum tree_code inv_code;
5112
5113 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5114 return NULL_TREE;
5115
5116 if (TREE_CODE_CLASS (code) != tcc_comparison)
5117 return NULL_TREE;
5118
5119 if (rhs_code == truthop_code)
5120 {
5121 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5122 if (newrhs != NULL_TREE)
5123 {
5124 rhs = newrhs;
5125 rhs_code = TREE_CODE (rhs);
5126 }
5127 }
5128 if (lhs_code == truthop_code && !rhs_only)
5129 {
5130 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5131 if (newlhs != NULL_TREE)
5132 {
5133 lhs = newlhs;
5134 lhs_code = TREE_CODE (lhs);
5135 }
5136 }
5137
5138 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5139 if (inv_code == rhs_code
5140 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5141 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5142 return lhs;
5143 if (!rhs_only && inv_code == lhs_code
5144 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5145 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5146 return rhs;
5147 if (rhs != orig_rhs || lhs != orig_lhs)
5148 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5149 lhs, rhs);
5150 return NULL_TREE;
5151 }
5152
5153 /* Find ways of folding logical expressions of LHS and RHS:
5154 Try to merge two comparisons to the same innermost item.
5155 Look for range tests like "ch >= '0' && ch <= '9'".
5156 Look for combinations of simple terms on machines with expensive branches
5157 and evaluate the RHS unconditionally.
5158
5159 For example, if we have p->a == 2 && p->b == 4 and we can make an
5160 object large enough to span both A and B, we can do this with a comparison
5161 against the object ANDed with the a mask.
5162
5163 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5164 operations to do this with one comparison.
5165
5168 5166 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5169 5167 function and the one above.
5168
5169 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5170 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5171
5172 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5173 two operands.
5174
5175 We return the simplified tree or 0 if no optimization is possible. */
5176
5177 static tree
5178 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5179 tree lhs, tree rhs)
5180 {
5181 /* If this is the "or" of two comparisons, we can do something if
5182 the comparisons are NE_EXPR. If this is the "and", we can do something
5183 if the comparisons are EQ_EXPR. I.e.,
5184 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5185
5186 WANTED_CODE is this operation code. For single bit fields, we can
5187 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5188 comparison for one-bit fields. */
5189
5190 enum tree_code wanted_code;
5191 enum tree_code lcode, rcode;
5192 tree ll_arg, lr_arg, rl_arg, rr_arg;
5193 tree ll_inner, lr_inner, rl_inner, rr_inner;
5194 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5195 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5196 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5197 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5198 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5199 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5200 enum machine_mode lnmode, rnmode;
5201 tree ll_mask, lr_mask, rl_mask, rr_mask;
5202 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5203 tree l_const, r_const;
5204 tree lntype, rntype, result;
5205 HOST_WIDE_INT first_bit, end_bit;
5206 int volatilep;
5207
5208 /* Start by getting the comparison codes. Fail if anything is volatile.
5209 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5210 it were surrounded with a NE_EXPR. */
5211
5212 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5213 return 0;
5214
5215 lcode = TREE_CODE (lhs);
5216 rcode = TREE_CODE (rhs);
5217
5218 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5219 {
5220 lhs = build2 (NE_EXPR, truth_type, lhs,
5221 build_int_cst (TREE_TYPE (lhs), 0));
5222 lcode = NE_EXPR;
5223 }
5224
5225 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5226 {
5227 rhs = build2 (NE_EXPR, truth_type, rhs,
5228 build_int_cst (TREE_TYPE (rhs), 0));
5229 rcode = NE_EXPR;
5230 }
5231
5232 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5233 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5234 return 0;
5235
5236 ll_arg = TREE_OPERAND (lhs, 0);
5237 lr_arg = TREE_OPERAND (lhs, 1);
5238 rl_arg = TREE_OPERAND (rhs, 0);
5239 rr_arg = TREE_OPERAND (rhs, 1);
5240
5241 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5242 if (simple_operand_p (ll_arg)
5243 && simple_operand_p (lr_arg))
5244 {
5245 if (operand_equal_p (ll_arg, rl_arg, 0)
5246 && operand_equal_p (lr_arg, rr_arg, 0))
5247 {
5248 result = combine_comparisons (loc, code, lcode, rcode,
5249 truth_type, ll_arg, lr_arg);
5250 if (result)
5251 return result;
5252 }
5253 else if (operand_equal_p (ll_arg, rr_arg, 0)
5254 && operand_equal_p (lr_arg, rl_arg, 0))
5255 {
5256 result = combine_comparisons (loc, code, lcode,
5257 swap_tree_comparison (rcode),
5258 truth_type, ll_arg, lr_arg);
5259 if (result)
5260 return result;
5261 }
5262 }
5263
5264 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5265 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5266
5267 /* If the RHS can be evaluated unconditionally and its operands are
5268 simple, it wins to evaluate the RHS unconditionally on machines
5269 with expensive branches. In this case, this isn't a comparison
5270 that can be merged. */
5271
5272 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5273 false) >= 2
5274 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5275 && simple_operand_p (rl_arg)
5276 && simple_operand_p (rr_arg))
5277 {
5278 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5279 if (code == TRUTH_OR_EXPR
5280 && lcode == NE_EXPR && integer_zerop (lr_arg)
5281 && rcode == NE_EXPR && integer_zerop (rr_arg)
5282 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5283 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5284 return build2_loc (loc, NE_EXPR, truth_type,
5285 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5286 ll_arg, rl_arg),
5287 build_int_cst (TREE_TYPE (ll_arg), 0));
5288
5289 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5290 if (code == TRUTH_AND_EXPR
5291 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5292 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5293 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5294 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5295 return build2_loc (loc, EQ_EXPR, truth_type,
5296 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5297 ll_arg, rl_arg),
5298 build_int_cst (TREE_TYPE (ll_arg), 0));
5299 }
5300
5301 /* See if the comparisons can be merged. Then get all the parameters for
5302 each side. */
5303
5304 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5305 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5306 return 0;
5307
5308 volatilep = 0;
5309 ll_inner = decode_field_reference (loc, ll_arg,
5310 &ll_bitsize, &ll_bitpos, &ll_mode,
5311 &ll_unsignedp, &volatilep, &ll_mask,
5312 &ll_and_mask);
5313 lr_inner = decode_field_reference (loc, lr_arg,
5314 &lr_bitsize, &lr_bitpos, &lr_mode,
5315 &lr_unsignedp, &volatilep, &lr_mask,
5316 &lr_and_mask);
5317 rl_inner = decode_field_reference (loc, rl_arg,
5318 &rl_bitsize, &rl_bitpos, &rl_mode,
5319 &rl_unsignedp, &volatilep, &rl_mask,
5320 &rl_and_mask);
5321 rr_inner = decode_field_reference (loc, rr_arg,
5322 &rr_bitsize, &rr_bitpos, &rr_mode,
5323 &rr_unsignedp, &volatilep, &rr_mask,
5324 &rr_and_mask);
5325
5326 /* The inner operation on the lhs of each comparison must be the
5327 same if we are to be able to do anything. Then see if we
5328 have constants. If not, the same must be true for the
5329 rhs's. */
5330 if (volatilep || ll_inner == 0 || rl_inner == 0
5331 || ! operand_equal_p (ll_inner, rl_inner, 0))
5332 return 0;
5333
5334 if (TREE_CODE (lr_arg) == INTEGER_CST
5335 && TREE_CODE (rr_arg) == INTEGER_CST)
5336 l_const = lr_arg, r_const = rr_arg;
5337 else if (lr_inner == 0 || rr_inner == 0
5338 || ! operand_equal_p (lr_inner, rr_inner, 0))
5339 return 0;
5340 else
5341 l_const = r_const = 0;
5342
5343 /* If either comparison code is not correct for our logical operation,
5344 fail. However, we can convert a one-bit comparison against zero into
5345 the opposite comparison against that bit being set in the field. */
5346
5347 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5348 if (lcode != wanted_code)
5349 {
5350 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5351 {
5352 /* Make the left operand unsigned, since we are only interested
5353 in the value of one bit. Otherwise we are doing the wrong
5354 thing below. */
5355 ll_unsignedp = 1;
5356 l_const = ll_mask;
5357 }
5358 else
5359 return 0;
5360 }
5361
5362 /* This is analogous to the code for l_const above. */
5363 if (rcode != wanted_code)
5364 {
5365 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5366 {
5367 rl_unsignedp = 1;
5368 r_const = rl_mask;
5369 }
5370 else
5371 return 0;
5372 }
5373
5374 /* See if we can find a mode that contains both fields being compared on
5375 the left. If we can't, fail. Otherwise, update all constants and masks
5376 to be relative to a field of that size. */
5377 first_bit = MIN (ll_bitpos, rl_bitpos);
5378 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5379 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5380 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5381 volatilep);
5382 if (lnmode == VOIDmode)
5383 return 0;
5384
5385 lnbitsize = GET_MODE_BITSIZE (lnmode);
5386 lnbitpos = first_bit & ~ (lnbitsize - 1);
5387 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5388 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5389
5390 if (BYTES_BIG_ENDIAN)
5391 {
5392 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5393 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5394 }
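/* Illustrative example: if lnbitsize is 32 and a field occupies bits
   0..7 counted from the least significant end, then on a big-endian
   target the same field is found at bit position 32 - 0 - 8 == 24
   counted from the most significant end.  */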
5395
5396 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5397 size_int (xll_bitpos));
5398 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5399 size_int (xrl_bitpos));
5400
5401 if (l_const)
5402 {
5403 l_const = fold_convert_loc (loc, lntype, l_const);
5404 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5405 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5406 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5407 fold_build1_loc (loc, BIT_NOT_EXPR,
5408 lntype, ll_mask))))
5409 {
5410 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5411
5412 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5413 }
5414 }
5415 if (r_const)
5416 {
5417 r_const = fold_convert_loc (loc, lntype, r_const);
5418 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5419 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5420 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5421 fold_build1_loc (loc, BIT_NOT_EXPR,
5422 lntype, rl_mask))))
5423 {
5424 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5425
5426 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5427 }
5428 }
5429
5430 /* If the right sides are not constant, do the same for them. Also,
5431 disallow this optimization if a size or signedness mismatch occurs
5432 between the left and right sides. */
5433 if (l_const == 0)
5434 {
5435 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5436 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5437 /* Make sure the two fields on the right
5438 correspond to the left without being swapped. */
5439 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5440 return 0;
5441
5442 first_bit = MIN (lr_bitpos, rr_bitpos);
5443 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5444 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5445 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5446 volatilep);
5447 if (rnmode == VOIDmode)
5448 return 0;
5449
5450 rnbitsize = GET_MODE_BITSIZE (rnmode);
5451 rnbitpos = first_bit & ~ (rnbitsize - 1);
5452 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5453 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5454
5455 if (BYTES_BIG_ENDIAN)
5456 {
5457 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5458 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5459 }
5460
5461 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5462 rntype, lr_mask),
5463 size_int (xlr_bitpos));
5464 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5465 rntype, rr_mask),
5466 size_int (xrr_bitpos));
5467
5468 /* Make a mask that corresponds to both fields being compared.
5469 Do this for both items being compared. If the operands are the
5470 same size and the bits being compared are in the same position
5471 then we can do this by masking both and comparing the masked
5472 results. */
5473 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5474 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5475 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5476 {
5477 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5478 ll_unsignedp || rl_unsignedp);
5479 if (! all_ones_mask_p (ll_mask, lnbitsize))
5480 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5481
5482 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5483 lr_unsignedp || rr_unsignedp);
5484 if (! all_ones_mask_p (lr_mask, rnbitsize))
5485 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5486
5487 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5488 }
5489
5490 /* There is still another way we can do something: If both pairs of
5491 fields being compared are adjacent, we may be able to make a wider
5492 field containing them both.
5493
5494 Note that we still must mask the lhs/rhs expressions. Furthermore,
5495 the mask must be shifted to account for the shift done by
5496 make_bit_field_ref. */
5497 if ((ll_bitsize + ll_bitpos == rl_bitpos
5498 && lr_bitsize + lr_bitpos == rr_bitpos)
5499 || (ll_bitpos == rl_bitpos + rl_bitsize
5500 && lr_bitpos == rr_bitpos + rr_bitsize))
5501 {
5502 tree type;
5503
5504 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5505 ll_bitsize + rl_bitsize,
5506 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5507 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5508 lr_bitsize + rr_bitsize,
5509 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5510
5511 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5512 size_int (MIN (xll_bitpos, xrl_bitpos)));
5513 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5514 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5515
5516 /* Convert to the smaller type before masking out unwanted bits. */
5517 type = lntype;
5518 if (lntype != rntype)
5519 {
5520 if (lnbitsize > rnbitsize)
5521 {
5522 lhs = fold_convert_loc (loc, rntype, lhs);
5523 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5524 type = rntype;
5525 }
5526 else if (lnbitsize < rnbitsize)
5527 {
5528 rhs = fold_convert_loc (loc, lntype, rhs);
5529 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5530 type = lntype;
5531 }
5532 }
5533
5534 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5535 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5536
5537 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5538 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5539
5540 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5541 }
5542
5543 return 0;
5544 }
5545
5546 /* Handle the case of comparisons with constants. If there is something in
5547 common between the masks, those bits of the constants must be the same.
5548 If not, the condition is always false. Test for this to avoid generating
5549 incorrect code below. */
5550 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5551 if (! integer_zerop (result)
5552 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5553 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5554 {
5555 if (wanted_code == NE_EXPR)
5556 {
5557 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5558 return constant_boolean_node (true, truth_type);
5559 }
5560 else
5561 {
5562 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5563 return constant_boolean_node (false, truth_type);
5564 }
5565 }
5566
5567 /* Construct the expression we will return. First get the component
5568 reference we will make. Unless the mask is all ones for the width of
5569 that field, perform the mask operation. Then compare with the
5570 merged constant. */
5571 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5572 ll_unsignedp || rl_unsignedp);
5573
5574 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5575 if (! all_ones_mask_p (ll_mask, lnbitsize))
5576 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5577
5578 return build2_loc (loc, wanted_code, truth_type, result,
5579 const_binop (BIT_IOR_EXPR, l_const, r_const));
5580 }
5581 \f
5582 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5583 constant. */
5584
5585 static tree
5586 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5587 tree op0, tree op1)
5588 {
5589 tree arg0 = op0;
5590 enum tree_code op_code;
5591 tree comp_const;
5592 tree minmax_const;
5593 int consts_equal, consts_lt;
5594 tree inner;
5595
5596 STRIP_SIGN_NOPS (arg0);
5597
5598 op_code = TREE_CODE (arg0);
5599 minmax_const = TREE_OPERAND (arg0, 1);
5600 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5601 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5602 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5603 inner = TREE_OPERAND (arg0, 0);
5604
5605 /* If anything prevents us from optimizing, return NULL_TREE. */
5606 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5607 || TREE_CODE (comp_const) != INTEGER_CST
5608 || TREE_OVERFLOW (comp_const)
5609 || TREE_CODE (minmax_const) != INTEGER_CST
5610 || TREE_OVERFLOW (minmax_const))
5611 return NULL_TREE;
5612
5613 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5614 and GT_EXPR, doing the rest with recursive calls using logical
5615 simplifications. */
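/* For instance, MIN (x, 4) < 5 is first rewritten through the
   inverted comparison as !(MIN (x, 4) >= 5), and the GE_EXPR case
   below then splits that into the EQ_EXPR and GT_EXPR subproblems.  */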
5616 switch (code)
5617 {
5618 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5619 {
5620 tree tem
5621 = optimize_minmax_comparison (loc,
5622 invert_tree_comparison (code, false),
5623 type, op0, op1);
5624 if (tem)
5625 return invert_truthvalue_loc (loc, tem);
5626 return NULL_TREE;
5627 }
5628
5629 case GE_EXPR:
5630 return
5631 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5632 optimize_minmax_comparison
5633 (loc, EQ_EXPR, type, arg0, comp_const),
5634 optimize_minmax_comparison
5635 (loc, GT_EXPR, type, arg0, comp_const));
5636
5637 case EQ_EXPR:
5638 if (op_code == MAX_EXPR && consts_equal)
5639 /* MAX (X, 0) == 0 -> X <= 0 */
5640 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5641
5642 else if (op_code == MAX_EXPR && consts_lt)
5643 /* MAX (X, 0) == 5 -> X == 5 */
5644 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5645
5646 else if (op_code == MAX_EXPR)
5647 /* MAX (X, 0) == -1 -> false */
5648 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5649
5650 else if (consts_equal)
5651 /* MIN (X, 0) == 0 -> X >= 0 */
5652 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5653
5654 else if (consts_lt)
5655 /* MIN (X, 0) == 5 -> false */
5656 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5657
5658 else
5659 /* MIN (X, 0) == -1 -> X == -1 */
5660 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5661
5662 case GT_EXPR:
5663 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5664 /* MAX (X, 0) > 0 -> X > 0
5665 MAX (X, 0) > 5 -> X > 5 */
5666 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5667
5668 else if (op_code == MAX_EXPR)
5669 /* MAX (X, 0) > -1 -> true */
5670 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5671
5672 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5673 /* MIN (X, 0) > 0 -> false
5674 MIN (X, 0) > 5 -> false */
5675 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5676
5677 else
5678 /* MIN (X, 0) > -1 -> X > -1 */
5679 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5680
5681 default:
5682 return NULL_TREE;
5683 }
5684 }
5685 \f
5686 /* T is an integer expression that is being multiplied, divided, or taken a
5687 modulus (CODE says which and what kind of divide or modulus) by a
5688 constant C. See if we can eliminate that operation by folding it with
5689 other operations already in T. WIDE_TYPE, if non-null, is a type that
5690 should be used for the computation if wider than our type.
5691
5692 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5693 (X * 2) + (Y * 4). We must, however, be assured that either the original
5694 expression would not overflow or that overflow is undefined for the type
5695 in the language in question.
5696
5697 If we return a non-null expression, it is an equivalent form of the
5698 original computation, but need not be in the original type.
5699
5700 We set *STRICT_OVERFLOW_P to true if the return values depends on
5701 signed overflow being undefined. Otherwise we do not change
5702 *STRICT_OVERFLOW_P. */
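
/* As an illustrative sketch: for signed x with undefined overflow,
   extract_muldiv can rewrite (x * 8) / 2 as x * 4, setting
   *STRICT_OVERFLOW_P so that callers can issue -Wstrict-overflow
   diagnostics; with -fwrapv the same fold is refused because x * 8
   may wrap before the division.  */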
5703
5704 static tree
5705 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5706 bool *strict_overflow_p)
5707 {
5708 /* To avoid exponential search depth, refuse to allow recursion past
5709 three levels. Beyond that (1) it's highly unlikely that we'll find
5710 something interesting and (2) we've probably processed it before
5711 when we built the inner expression. */
5712
5713 static int depth;
5714 tree ret;
5715
5716 if (depth > 3)
5717 return NULL;
5718
5719 depth++;
5720 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5721 depth--;
5722
5723 return ret;
5724 }
5725
5726 static tree
5727 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5728 bool *strict_overflow_p)
5729 {
5730 tree type = TREE_TYPE (t);
5731 enum tree_code tcode = TREE_CODE (t);
5732 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5733 > GET_MODE_SIZE (TYPE_MODE (type)))
5734 ? wide_type : type);
5735 tree t1, t2;
5736 int same_p = tcode == code;
5737 tree op0 = NULL_TREE, op1 = NULL_TREE;
5738 bool sub_strict_overflow_p;
5739
5740 /* Don't deal with constants of zero here; they confuse the code below. */
5741 if (integer_zerop (c))
5742 return NULL_TREE;
5743
5744 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5745 op0 = TREE_OPERAND (t, 0);
5746
5747 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5748 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5749
5750 /* Note that we need not handle conditional operations here since fold
5751 already handles those cases. So just do arithmetic here. */
5752 switch (tcode)
5753 {
5754 case INTEGER_CST:
5755 /* For a constant, we can always simplify if we are a multiply
5756 or (for divide and modulus) if it is a multiple of our constant. */
5757 if (code == MULT_EXPR
5758 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5759 return const_binop (code, fold_convert (ctype, t),
5760 fold_convert (ctype, c));
5761 break;
5762
5763 CASE_CONVERT: case NON_LVALUE_EXPR:
5764 /* If op0 is an expression ... */
5765 if ((COMPARISON_CLASS_P (op0)
5766 || UNARY_CLASS_P (op0)
5767 || BINARY_CLASS_P (op0)
5768 || VL_EXP_CLASS_P (op0)
5769 || EXPRESSION_CLASS_P (op0))
5770 /* ... and has wrapping overflow, and its type is smaller
5771 than ctype, then we cannot pass through as widening. */
5772 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5773 && (TYPE_PRECISION (ctype)
5774 > TYPE_PRECISION (TREE_TYPE (op0))))
5775 /* ... or this is a truncation (t is narrower than op0),
5776 then we cannot pass through this narrowing. */
5777 || (TYPE_PRECISION (type)
5778 < TYPE_PRECISION (TREE_TYPE (op0)))
5779 /* ... or signedness changes for division or modulus,
5780 then we cannot pass through this conversion. */
5781 || (code != MULT_EXPR
5782 && (TYPE_UNSIGNED (ctype)
5783 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5784 /* ... or has undefined overflow while the converted to
5785 type has not, we cannot do the operation in the inner type
5786 as that would introduce undefined overflow. */
5787 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5788 && !TYPE_OVERFLOW_UNDEFINED (type))))
5789 break;
5790
5791 /* Pass the constant down and see if we can make a simplification. If
5792 we can, replace this expression with the inner simplification for
5793 possible later conversion to our or some other type. */
5794 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5795 && TREE_CODE (t2) == INTEGER_CST
5796 && !TREE_OVERFLOW (t2)
5797 && (0 != (t1 = extract_muldiv (op0, t2, code,
5798 code == MULT_EXPR
5799 ? ctype : NULL_TREE,
5800 strict_overflow_p))))
5801 return t1;
5802 break;
5803
5804 case ABS_EXPR:
5805 /* If widening the type changes it from signed to unsigned, then we
5806 must avoid building ABS_EXPR itself as unsigned. */
5807 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5808 {
5809 tree cstype = (*signed_type_for) (ctype);
5810 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5811 != 0)
5812 {
5813 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5814 return fold_convert (ctype, t1);
5815 }
5816 break;
5817 }
5818 /* If the constant is negative, we cannot simplify this. */
5819 if (tree_int_cst_sgn (c) == -1)
5820 break;
5821 /* FALLTHROUGH */
5822 case NEGATE_EXPR:
5823 /* For division and modulus, type can't be unsigned, as e.g.
5824 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5825 For signed types, even with wrapping overflow, this is fine. */
5826 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5827 break;
5828 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5829 != 0)
5830 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5831 break;
5832
5833 case MIN_EXPR: case MAX_EXPR:
5834 /* If widening the type changes the signedness, then we can't perform
5835 this optimization as that changes the result. */
5836 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5837 break;
5838
5839 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5840 sub_strict_overflow_p = false;
5841 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5842 &sub_strict_overflow_p)) != 0
5843 && (t2 = extract_muldiv (op1, c, code, wide_type,
5844 &sub_strict_overflow_p)) != 0)
5845 {
5846 if (tree_int_cst_sgn (c) < 0)
5847 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5848 if (sub_strict_overflow_p)
5849 *strict_overflow_p = true;
5850 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5851 fold_convert (ctype, t2));
5852 }
5853 break;
5854
5855 case LSHIFT_EXPR: case RSHIFT_EXPR:
5856 /* If the second operand is constant, this is a multiplication
5857 or floor division, by a power of two, so we can treat it that
5858 way unless the multiplier or divisor overflows. Signed
5859 left-shift overflow is implementation-defined rather than
5860 undefined in C90, so do not convert signed left shift into
5861 multiplication. */
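/* For example, with unsigned x, (x << 3) is handled here as x * 8
   and (x >> 3) as the floor division x / 8, so that the surrounding
   multiplication or division by a constant can combine with it.  */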
5862 if (TREE_CODE (op1) == INTEGER_CST
5863 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5864 /* const_binop may not detect overflow correctly,
5865 so check for it explicitly here. */
5866 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5867 && TREE_INT_CST_HIGH (op1) == 0
5868 && 0 != (t1 = fold_convert (ctype,
5869 const_binop (LSHIFT_EXPR,
5870 size_one_node,
5871 op1)))
5872 && !TREE_OVERFLOW (t1))
5873 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5874 ? MULT_EXPR : FLOOR_DIV_EXPR,
5875 ctype,
5876 fold_convert (ctype, op0),
5877 t1),
5878 c, code, wide_type, strict_overflow_p);
5879 break;
5880
5881 case PLUS_EXPR: case MINUS_EXPR:
5882 /* See if we can eliminate the operation on both sides. If we can, we
5883 can return a new PLUS or MINUS. If we can't, the only remaining
5884 cases where we can do anything are if the second operand is a
5885 constant. */
5886 sub_strict_overflow_p = false;
5887 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5888 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5889 if (t1 != 0 && t2 != 0
5890 && (code == MULT_EXPR
5891 /* If not multiplication, we can only do this if both operands
5892 are divisible by c. */
5893 || (multiple_of_p (ctype, op0, c)
5894 && multiple_of_p (ctype, op1, c))))
5895 {
5896 if (sub_strict_overflow_p)
5897 *strict_overflow_p = true;
5898 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5899 fold_convert (ctype, t2));
5900 }
5901
5902 /* If this was a subtraction, negate OP1 and set it to be an addition.
5903 This simplifies the logic below. */
5904 if (tcode == MINUS_EXPR)
5905 {
5906 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5907 /* If OP1 was not easily negatable, the constant may be OP0. */
5908 if (TREE_CODE (op0) == INTEGER_CST)
5909 {
5910 tree tem = op0;
5911 op0 = op1;
5912 op1 = tem;
5913 tem = t1;
5914 t1 = t2;
5915 t2 = tem;
5916 }
5917 }
5918
5919 if (TREE_CODE (op1) != INTEGER_CST)
5920 break;
5921
5922 /* If either OP1 or C are negative, this optimization is not safe for
5923 some of the division and remainder types while for others we need
5924 to change the code. */
5925 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5926 {
5927 if (code == CEIL_DIV_EXPR)
5928 code = FLOOR_DIV_EXPR;
5929 else if (code == FLOOR_DIV_EXPR)
5930 code = CEIL_DIV_EXPR;
5931 else if (code != MULT_EXPR
5932 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5933 break;
5934 }
5935
5936 /* If it's a multiply or a division/modulus operation of a multiple
5937 of our constant, do the operation and verify it doesn't overflow. */
5938 if (code == MULT_EXPR
5939 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5940 {
5941 op1 = const_binop (code, fold_convert (ctype, op1),
5942 fold_convert (ctype, c));
5943 /* We allow the constant to overflow with wrapping semantics. */
5944 if (op1 == 0
5945 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5946 break;
5947 }
5948 else
5949 break;
5950
5951 /* If we have an unsigned type, we cannot widen the operation since it
5952 will change the result if the original computation overflowed. */
5953 if (TYPE_UNSIGNED (ctype) && ctype != type)
5954 break;
5955
5956 /* If we were able to eliminate our operation from the first side,
5957 apply our operation to the second side and reform the PLUS. */
5958 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5959 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5960
5961 /* The last case is if we are a multiply. In that case, we can
5962 apply the distributive law to commute the multiply and addition
5963 if the multiplication of the constants doesn't overflow
5964 and overflow is defined. With undefined overflow
5965 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5966 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5967 return fold_build2 (tcode, ctype,
5968 fold_build2 (code, ctype,
5969 fold_convert (ctype, op0),
5970 fold_convert (ctype, c)),
5971 op1);
5972
5973 break;
5974
5975 case MULT_EXPR:
5976 /* We have a special case here if we are doing something like
5977 (C * 8) % 4 since we know that's zero. */
5978 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5979 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5980 /* If the multiplication can overflow we cannot optimize this. */
5981 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5982 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5983 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5984 {
5985 *strict_overflow_p = true;
5986 return omit_one_operand (type, integer_zero_node, op0);
5987 }
5988
5989 /* ... fall through ... */
5990
5991 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5992 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5993 /* If we can extract our operation from the LHS, do so and return a
5994 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5995 do something only if the second operand is a constant. */
5996 if (same_p
5997 && (t1 = extract_muldiv (op0, c, code, wide_type,
5998 strict_overflow_p)) != 0)
5999 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6000 fold_convert (ctype, op1));
6001 else if (tcode == MULT_EXPR && code == MULT_EXPR
6002 && (t1 = extract_muldiv (op1, c, code, wide_type,
6003 strict_overflow_p)) != 0)
6004 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6005 fold_convert (ctype, t1));
6006 else if (TREE_CODE (op1) != INTEGER_CST)
6007 return 0;
6008
6009 /* If these are the same operation types, we can associate them
6010 assuming no overflow. */
6011 if (tcode == code)
6012 {
6013 double_int mul;
6014 bool overflow_p;
6015 unsigned prec = TYPE_PRECISION (ctype);
6016 bool uns = TYPE_UNSIGNED (ctype);
6017 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6018 double_int dic = tree_to_double_int (c).ext (prec, uns);
6019 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6020 overflow_p = ((!uns && overflow_p)
6021 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6022 if (!double_int_fits_to_tree_p (ctype, mul)
6023 && ((uns && tcode != MULT_EXPR) || !uns))
6024 overflow_p = 1;
6025 if (!overflow_p)
6026 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6027 double_int_to_tree (ctype, mul));
6028 }
6029
6030 /* If these operations "cancel" each other, we have the main
6031 optimizations of this pass, which occur when either constant is a
6032 multiple of the other, in which case we replace this with either an
6033 operation of CODE or TCODE.
6034
6035 If we have an unsigned type, we cannot do this since it will change
6036 the result if the original computation overflowed. */
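/* Two illustrative cases, assuming undefined signed overflow:
   (x * 8) / 2 folds via the first branch below to
   x * (8 / 2) == x * 4, while (x * 2) / 8 folds via the second
   branch to x / (8 / 2) == x / 4.  */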
6037 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6038 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6039 || (tcode == MULT_EXPR
6040 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6041 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6042 && code != MULT_EXPR)))
6043 {
6044 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6045 {
6046 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6047 *strict_overflow_p = true;
6048 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6049 fold_convert (ctype,
6050 const_binop (TRUNC_DIV_EXPR,
6051 op1, c)));
6052 }
6053 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6054 {
6055 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6056 *strict_overflow_p = true;
6057 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6058 fold_convert (ctype,
6059 const_binop (TRUNC_DIV_EXPR,
6060 c, op1)));
6061 }
6062 }
6063 break;
6064
6065 default:
6066 break;
6067 }
6068
6069 return 0;
6070 }
6071 \f
6072 /* Return a node which has the indicated constant VALUE (either 0 or
6073 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6074 and is of the indicated TYPE. */
6075
6076 tree
6077 constant_boolean_node (bool value, tree type)
6078 {
6079 if (type == integer_type_node)
6080 return value ? integer_one_node : integer_zero_node;
6081 else if (type == boolean_type_node)
6082 return value ? boolean_true_node : boolean_false_node;
6083 else if (TREE_CODE (type) == VECTOR_TYPE)
6084 return build_vector_from_val (type,
6085 build_int_cst (TREE_TYPE (type),
6086 value ? -1 : 0));
6087 else
6088 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6089 }
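
/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a vector comparison type it yields a
   vector with all elements -1, the canonical all-ones "true" mask.  */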
6090
6091
6092 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6093 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6094 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6095 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6096 COND is the first argument to CODE; otherwise (as in the example
6097 given here), it is the second argument. TYPE is the type of the
6098 original expression. Return NULL_TREE if no simplification is
6099 possible. */
6100
6101 static tree
6102 fold_binary_op_with_conditional_arg (location_t loc,
6103 enum tree_code code,
6104 tree type, tree op0, tree op1,
6105 tree cond, tree arg, int cond_first_p)
6106 {
6107 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6108 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6109 tree test, true_value, false_value;
6110 tree lhs = NULL_TREE;
6111 tree rhs = NULL_TREE;
6112 enum tree_code cond_code = COND_EXPR;
6113
6114 if (TREE_CODE (cond) == COND_EXPR
6115 || TREE_CODE (cond) == VEC_COND_EXPR)
6116 {
6117 test = TREE_OPERAND (cond, 0);
6118 true_value = TREE_OPERAND (cond, 1);
6119 false_value = TREE_OPERAND (cond, 2);
6120 /* If this arm is an expression that throws (and hence has void
6121 type), it does not make sense to try to perform a logical or
6122 arithmetic operation involving it. */
6123 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6124 lhs = true_value;
6125 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6126 rhs = false_value;
6127 }
6128 else
6129 {
6130 tree testtype = TREE_TYPE (cond);
6131 test = cond;
6132 true_value = constant_boolean_node (true, testtype);
6133 false_value = constant_boolean_node (false, testtype);
6134 }
6135
6136 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6137 cond_code = VEC_COND_EXPR;
6138
6139 /* This transformation is only worthwhile if we don't have to wrap ARG
6140 in a SAVE_EXPR and the operation can be simplified without recursing
6141 on at least one of the branches once it is pushed inside the COND_EXPR. */
6142 if (!TREE_CONSTANT (arg)
6143 && (TREE_SIDE_EFFECTS (arg)
6144 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6145 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6146 return NULL_TREE;
6147
6148 arg = fold_convert_loc (loc, arg_type, arg);
6149 if (lhs == 0)
6150 {
6151 true_value = fold_convert_loc (loc, cond_type, true_value);
6152 if (cond_first_p)
6153 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6154 else
6155 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6156 }
6157 if (rhs == 0)
6158 {
6159 false_value = fold_convert_loc (loc, cond_type, false_value);
6160 if (cond_first_p)
6161 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6162 else
6163 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6164 }
6165
6166 /* Check that we have simplified at least one of the branches. */
6167 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6168 return NULL_TREE;
6169
6170 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6171 }
6172
6173 \f
6174 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6175
6176 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6177 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6178 ADDEND is the same as X.
6179
6180 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6181 and finite. The problematic cases are when X is zero, and its mode
6182 has signed zeros. In the case of rounding towards -infinity,
6183 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6184 modes, X + 0 is not the same as X because -0 + 0 is 0. */
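
/* Concretely: with X == -0.0 under the default rounding mode,
   X + 0.0 evaluates to +0.0, which differs from X in sign; and with
   rounding towards -infinity, 0.0 - 0.0 evaluates to -0.0.  These
   are the cases the checks below guard against.  */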
6185
6186 bool
6187 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6188 {
6189 if (!real_zerop (addend))
6190 return false;
6191
6192 /* Don't allow the fold with -fsignaling-nans. */
6193 if (HONOR_SNANS (TYPE_MODE (type)))
6194 return false;
6195
6196 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6197 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6198 return true;
6199
6200 /* In a vector or complex, we would need to check the sign of all zeros. */
6201 if (TREE_CODE (addend) != REAL_CST)
6202 return false;
6203
6204 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6205 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6206 negate = !negate;
6207
6208 /* The mode has signed zeros, and we have to honor their sign.
6209 In this situation, there is only one case we can return true for.
6210 X - 0 is the same as X unless rounding towards -infinity is
6211 supported. */
6212 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6213 }
6214
6215 /* Subroutine of fold() that checks comparisons of built-in math
6216 functions against real constants.
6217
6218 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6219 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6220 is the type of the result and ARG0 and ARG1 are the operands of the
6221 comparison. ARG1 must be a TREE_REAL_CST.
6222
6223 The function returns the constant folded tree if a simplification
6224 can be made, and NULL_TREE otherwise. */
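
/* For example, sqrt(x) > 2.0 becomes x > 4.0 since 4.0 is exactly
   representable, sqrt(x) < -1.0 is simply false, and sqrt(x) >= -1.0
   becomes the domain test x >= 0.0 when NaNs are honored.  */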
6225
6226 static tree
6227 fold_mathfn_compare (location_t loc,
6228 enum built_in_function fcode, enum tree_code code,
6229 tree type, tree arg0, tree arg1)
6230 {
6231 REAL_VALUE_TYPE c;
6232
6233 if (BUILTIN_SQRT_P (fcode))
6234 {
6235 tree arg = CALL_EXPR_ARG (arg0, 0);
6236 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6237
6238 c = TREE_REAL_CST (arg1);
6239 if (REAL_VALUE_NEGATIVE (c))
6240 {
6241 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6242 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6243 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6244
6245 /* sqrt(x) > y is always true, if y is negative and we
6246 don't care about NaNs, i.e. negative values of x. */
6247 if (code == NE_EXPR || !HONOR_NANS (mode))
6248 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6249
6250 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6251 return fold_build2_loc (loc, GE_EXPR, type, arg,
6252 build_real (TREE_TYPE (arg), dconst0));
6253 }
6254 else if (code == GT_EXPR || code == GE_EXPR)
6255 {
6256 REAL_VALUE_TYPE c2;
6257
6258 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6259 real_convert (&c2, mode, &c2);
6260
6261 if (REAL_VALUE_ISINF (c2))
6262 {
6263 /* sqrt(x) > y is x == +Inf, when y is very large. */
6264 if (HONOR_INFINITIES (mode))
6265 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6266 build_real (TREE_TYPE (arg), c2));
6267
6268 /* sqrt(x) > y is always false, when y is very large
6269 and we don't care about infinities. */
6270 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6271 }
6272
6273 /* sqrt(x) > c is the same as x > c*c. */
6274 return fold_build2_loc (loc, code, type, arg,
6275 build_real (TREE_TYPE (arg), c2));
6276 }
6277 else if (code == LT_EXPR || code == LE_EXPR)
6278 {
6279 REAL_VALUE_TYPE c2;
6280
6281 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6282 real_convert (&c2, mode, &c2);
6283
6284 if (REAL_VALUE_ISINF (c2))
6285 {
6286 /* sqrt(x) < y is always true, when y is a very large
6287 value and we don't care about NaNs or Infinities. */
6288 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6289 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6290
6291 /* sqrt(x) < y is x != +Inf when y is very large and we
6292 don't care about NaNs. */
6293 if (! HONOR_NANS (mode))
6294 return fold_build2_loc (loc, NE_EXPR, type, arg,
6295 build_real (TREE_TYPE (arg), c2));
6296
6297 /* sqrt(x) < y is x >= 0 when y is very large and we
6298 don't care about Infinities. */
6299 if (! HONOR_INFINITIES (mode))
6300 return fold_build2_loc (loc, GE_EXPR, type, arg,
6301 build_real (TREE_TYPE (arg), dconst0));
6302
6303 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6304 arg = save_expr (arg);
6305 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6306 fold_build2_loc (loc, GE_EXPR, type, arg,
6307 build_real (TREE_TYPE (arg),
6308 dconst0)),
6309 fold_build2_loc (loc, NE_EXPR, type, arg,
6310 build_real (TREE_TYPE (arg),
6311 c2)));
6312 }
6313
6314 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6315 if (! HONOR_NANS (mode))
6316 return fold_build2_loc (loc, code, type, arg,
6317 build_real (TREE_TYPE (arg), c2));
6318
6319 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6320 arg = save_expr (arg);
6321 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6322 fold_build2_loc (loc, GE_EXPR, type, arg,
6323 build_real (TREE_TYPE (arg),
6324 dconst0)),
6325 fold_build2_loc (loc, code, type, arg,
6326 build_real (TREE_TYPE (arg),
6327 c2)));
6328 }
6329 }
6330
6331 return NULL_TREE;
6332 }
6333
6334 /* Subroutine of fold() that optimizes comparisons against Infinities,
6335 either +Inf or -Inf.
6336
6337 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6338 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6339 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6340
6341 The function returns the constant folded tree if a simplification
6342 can be made, and NULL_TREE otherwise. */
6343
6344 static tree
6345 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6346 tree arg0, tree arg1)
6347 {
6348 enum machine_mode mode;
6349 REAL_VALUE_TYPE max;
6350 tree temp;
6351 bool neg;
6352
6353 mode = TYPE_MODE (TREE_TYPE (arg0));
6354
6355 /* For negative infinity swap the sense of the comparison. */
6356 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6357 if (neg)
6358 code = swap_tree_comparison (code);
6359
6360 switch (code)
6361 {
6362 case GT_EXPR:
6363 /* x > +Inf is always false, if we ignore sNaNs. */
6364 if (HONOR_SNANS (mode))
6365 return NULL_TREE;
6366 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6367
6368 case LE_EXPR:
6369 /* x <= +Inf is always true, if we don't care about NaNs. */
6370 if (! HONOR_NANS (mode))
6371 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6372
6373 /* x <= +Inf is the same as x == x, i.e. !isnan(x). */
6374 arg0 = save_expr (arg0);
6375 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6376
6377 case EQ_EXPR:
6378 case GE_EXPR:
6379 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6380 real_maxval (&max, neg, mode);
6381 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6382 arg0, build_real (TREE_TYPE (arg0), max));
6383
6384 case LT_EXPR:
6385 /* x < +Inf is always equal to x <= DBL_MAX. */
6386 real_maxval (&max, neg, mode);
6387 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6388 arg0, build_real (TREE_TYPE (arg0), max));
6389
6390 case NE_EXPR:
6391 /* x != +Inf is always equal to !(x > DBL_MAX). */
6392 real_maxval (&max, neg, mode);
6393 if (! HONOR_NANS (mode))
6394 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6395 arg0, build_real (TREE_TYPE (arg0), max));
6396
6397 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6398 arg0, build_real (TREE_TYPE (arg0), max));
6399 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6400
6401 default:
6402 break;
6403 }
6404
6405 return NULL_TREE;
6406 }
6407
6408 /* Subroutine of fold() that optimizes comparisons of a division by
6409 a nonzero integer constant against an integer constant, i.e.
6410 X/C1 op C2.
6411
6412 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6413 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6414 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6415
6416 The function returns the constant folded tree if a simplification
6417 can be made, and NULL_TREE otherwise. */
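
/* For example, with truncating signed division, x/4 == 2 holds
   exactly for x in [8, 11], so the comparison becomes the range
   check 8 <= x && x <= 11 built by build_range_check below.  */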
6418
6419 static tree
6420 fold_div_compare (location_t loc,
6421 enum tree_code code, tree type, tree arg0, tree arg1)
6422 {
6423 tree prod, tmp, hi, lo;
6424 tree arg00 = TREE_OPERAND (arg0, 0);
6425 tree arg01 = TREE_OPERAND (arg0, 1);
6426 double_int val;
6427 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6428 bool neg_overflow;
6429 bool overflow;
6430
6431 /* We have to do this the hard way to detect unsigned overflow.
6432 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6433 val = TREE_INT_CST (arg01)
6434 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6435 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6436 neg_overflow = false;
6437
6438 if (unsigned_p)
6439 {
6440 tmp = int_const_binop (MINUS_EXPR, arg01,
6441 build_int_cst (TREE_TYPE (arg01), 1));
6442 lo = prod;
6443
6444 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6445 val = TREE_INT_CST (prod)
6446 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6447 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6448 -1, overflow | TREE_OVERFLOW (prod));
6449 }
6450 else if (tree_int_cst_sgn (arg01) >= 0)
6451 {
6452 tmp = int_const_binop (MINUS_EXPR, arg01,
6453 build_int_cst (TREE_TYPE (arg01), 1));
6454 switch (tree_int_cst_sgn (arg1))
6455 {
6456 case -1:
6457 neg_overflow = true;
6458 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6459 hi = prod;
6460 break;
6461
6462 case 0:
6463 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6464 hi = tmp;
6465 break;
6466
6467 case 1:
6468 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6469 lo = prod;
6470 break;
6471
6472 default:
6473 gcc_unreachable ();
6474 }
6475 }
6476 else
6477 {
6478 /* A negative divisor reverses the relational operators. */
6479 code = swap_tree_comparison (code);
6480
6481 tmp = int_const_binop (PLUS_EXPR, arg01,
6482 build_int_cst (TREE_TYPE (arg01), 1));
6483 switch (tree_int_cst_sgn (arg1))
6484 {
6485 case -1:
6486 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6487 lo = prod;
6488 break;
6489
6490 case 0:
6491 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6492 lo = tmp;
6493 break;
6494
6495 case 1:
6496 neg_overflow = true;
6497 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6498 hi = prod;
6499 break;
6500
6501 default:
6502 gcc_unreachable ();
6503 }
6504 }
6505
6506 switch (code)
6507 {
6508 case EQ_EXPR:
6509 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6510 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6511 if (TREE_OVERFLOW (hi))
6512 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6513 if (TREE_OVERFLOW (lo))
6514 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6515 return build_range_check (loc, type, arg00, 1, lo, hi);
6516
6517 case NE_EXPR:
6518 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6519 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6520 if (TREE_OVERFLOW (hi))
6521 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6522 if (TREE_OVERFLOW (lo))
6523 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6524 return build_range_check (loc, type, arg00, 0, lo, hi);
6525
6526 case LT_EXPR:
6527 if (TREE_OVERFLOW (lo))
6528 {
6529 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6530 return omit_one_operand_loc (loc, type, tmp, arg00);
6531 }
6532 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6533
6534 case LE_EXPR:
6535 if (TREE_OVERFLOW (hi))
6536 {
6537 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6538 return omit_one_operand_loc (loc, type, tmp, arg00);
6539 }
6540 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6541
6542 case GT_EXPR:
6543 if (TREE_OVERFLOW (hi))
6544 {
6545 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6546 return omit_one_operand_loc (loc, type, tmp, arg00);
6547 }
6548 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6549
6550 case GE_EXPR:
6551 if (TREE_OVERFLOW (lo))
6552 {
6553 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6554 return omit_one_operand_loc (loc, type, tmp, arg00);
6555 }
6556 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6557
6558 default:
6559 break;
6560 }
6561
6562 return NULL_TREE;
6563 }
6564
6565
6566 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6567 equality/inequality test, then return a simplified form of the test
6568 using a sign test. Otherwise return NULL. TYPE is the desired
6569 result type. */
6570
6571 static tree
6572 fold_single_bit_test_into_sign_test (location_t loc,
6573 enum tree_code code, tree arg0, tree arg1,
6574 tree result_type)
6575 {
6576 /* If this is testing a single bit, we can optimize the test. */
6577 if ((code == NE_EXPR || code == EQ_EXPR)
6578 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6579 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6580 {
6581 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6582 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6583 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6584
6585 if (arg00 != NULL_TREE
6586 /* This is only a win if casting to a signed type is cheap,
6587 i.e. when arg00's type is not a partial mode. */
6588 && TYPE_PRECISION (TREE_TYPE (arg00))
6589 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6590 {
6591 tree stype = signed_type_for (TREE_TYPE (arg00));
6592 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6593 result_type,
6594 fold_convert_loc (loc, stype, arg00),
6595 build_int_cst (stype, 0));
6596 }
6597 }
6598
6599 return NULL_TREE;
6600 }
6601
6602 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6603 equality/inequality test, then return a simplified form of
6604 the test using shifts and logical operations. Otherwise return
6605 NULL. TYPE is the desired result type. */
6606
6607 tree
6608 fold_single_bit_test (location_t loc, enum tree_code code,
6609 tree arg0, tree arg1, tree result_type)
6610 {
6611 /* If this is testing a single bit, we can optimize the test. */
6612 if ((code == NE_EXPR || code == EQ_EXPR)
6613 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6614 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6615 {
6616 tree inner = TREE_OPERAND (arg0, 0);
6617 tree type = TREE_TYPE (arg0);
6618 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6619 enum machine_mode operand_mode = TYPE_MODE (type);
6620 int ops_unsigned;
6621 tree signed_type, unsigned_type, intermediate_type;
6622 tree tem, one;
6623
6624 /* First, see if we can fold the single bit test into a sign-bit
6625 test. */
6626 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6627 result_type);
6628 if (tem)
6629 return tem;
6630
6631 /* Otherwise we have (A & C) != 0 where C is a single bit,
6632 convert that into ((A >> C2) & 1), where C2 = log2(C).
6633 Similarly for (A & C) == 0. */
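/* E.g. (x & 8) != 0 becomes ((x >> 3) & 1), and (x & 8) == 0
   becomes (((x >> 3) ^ 1) & 1).  */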
6634
6635 /* If INNER is a right shift by a constant and it plus BITNUM does
6636 not overflow, adjust BITNUM and INNER. */
6637 if (TREE_CODE (inner) == RSHIFT_EXPR
6638 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6639 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6640 && bitnum < TYPE_PRECISION (type)
6641 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6642 bitnum - TYPE_PRECISION (type)))
6643 {
6644 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6645 inner = TREE_OPERAND (inner, 0);
6646 }
6647
6648 /* If we are going to be able to omit the AND below, we must do our
6649 operations as unsigned. If we must use the AND, we have a choice.
6650 Normally unsigned is faster, but for some machines signed is. */
6651 #ifdef LOAD_EXTEND_OP
6652 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6653 && !flag_syntax_only) ? 0 : 1;
6654 #else
6655 ops_unsigned = 1;
6656 #endif
6657
6658 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6659 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6660 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6661 inner = fold_convert_loc (loc, intermediate_type, inner);
6662
6663 if (bitnum != 0)
6664 inner = build2 (RSHIFT_EXPR, intermediate_type,
6665 inner, size_int (bitnum));
6666
6667 one = build_int_cst (intermediate_type, 1);
6668
6669 if (code == EQ_EXPR)
6670 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6671
6672 /* Put the AND last so it can combine with more things. */
6673 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6674
6675 /* Make sure to return the proper type. */
6676 inner = fold_convert_loc (loc, result_type, inner);
6677
6678 return inner;
6679 }
6680 return NULL_TREE;
6681 }
6682
6683 /* Check whether we are allowed to reorder operands arg0 and arg1,
6684 such that the evaluation of arg1 occurs before arg0. */
6685
6686 static bool
6687 reorder_operands_p (const_tree arg0, const_tree arg1)
6688 {
6689 if (! flag_evaluation_order)
6690 return true;
6691 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6692 return true;
6693 return ! TREE_SIDE_EFFECTS (arg0)
6694 && ! TREE_SIDE_EFFECTS (arg1);
6695 }
6696
6697 /* Test whether it is preferable to swap two operands, ARG0 and
6698 ARG1, for example because ARG0 is an integer constant and ARG1
6699 isn't. If REORDER is true, only recommend swapping if we can
6700 evaluate the operands in reverse order. */
6701
6702 bool
6703 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6704 {
6705 STRIP_SIGN_NOPS (arg0);
6706 STRIP_SIGN_NOPS (arg1);
6707
6708 if (TREE_CODE (arg1) == INTEGER_CST)
6709 return 0;
6710 if (TREE_CODE (arg0) == INTEGER_CST)
6711 return 1;
6712
6713 if (TREE_CODE (arg1) == REAL_CST)
6714 return 0;
6715 if (TREE_CODE (arg0) == REAL_CST)
6716 return 1;
6717
6718 if (TREE_CODE (arg1) == FIXED_CST)
6719 return 0;
6720 if (TREE_CODE (arg0) == FIXED_CST)
6721 return 1;
6722
6723 if (TREE_CODE (arg1) == COMPLEX_CST)
6724 return 0;
6725 if (TREE_CODE (arg0) == COMPLEX_CST)
6726 return 1;
6727
6728 if (TREE_CONSTANT (arg1))
6729 return 0;
6730 if (TREE_CONSTANT (arg0))
6731 return 1;
6732
6733 if (optimize_function_for_size_p (cfun))
6734 return 0;
6735
6736 if (reorder && flag_evaluation_order
6737 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6738 return 0;
6739
6740 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6741 for commutative and comparison operators. Ensuring a canonical
6742 form allows the optimizers to find additional redundancies without
6743 having to explicitly check for both orderings. */
6744 if (TREE_CODE (arg0) == SSA_NAME
6745 && TREE_CODE (arg1) == SSA_NAME
6746 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6747 return 1;
6748
6749 /* Put SSA_NAMEs last. */
6750 if (TREE_CODE (arg1) == SSA_NAME)
6751 return 0;
6752 if (TREE_CODE (arg0) == SSA_NAME)
6753 return 1;
6754
6755 /* Put variables last. */
6756 if (DECL_P (arg1))
6757 return 0;
6758 if (DECL_P (arg0))
6759 return 1;
6760
6761 return 0;
6762 }
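
/* For example, fold uses this predicate to canonicalize 5 < n into
   n > 5 and c + x into x + c, so that later optimizers need only
   recognize one operand order.  */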
6763
6764 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6765 ARG0 is extended to a wider type. */
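
/* For instance, with unsigned char c, (int) c == 300 is known to be
   false because 300 does not fit in the narrower type's range, while
   (int) c == 65 can be carried out entirely in the narrower type.  */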
6766
6767 static tree
6768 fold_widened_comparison (location_t loc, enum tree_code code,
6769 tree type, tree arg0, tree arg1)
6770 {
6771 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6772 tree arg1_unw;
6773 tree shorter_type, outer_type;
6774 tree min, max;
6775 bool above, below;
6776
6777 if (arg0_unw == arg0)
6778 return NULL_TREE;
6779 shorter_type = TREE_TYPE (arg0_unw);
6780
6781 #ifdef HAVE_canonicalize_funcptr_for_compare
6782 /* Disable this optimization if we're casting a function pointer
6783 type on targets that require function pointer canonicalization. */
6784 if (HAVE_canonicalize_funcptr_for_compare
6785 && TREE_CODE (shorter_type) == POINTER_TYPE
6786 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6787 return NULL_TREE;
6788 #endif
6789
6790 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6791 return NULL_TREE;
6792
6793 arg1_unw = get_unwidened (arg1, NULL_TREE);
6794
6795 /* If possible, express the comparison in the shorter mode. */
6796 if ((code == EQ_EXPR || code == NE_EXPR
6797 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6798 && (TREE_TYPE (arg1_unw) == shorter_type
6799 || ((TYPE_PRECISION (shorter_type)
6800 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6801 && (TYPE_UNSIGNED (shorter_type)
6802 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6803 || (TREE_CODE (arg1_unw) == INTEGER_CST
6804 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6805 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6806 && int_fits_type_p (arg1_unw, shorter_type))))
6807 return fold_build2_loc (loc, code, type, arg0_unw,
6808 fold_convert_loc (loc, shorter_type, arg1_unw));
6809
6810 if (TREE_CODE (arg1_unw) != INTEGER_CST
6811 || TREE_CODE (shorter_type) != INTEGER_TYPE
6812 || !int_fits_type_p (arg1_unw, shorter_type))
6813 return NULL_TREE;
6814
6815 /* If we are comparing with an integer that does not fit into the range
6816 of the shorter type, the result is known. */
6817 outer_type = TREE_TYPE (arg1_unw);
6818 min = lower_bound_in_type (outer_type, shorter_type);
6819 max = upper_bound_in_type (outer_type, shorter_type);
6820
6821 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6822 max, arg1_unw));
6823 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6824 arg1_unw, min));
6825
6826 switch (code)
6827 {
6828 case EQ_EXPR:
6829 if (above || below)
6830 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6831 break;
6832
6833 case NE_EXPR:
6834 if (above || below)
6835 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6836 break;
6837
6838 case LT_EXPR:
6839 case LE_EXPR:
6840 if (above)
6841 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6842 else if (below)
6843 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6844 break;

6845 case GT_EXPR:
6846 case GE_EXPR:
6847 if (above)
6848 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6849 else if (below)
6850 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6851 break;

6852 default:
6853 break;
6854 }
6855
6856 return NULL_TREE;
6857 }
6858
6859 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6860 ARG0 just the signedness is changed. */
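
/* For example, with unsigned int u, (int) u == 5 can be carried out
   as u == 5U, since changing only the signedness at equal precision
   preserves equality; ordered comparisons are rewritten only when the
   signedness does not actually change.  */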
6861
6862 static tree
6863 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6864 tree arg0, tree arg1)
6865 {
6866 tree arg0_inner;
6867 tree inner_type, outer_type;
6868
6869 if (!CONVERT_EXPR_P (arg0))
6870 return NULL_TREE;
6871
6872 outer_type = TREE_TYPE (arg0);
6873 arg0_inner = TREE_OPERAND (arg0, 0);
6874 inner_type = TREE_TYPE (arg0_inner);
6875
6876 #ifdef HAVE_canonicalize_funcptr_for_compare
6877 /* Disable this optimization if we're casting a function pointer
6878 type on targets that require function pointer canonicalization. */
6879 if (HAVE_canonicalize_funcptr_for_compare
6880 && TREE_CODE (inner_type) == POINTER_TYPE
6881 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6882 return NULL_TREE;
6883 #endif
6884
6885 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6886 return NULL_TREE;
6887
6888 if (TREE_CODE (arg1) != INTEGER_CST
6889 && !(CONVERT_EXPR_P (arg1)
6890 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6891 return NULL_TREE;
6892
6893 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6894 && code != NE_EXPR
6895 && code != EQ_EXPR)
6896 return NULL_TREE;
6897
6898 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6899 return NULL_TREE;
6900
6901 if (TREE_CODE (arg1) == INTEGER_CST)
6902 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6903 0, TREE_OVERFLOW (arg1));
6904 else
6905 arg1 = fold_convert_loc (loc, inner_type, arg1);
6906
6907 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6908 }
6909
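/* For example, with 32-bit int, the sign-changed comparison folding
   above rewrites

     (unsigned int) i == 5U   into   i == 5

   since equality does not depend on signedness; ordered comparisons
   such as < are rewritten only when the signedness is unchanged.  */
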
6910 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6911 the step of the array. Reconstructs s and delta in the case of s *
6912 delta being an integer constant (and thus already folded). ADDR is
6913 the address. OP1 is the multiplicative expression. If the
6914 function succeeds, the new address expression is returned.
6915 Otherwise NULL_TREE is returned. LOC is the location of the
6916 resulting expression. */
6917
6918 static tree
6919 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6920 {
6921 tree s, delta, step;
6922 tree ref = TREE_OPERAND (addr, 0), pref;
6923 tree ret, pos;
6924 tree itype;
6925 bool mdim = false;
6926
6927 /* Strip the nops that might be added when converting op1 to sizetype. */
6928 STRIP_NOPS (op1);
6929
6930 /* Canonicalize op1 into a possibly non-constant delta
6931 and an INTEGER_CST s. */
6932 if (TREE_CODE (op1) == MULT_EXPR)
6933 {
6934 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6935
6936 STRIP_NOPS (arg0);
6937 STRIP_NOPS (arg1);
6938
6939 if (TREE_CODE (arg0) == INTEGER_CST)
6940 {
6941 s = arg0;
6942 delta = arg1;
6943 }
6944 else if (TREE_CODE (arg1) == INTEGER_CST)
6945 {
6946 s = arg1;
6947 delta = arg0;
6948 }
6949 else
6950 return NULL_TREE;
6951 }
6952 else if (TREE_CODE (op1) == INTEGER_CST)
6953 {
6954 delta = op1;
6955 s = NULL_TREE;
6956 }
6957 else
6958 {
6959 /* Treat op1 as delta * 1. */
6960 delta = op1;
6961 s = integer_one_node;
6962 }
6963
6964 /* Handle &x.array the same as we would handle &x.array[0]. */
6965 if (TREE_CODE (ref) == COMPONENT_REF
6966 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6967 {
6968 tree domain;
6969
6970 /* Remember if this was a multi-dimensional array. */
6971 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6972 mdim = true;
6973
6974 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6975 if (! domain)
6976 goto cont;
6977 itype = TREE_TYPE (domain);
6978
6979 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6980 if (TREE_CODE (step) != INTEGER_CST)
6981 goto cont;
6982
6983 if (s)
6984 {
6985 if (! tree_int_cst_equal (step, s))
6986 goto cont;
6987 }
6988 else
6989 {
6990 /* Check whether delta is a multiple of the step. */
6991 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6992 if (! tmp)
6993 goto cont;
6994 delta = tmp;
6995 }
6996
6997 /* Only fold here if we can verify we do not overflow one
6998 dimension of a multi-dimensional array. */
6999 if (mdim)
7000 {
7001 tree tmp;
7002
7003 if (!TYPE_MIN_VALUE (domain)
7004 || !TYPE_MAX_VALUE (domain)
7005 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7006 goto cont;
7007
7008 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7009 fold_convert_loc (loc, itype,
7010 TYPE_MIN_VALUE (domain)),
7011 fold_convert_loc (loc, itype, delta));
7012 if (TREE_CODE (tmp) != INTEGER_CST
7013 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7014 goto cont;
7015 }
7016
7017 /* We found a suitable component reference. */
7018
7019 pref = TREE_OPERAND (addr, 0);
7020 ret = copy_node (pref);
7021 SET_EXPR_LOCATION (ret, loc);
7022
7023 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7024 fold_build2_loc
7025 (loc, PLUS_EXPR, itype,
7026 fold_convert_loc (loc, itype,
7027 TYPE_MIN_VALUE
7028 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7029 fold_convert_loc (loc, itype, delta)),
7030 NULL_TREE, NULL_TREE);
7031 return build_fold_addr_expr_loc (loc, ret);
7032 }
7033
7034 cont:
7035
7036 for (;; ref = TREE_OPERAND (ref, 0))
7037 {
7038 if (TREE_CODE (ref) == ARRAY_REF)
7039 {
7040 tree domain;
7041
7042 /* Remember if this was a multi-dimensional array. */
7043 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7044 mdim = true;
7045
7046 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7047 if (! domain)
7048 continue;
7049 itype = TREE_TYPE (domain);
7050
7051 step = array_ref_element_size (ref);
7052 if (TREE_CODE (step) != INTEGER_CST)
7053 continue;
7054
7055 if (s)
7056 {
7057 if (! tree_int_cst_equal (step, s))
7058 continue;
7059 }
7060 else
7061 {
7062 /* Check whether delta is a multiple of the step. */
7063 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7064 if (! tmp)
7065 continue;
7066 delta = tmp;
7067 }
7068
7069 /* Only fold here if we can verify we do not overflow one
7070 dimension of a multi-dimensional array. */
7071 if (mdim)
7072 {
7073 tree tmp;
7074
7075 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7076 || !TYPE_MAX_VALUE (domain)
7077 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7078 continue;
7079
7080 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7081 fold_convert_loc (loc, itype,
7082 TREE_OPERAND (ref, 1)),
7083 fold_convert_loc (loc, itype, delta));
7084 if (!tmp
7085 || TREE_CODE (tmp) != INTEGER_CST
7086 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7087 continue;
7088 }
7089
7090 break;
7091 }
7092 else
7093 mdim = false;
7094
7095 if (!handled_component_p (ref))
7096 return NULL_TREE;
7097 }
7098
7099 /* We found a suitable array reference. So copy everything up to it,
7100 and replace the index. */
7101
7102 pref = TREE_OPERAND (addr, 0);
7103 ret = copy_node (pref);
7104 SET_EXPR_LOCATION (ret, loc);
7105 pos = ret;
7106
7107 while (pref != ref)
7108 {
7109 pref = TREE_OPERAND (pref, 0);
7110 TREE_OPERAND (pos, 0) = copy_node (pref);
7111 pos = TREE_OPERAND (pos, 0);
7112 }
7113
7114 TREE_OPERAND (pos, 1)
7115 = fold_build2_loc (loc, PLUS_EXPR, itype,
7116 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7117 fold_convert_loc (loc, itype, delta));
7118 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7119 }
7120
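/* For example, for `int a[10];' with a 4-byte element type, the
   routine above rewrites

     &a[i] p+ 4 * d   into   &a[i + d]
     &a[i] p+ 8       into   &a[i + 2]

   keeping the address computation in terms of array indices.  */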
7121
7122 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7123 means A >= Y && A != MAX, but in this case we know that
7124 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7125
7126 static tree
7127 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7128 {
7129 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7130
7131 if (TREE_CODE (bound) == LT_EXPR)
7132 a = TREE_OPERAND (bound, 0);
7133 else if (TREE_CODE (bound) == GT_EXPR)
7134 a = TREE_OPERAND (bound, 1);
7135 else
7136 return NULL_TREE;
7137
7138 typea = TREE_TYPE (a);
7139 if (!INTEGRAL_TYPE_P (typea)
7140 && !POINTER_TYPE_P (typea))
7141 return NULL_TREE;
7142
7143 if (TREE_CODE (ineq) == LT_EXPR)
7144 {
7145 a1 = TREE_OPERAND (ineq, 1);
7146 y = TREE_OPERAND (ineq, 0);
7147 }
7148 else if (TREE_CODE (ineq) == GT_EXPR)
7149 {
7150 a1 = TREE_OPERAND (ineq, 0);
7151 y = TREE_OPERAND (ineq, 1);
7152 }
7153 else
7154 return NULL_TREE;
7155
7156 if (TREE_TYPE (a1) != typea)
7157 return NULL_TREE;
7158
7159 if (POINTER_TYPE_P (typea))
7160 {
7161 /* Convert the pointers to integers before taking the difference. */
7162 tree ta = fold_convert_loc (loc, ssizetype, a);
7163 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7164 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7165 }
7166 else
7167 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7168
7169 if (!diff || !integer_onep (diff))
7170 return NULL_TREE;
7171
7172 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7173 }
7174
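/* For example, given BOUND `a < x' and INEQ `a + 1 > y', the routine
   above produces `a >= y'; the bound guarantees that `a + 1' cannot
   wrap around.  */
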
7175 /* Fold a sum or difference of at least one multiplication.
7176 Returns the folded tree or NULL if no simplification could be made. */
7177
7178 static tree
7179 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7180 tree arg0, tree arg1)
7181 {
7182 tree arg00, arg01, arg10, arg11;
7183 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7184
7185 /* (A * C) +- (B * C) -> (A+-B) * C.
7186 (A * C) +- A -> A * (C+-1).
7187 We are most concerned about the case where C is a constant,
7188 but other combinations show up during loop reduction. Since
7189 it is not difficult, try all four possibilities. */
7190
7191 if (TREE_CODE (arg0) == MULT_EXPR)
7192 {
7193 arg00 = TREE_OPERAND (arg0, 0);
7194 arg01 = TREE_OPERAND (arg0, 1);
7195 }
7196 else if (TREE_CODE (arg0) == INTEGER_CST)
7197 {
7198 arg00 = build_one_cst (type);
7199 arg01 = arg0;
7200 }
7201 else
7202 {
7203 /* We cannot generate constant 1 for fract. */
7204 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7205 return NULL_TREE;
7206 arg00 = arg0;
7207 arg01 = build_one_cst (type);
7208 }
7209 if (TREE_CODE (arg1) == MULT_EXPR)
7210 {
7211 arg10 = TREE_OPERAND (arg1, 0);
7212 arg11 = TREE_OPERAND (arg1, 1);
7213 }
7214 else if (TREE_CODE (arg1) == INTEGER_CST)
7215 {
7216 arg10 = build_one_cst (type);
7217 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7218 the purpose of this canonicalization. */
7219 if (TREE_INT_CST_HIGH (arg1) == -1
7220 && negate_expr_p (arg1)
7221 && code == PLUS_EXPR)
7222 {
7223 arg11 = negate_expr (arg1);
7224 code = MINUS_EXPR;
7225 }
7226 else
7227 arg11 = arg1;
7228 }
7229 else
7230 {
7231 /* We cannot generate constant 1 for fract. */
7232 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7233 return NULL_TREE;
7234 arg10 = arg1;
7235 arg11 = build_one_cst (type);
7236 }
7237 same = NULL_TREE;
7238
7239 if (operand_equal_p (arg01, arg11, 0))
7240 same = arg01, alt0 = arg00, alt1 = arg10;
7241 else if (operand_equal_p (arg00, arg10, 0))
7242 same = arg00, alt0 = arg01, alt1 = arg11;
7243 else if (operand_equal_p (arg00, arg11, 0))
7244 same = arg00, alt0 = arg01, alt1 = arg10;
7245 else if (operand_equal_p (arg01, arg10, 0))
7246 same = arg01, alt0 = arg00, alt1 = arg11;
7247
7248 /* No identical multiplicands; see if we can find a common
7249 power-of-two factor in non-power-of-two multiplies. This
7250 can help in multi-dimensional array access. */
7251 else if (host_integerp (arg01, 0)
7252 && host_integerp (arg11, 0))
7253 {
7254 HOST_WIDE_INT int01, int11, tmp;
7255 bool swap = false;
7256 tree maybe_same;
7257 int01 = TREE_INT_CST_LOW (arg01);
7258 int11 = TREE_INT_CST_LOW (arg11);
7259
7260 /* Move min of absolute values to int11. */
7261 if (absu_hwi (int01) < absu_hwi (int11))
7262 {
7263 tmp = int01, int01 = int11, int11 = tmp;
7264 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7265 maybe_same = arg01;
7266 swap = true;
7267 }
7268 else
7269 maybe_same = arg11;
7270
7271 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7272 /* The remainder should not be a constant, otherwise we
7273 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7274 increase the number of multiplications necessary. */
7275 && TREE_CODE (arg10) != INTEGER_CST)
7276 {
7277 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7278 build_int_cst (TREE_TYPE (arg00),
7279 int01 / int11));
7280 alt1 = arg10;
7281 same = maybe_same;
7282 if (swap)
7283 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7284 }
7285 }
7286
7287 if (same)
7288 return fold_build2_loc (loc, MULT_EXPR, type,
7289 fold_build2_loc (loc, code, type,
7290 fold_convert_loc (loc, type, alt0),
7291 fold_convert_loc (loc, type, alt1)),
7292 fold_convert_loc (loc, type, same));
7293
7294 return NULL_TREE;
7295 }
7296
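/* For example, the routine above rewrites

     a * c + b * c   into   (a + b) * c
     a * c - a       into   a * (c - 1)
     i * 4 + j * 8   into   (i + j * 2) * 4

   where the last form uses the common power-of-two factor 4.  */
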
7297 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7298 specified by EXPR into the buffer PTR of length LEN bytes.
7299 Return the number of bytes placed in the buffer, or zero
7300 upon failure. */
7301
7302 static int
7303 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7304 {
7305 tree type = TREE_TYPE (expr);
7306 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7307 int byte, offset, word, words;
7308 unsigned char value;
7309
7310 if (total_bytes > len)
7311 return 0;
7312 words = total_bytes / UNITS_PER_WORD;
7313
7314 for (byte = 0; byte < total_bytes; byte++)
7315 {
7316 int bitpos = byte * BITS_PER_UNIT;
7317 if (bitpos < HOST_BITS_PER_WIDE_INT)
7318 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7319 else
7320 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7321 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7322
7323 if (total_bytes > UNITS_PER_WORD)
7324 {
7325 word = byte / UNITS_PER_WORD;
7326 if (WORDS_BIG_ENDIAN)
7327 word = (words - 1) - word;
7328 offset = word * UNITS_PER_WORD;
7329 if (BYTES_BIG_ENDIAN)
7330 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7331 else
7332 offset += byte % UNITS_PER_WORD;
7333 }
7334 else
7335 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7336 ptr[offset] = value;
7337 }
7338 return total_bytes;
7339 }
7340
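/* For example, on a little-endian host and target with 8-bit bytes,
   encoding the 32-bit INTEGER_CST 0x01020304 stores

     ptr[0] = 0x04, ptr[1] = 0x03, ptr[2] = 0x02, ptr[3] = 0x01

   and returns 4; a LEN smaller than 4 would make the routine
   return 0.  */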
7341
7342 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7343 specified by EXPR into the buffer PTR of length LEN bytes.
7344 Return the number of bytes placed in the buffer, or zero
7345 upon failure. */
7346
7347 static int
7348 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7349 {
7350 tree type = TREE_TYPE (expr);
7351 enum machine_mode mode = TYPE_MODE (type);
7352 int total_bytes = GET_MODE_SIZE (mode);
7353 FIXED_VALUE_TYPE value;
7354 tree i_value, i_type;
7355
7356 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7357 return 0;
7358
7359 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7360
7361 if (NULL_TREE == i_type
7362 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7363 return 0;
7364
7365 value = TREE_FIXED_CST (expr);
7366 i_value = double_int_to_tree (i_type, value.data);
7367
7368 return native_encode_int (i_value, ptr, len);
7369 }
7370
7371
7372 /* Subroutine of native_encode_expr. Encode the REAL_CST
7373 specified by EXPR into the buffer PTR of length LEN bytes.
7374 Return the number of bytes placed in the buffer, or zero
7375 upon failure. */
7376
7377 static int
7378 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7379 {
7380 tree type = TREE_TYPE (expr);
7381 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7382 int byte, offset, word, words, bitpos;
7383 unsigned char value;
7384
7385 /* There are always 32 bits in each long, no matter the size of
7386 the host's long. We handle floating point representations with
7387 up to 192 bits. */
7388 long tmp[6];
7389
7390 if (total_bytes > len)
7391 return 0;
7392 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7393
7394 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7395
7396 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7397 bitpos += BITS_PER_UNIT)
7398 {
7399 byte = (bitpos / BITS_PER_UNIT) & 3;
7400 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7401
7402 if (UNITS_PER_WORD < 4)
7403 {
7404 word = byte / UNITS_PER_WORD;
7405 if (WORDS_BIG_ENDIAN)
7406 word = (words - 1) - word;
7407 offset = word * UNITS_PER_WORD;
7408 if (BYTES_BIG_ENDIAN)
7409 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7410 else
7411 offset += byte % UNITS_PER_WORD;
7412 }
7413 else
7414 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7415 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7416 }
7417 return total_bytes;
7418 }
7419
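/* For example, encoding the 32-bit (SFmode) REAL_CST 1.0 on a
   little-endian target stores the IEEE single-precision image
   0x3f800000 as

     ptr[0] = 0x00, ptr[1] = 0x00, ptr[2] = 0x80, ptr[3] = 0x3f

   and returns 4.  */
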
7420 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7421 specified by EXPR into the buffer PTR of length LEN bytes.
7422 Return the number of bytes placed in the buffer, or zero
7423 upon failure. */
7424
7425 static int
7426 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7427 {
7428 int rsize, isize;
7429 tree part;
7430
7431 part = TREE_REALPART (expr);
7432 rsize = native_encode_expr (part, ptr, len);
7433 if (rsize == 0)
7434 return 0;
7435 part = TREE_IMAGPART (expr);
7436 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7437 if (isize != rsize)
7438 return 0;
7439 return rsize + isize;
7440 }
7441
7442
7443 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7444 specified by EXPR into the buffer PTR of length LEN bytes.
7445 Return the number of bytes placed in the buffer, or zero
7446 upon failure. */
7447
7448 static int
7449 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7450 {
7451 unsigned i, count;
7452 int size, offset;
7453 tree itype, elem;
7454
7455 offset = 0;
7456 count = VECTOR_CST_NELTS (expr);
7457 itype = TREE_TYPE (TREE_TYPE (expr));
7458 size = GET_MODE_SIZE (TYPE_MODE (itype));
7459 for (i = 0; i < count; i++)
7460 {
7461 elem = VECTOR_CST_ELT (expr, i);
7462 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7463 return 0;
7464 offset += size;
7465 }
7466 return offset;
7467 }
7468
7469
7470 /* Subroutine of native_encode_expr. Encode the STRING_CST
7471 specified by EXPR into the buffer PTR of length LEN bytes.
7472 Return the number of bytes placed in the buffer, or zero
7473 upon failure. */
7474
7475 static int
7476 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7477 {
7478 tree type = TREE_TYPE (expr);
7479 HOST_WIDE_INT total_bytes;
7480
7481 if (TREE_CODE (type) != ARRAY_TYPE
7482 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7483 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7484 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7485 return 0;
7486 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7487 if (total_bytes > len)
7488 return 0;
7489 if (TREE_STRING_LENGTH (expr) < total_bytes)
7490 {
7491 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7492 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7493 total_bytes - TREE_STRING_LENGTH (expr));
7494 }
7495 else
7496 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7497 return total_bytes;
7498 }
7499
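/* For example, encoding the STRING_CST "ab" with type `char[4]'
   stores 'a', 'b', 0, 0 and returns 4: any tail of the buffer beyond
   TREE_STRING_LENGTH is zero-filled.  */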
7500
7501 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7502 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7503 specified by EXPR into the buffer PTR of length LEN bytes. Return
7504 the number of bytes placed in the buffer, or zero upon failure. */
7505
7506 int
7507 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7508 {
7509 switch (TREE_CODE (expr))
7510 {
7511 case INTEGER_CST:
7512 return native_encode_int (expr, ptr, len);
7513
7514 case REAL_CST:
7515 return native_encode_real (expr, ptr, len);
7516
7517 case FIXED_CST:
7518 return native_encode_fixed (expr, ptr, len);
7519
7520 case COMPLEX_CST:
7521 return native_encode_complex (expr, ptr, len);
7522
7523 case VECTOR_CST:
7524 return native_encode_vector (expr, ptr, len);
7525
7526 case STRING_CST:
7527 return native_encode_string (expr, ptr, len);
7528
7529 default:
7530 return 0;
7531 }
7532 }
7533
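/* A typical use of native_encode_expr pairs it with
   native_interpret_expr below to bit-cast a constant, e.g.

     unsigned char buf[64];
     int len = native_encode_expr (expr, buf, sizeof (buf));
     if (len != 0)
       result = native_interpret_expr (new_type, buf, len);

   where `expr', `new_type' and `result' stand for caller-provided
   trees; fold_view_convert_expr below does exactly this.  */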
7534
7535 /* Subroutine of native_interpret_expr. Interpret the contents of
7536 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7537 If the buffer cannot be interpreted, return NULL_TREE. */
7538
7539 static tree
7540 native_interpret_int (tree type, const unsigned char *ptr, int len)
7541 {
7542 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7543 double_int result;
7544
7545 if (total_bytes > len
7546 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7547 return NULL_TREE;
7548
7549 result = double_int::from_buffer (ptr, total_bytes);
7550
7551 return double_int_to_tree (type, result);
7552 }
7553
7554
7555 /* Subroutine of native_interpret_expr. Interpret the contents of
7556 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7557 If the buffer cannot be interpreted, return NULL_TREE. */
7558
7559 static tree
7560 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7561 {
7562 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7563 double_int result;
7564 FIXED_VALUE_TYPE fixed_value;
7565
7566 if (total_bytes > len
7567 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7568 return NULL_TREE;
7569
7570 result = double_int::from_buffer (ptr, total_bytes);
7571 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7572
7573 return build_fixed (type, fixed_value);
7574 }
7575
7576
7577 /* Subroutine of native_interpret_expr. Interpret the contents of
7578 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7579 If the buffer cannot be interpreted, return NULL_TREE. */
7580
7581 static tree
7582 native_interpret_real (tree type, const unsigned char *ptr, int len)
7583 {
7584 enum machine_mode mode = TYPE_MODE (type);
7585 int total_bytes = GET_MODE_SIZE (mode);
7586 int byte, offset, word, words, bitpos;
7587 unsigned char value;
7588 /* There are always 32 bits in each long, no matter the size of
7589 the host's long. We handle floating point representations with
7590 up to 192 bits. */
7591 REAL_VALUE_TYPE r;
7592 long tmp[6];
7593
7594 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7595 if (total_bytes > len || total_bytes > 24)
7596 return NULL_TREE;
7597 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7598
7599 memset (tmp, 0, sizeof (tmp));
7600 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7601 bitpos += BITS_PER_UNIT)
7602 {
7603 byte = (bitpos / BITS_PER_UNIT) & 3;
7604 if (UNITS_PER_WORD < 4)
7605 {
7606 word = byte / UNITS_PER_WORD;
7607 if (WORDS_BIG_ENDIAN)
7608 word = (words - 1) - word;
7609 offset = word * UNITS_PER_WORD;
7610 if (BYTES_BIG_ENDIAN)
7611 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7612 else
7613 offset += byte % UNITS_PER_WORD;
7614 }
7615 else
7616 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7617 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7618
7619 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7620 }
7621
7622 real_from_target (&r, tmp, mode);
7623 return build_real (type, r);
7624 }
7625
7626
7627 /* Subroutine of native_interpret_expr. Interpret the contents of
7628 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7629 If the buffer cannot be interpreted, return NULL_TREE. */
7630
7631 static tree
7632 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7633 {
7634 tree etype, rpart, ipart;
7635 int size;
7636
7637 etype = TREE_TYPE (type);
7638 size = GET_MODE_SIZE (TYPE_MODE (etype));
7639 if (size * 2 > len)
7640 return NULL_TREE;
7641 rpart = native_interpret_expr (etype, ptr, size);
7642 if (!rpart)
7643 return NULL_TREE;
7644 ipart = native_interpret_expr (etype, ptr+size, size);
7645 if (!ipart)
7646 return NULL_TREE;
7647 return build_complex (type, rpart, ipart);
7648 }
7649
7650
7651 /* Subroutine of native_interpret_expr. Interpret the contents of
7652 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7653 If the buffer cannot be interpreted, return NULL_TREE. */
7654
7655 static tree
7656 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7657 {
7658 tree etype, elem;
7659 int i, size, count;
7660 tree *elements;
7661
7662 etype = TREE_TYPE (type);
7663 size = GET_MODE_SIZE (TYPE_MODE (etype));
7664 count = TYPE_VECTOR_SUBPARTS (type);
7665 if (size * count > len)
7666 return NULL_TREE;
7667
7668 elements = XALLOCAVEC (tree, count);
7669 for (i = count - 1; i >= 0; i--)
7670 {
7671 elem = native_interpret_expr (etype, ptr+(i*size), size);
7672 if (!elem)
7673 return NULL_TREE;
7674 elements[i] = elem;
7675 }
7676 return build_vector (type, elements);
7677 }
7678
7679
7680 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7681 the buffer PTR of length LEN as a constant of type TYPE. For
7682 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7683 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7684 return NULL_TREE. */
7685
7686 tree
7687 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7688 {
7689 switch (TREE_CODE (type))
7690 {
7691 case INTEGER_TYPE:
7692 case ENUMERAL_TYPE:
7693 case BOOLEAN_TYPE:
7694 case POINTER_TYPE:
7695 case REFERENCE_TYPE:
7696 return native_interpret_int (type, ptr, len);
7697
7698 case REAL_TYPE:
7699 return native_interpret_real (type, ptr, len);
7700
7701 case FIXED_POINT_TYPE:
7702 return native_interpret_fixed (type, ptr, len);
7703
7704 case COMPLEX_TYPE:
7705 return native_interpret_complex (type, ptr, len);
7706
7707 case VECTOR_TYPE:
7708 return native_interpret_vector (type, ptr, len);
7709
7710 default:
7711 return NULL_TREE;
7712 }
7713 }
7714
7715 /* Returns true if we can interpret the contents of a native encoding
7716 as TYPE. */
7717
7718 static bool
7719 can_native_interpret_type_p (tree type)
7720 {
7721 switch (TREE_CODE (type))
7722 {
7723 case INTEGER_TYPE:
7724 case ENUMERAL_TYPE:
7725 case BOOLEAN_TYPE:
7726 case POINTER_TYPE:
7727 case REFERENCE_TYPE:
7728 case FIXED_POINT_TYPE:
7729 case REAL_TYPE:
7730 case COMPLEX_TYPE:
7731 case VECTOR_TYPE:
7732 return true;
7733 default:
7734 return false;
7735 }
7736 }
7737
7738 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7739 TYPE at compile-time. If we're unable to perform the conversion
7740 return NULL_TREE. */
7741
7742 static tree
7743 fold_view_convert_expr (tree type, tree expr)
7744 {
7745 /* We support up to 512-bit values (for V8DFmode). */
7746 unsigned char buffer[64];
7747 int len;
7748
7749 /* Check that the host and target are sane. */
7750 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7751 return NULL_TREE;
7752
7753 len = native_encode_expr (expr, buffer, sizeof (buffer));
7754 if (len == 0)
7755 return NULL_TREE;
7756
7757 return native_interpret_expr (type, buffer, len);
7758 }
7759
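/* For example, on a target where float and int are both 32 bits,
   fold_view_convert_expr turns

     VIEW_CONVERT_EXPR<int>(1.0f)   into   1065353216 (0x3f800000)

   at compile time.  */
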
7760 /* Build an expression for the address of T. Folds away INDIRECT_REF
7761 to avoid confusing the gimplify process. */
7762
7763 tree
7764 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7765 {
7766 /* The size of the object is not relevant when talking about its address. */
7767 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7768 t = TREE_OPERAND (t, 0);
7769
7770 if (TREE_CODE (t) == INDIRECT_REF)
7771 {
7772 t = TREE_OPERAND (t, 0);
7773
7774 if (TREE_TYPE (t) != ptrtype)
7775 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7776 }
7777 else if (TREE_CODE (t) == MEM_REF
7778 && integer_zerop (TREE_OPERAND (t, 1)))
7779 return TREE_OPERAND (t, 0);
7780 else if (TREE_CODE (t) == MEM_REF
7781 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7782 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7783 TREE_OPERAND (t, 0),
7784 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7785 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7786 {
7787 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7788
7789 if (TREE_TYPE (t) != ptrtype)
7790 t = fold_convert_loc (loc, ptrtype, t);
7791 }
7792 else
7793 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7794
7795 return t;
7796 }
7797
7798 /* Build an expression for the address of T. */
7799
7800 tree
7801 build_fold_addr_expr_loc (location_t loc, tree t)
7802 {
7803 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7804
7805 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7806 }
7807
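/* For example, build_fold_addr_expr_loc folds

     &*p           into   p
     &MEM[p + 0]   into   p

   rather than wrapping an ADDR_EXPR around an INDIRECT_REF or
   MEM_REF, which would only confuse gimplification.  */
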
7808 static bool vec_cst_ctor_to_array (tree, tree *);
7809
7810 /* Fold a unary expression of code CODE and type TYPE with operand
7811 OP0. Return the folded expression if folding is successful.
7812 Otherwise, return NULL_TREE. */
7813
7814 tree
7815 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7816 {
7817 tree tem;
7818 tree arg0;
7819 enum tree_code_class kind = TREE_CODE_CLASS (code);
7820
7821 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7822 && TREE_CODE_LENGTH (code) == 1);
7823
7824 arg0 = op0;
7825 if (arg0)
7826 {
7827 if (CONVERT_EXPR_CODE_P (code)
7828 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7829 {
7830 /* Don't use STRIP_NOPS, because signedness of argument type
7831 matters. */
7832 STRIP_SIGN_NOPS (arg0);
7833 }
7834 else
7835 {
7836 /* Strip any conversions that don't change the mode. This
7837 is safe for every expression, except for a comparison
7838 expression because its signedness is derived from its
7839 operands.
7840
7841 Note that this is done as an internal manipulation within
7842 the constant folder, in order to find the simplest
7843 representation of the arguments so that their form can be
7844 studied. In any case, the appropriate type conversions
7845 should be put back in the tree that will get out of the
7846 constant folder. */
7847 STRIP_NOPS (arg0);
7848 }
7849 }
7850
7851 if (TREE_CODE_CLASS (code) == tcc_unary)
7852 {
7853 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7854 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7855 fold_build1_loc (loc, code, type,
7856 fold_convert_loc (loc, TREE_TYPE (op0),
7857 TREE_OPERAND (arg0, 1))));
7858 else if (TREE_CODE (arg0) == COND_EXPR)
7859 {
7860 tree arg01 = TREE_OPERAND (arg0, 1);
7861 tree arg02 = TREE_OPERAND (arg0, 2);
7862 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7863 arg01 = fold_build1_loc (loc, code, type,
7864 fold_convert_loc (loc,
7865 TREE_TYPE (op0), arg01));
7866 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7867 arg02 = fold_build1_loc (loc, code, type,
7868 fold_convert_loc (loc,
7869 TREE_TYPE (op0), arg02));
7870 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7871 arg01, arg02);
7872
7873 /* If this was a conversion, and all we did was to move it
7874 inside the COND_EXPR, bring it back out. But leave it if
7875 it is a conversion from integer to integer and the
7876 result precision is no wider than a word since such a
7877 conversion is cheap and may be optimized away by combine,
7878 while it couldn't if it were outside the COND_EXPR. Then return
7879 so we don't get into an infinite recursion loop taking the
7880 conversion out and then back in. */
7881
7882 if ((CONVERT_EXPR_CODE_P (code)
7883 || code == NON_LVALUE_EXPR)
7884 && TREE_CODE (tem) == COND_EXPR
7885 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7886 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7887 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7888 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7889 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7890 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7891 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7892 && (INTEGRAL_TYPE_P
7893 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7894 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7895 || flag_syntax_only))
7896 tem = build1_loc (loc, code, type,
7897 build3 (COND_EXPR,
7898 TREE_TYPE (TREE_OPERAND
7899 (TREE_OPERAND (tem, 1), 0)),
7900 TREE_OPERAND (tem, 0),
7901 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7902 TREE_OPERAND (TREE_OPERAND (tem, 2),
7903 0)));
7904 return tem;
7905 }
7906 }
7907
7908 switch (code)
7909 {
7910 case PAREN_EXPR:
7911 /* Re-association barriers around constants and other re-association
7912 barriers can be removed. */
7913 if (CONSTANT_CLASS_P (op0)
7914 || TREE_CODE (op0) == PAREN_EXPR)
7915 return fold_convert_loc (loc, type, op0);
7916 return NULL_TREE;
7917
7918 CASE_CONVERT:
7919 case FLOAT_EXPR:
7920 case FIX_TRUNC_EXPR:
7921 if (TREE_TYPE (op0) == type)
7922 return op0;
7923
7924 if (COMPARISON_CLASS_P (op0))
7925 {
7926 /* If we have (type) (a CMP b) and type is an integral type, return
7927 new expression involving the new type. Canonicalize
7928 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7929 non-integral type.
7930 Do not fold the result, as that would not simplify further;
7931 folding again would only result in recursion. */
7932 if (TREE_CODE (type) == BOOLEAN_TYPE)
7933 return build2_loc (loc, TREE_CODE (op0), type,
7934 TREE_OPERAND (op0, 0),
7935 TREE_OPERAND (op0, 1));
7936 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7937 && TREE_CODE (type) != VECTOR_TYPE)
7938 return build3_loc (loc, COND_EXPR, type, op0,
7939 constant_boolean_node (true, type),
7940 constant_boolean_node (false, type));
7941 }
7942
7943 /* Handle cases of two conversions in a row. */
7944 if (CONVERT_EXPR_P (op0))
7945 {
7946 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7947 tree inter_type = TREE_TYPE (op0);
7948 int inside_int = INTEGRAL_TYPE_P (inside_type);
7949 int inside_ptr = POINTER_TYPE_P (inside_type);
7950 int inside_float = FLOAT_TYPE_P (inside_type);
7951 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7952 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7953 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7954 int inter_int = INTEGRAL_TYPE_P (inter_type);
7955 int inter_ptr = POINTER_TYPE_P (inter_type);
7956 int inter_float = FLOAT_TYPE_P (inter_type);
7957 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7958 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7959 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7960 int final_int = INTEGRAL_TYPE_P (type);
7961 int final_ptr = POINTER_TYPE_P (type);
7962 int final_float = FLOAT_TYPE_P (type);
7963 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7964 unsigned int final_prec = TYPE_PRECISION (type);
7965 int final_unsignedp = TYPE_UNSIGNED (type);
7966
7967 /* In addition to the cases of two conversions in a row
7968 handled below, if we are converting something to its own
7969 type via an object of identical or wider precision, neither
7970 conversion is needed. */
7971 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7972 && (((inter_int || inter_ptr) && final_int)
7973 || (inter_float && final_float))
7974 && inter_prec >= final_prec)
7975 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7976
7977 /* Likewise, if the intermediate and initial types are either both
7978 float or both integer, we don't need the middle conversion if the
7979 former is wider than the latter and doesn't change the signedness
7980 (for integers). Avoid this if the final type is a pointer since
7981 then we sometimes need the middle conversion. Likewise if the
7982 final type has a precision not equal to the size of its mode. */
7983 if (((inter_int && inside_int)
7984 || (inter_float && inside_float)
7985 || (inter_vec && inside_vec))
7986 && inter_prec >= inside_prec
7987 && (inter_float || inter_vec
7988 || inter_unsignedp == inside_unsignedp)
7989 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7990 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7991 && ! final_ptr
7992 && (! final_vec || inter_prec == inside_prec))
7993 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7994
7995 /* If we have a sign-extension of a zero-extended value, we can
7996 replace that by a single zero-extension. Likewise if the
7997 final conversion does not change precision we can drop the
7998 intermediate conversion. */
7999 if (inside_int && inter_int && final_int
8000 && ((inside_prec < inter_prec && inter_prec < final_prec
8001 && inside_unsignedp && !inter_unsignedp)
8002 || final_prec == inter_prec))
8003 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8004
8005 /* Two conversions in a row are not needed unless:
8006 - some conversion is floating-point (overstrict for now), or
8007 - some conversion is a vector (overstrict for now), or
8008 - the intermediate type is narrower than both initial and
8009 final, or
8010 - the intermediate type and innermost type differ in signedness,
8011 and the outermost type is wider than the intermediate, or
8012 - the initial type is a pointer type and the precisions of the
8013 intermediate and final types differ, or
8014 - the final type is a pointer type and the precisions of the
8015 initial and intermediate types differ. */
8016 if (! inside_float && ! inter_float && ! final_float
8017 && ! inside_vec && ! inter_vec && ! final_vec
8018 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8019 && ! (inside_int && inter_int
8020 && inter_unsignedp != inside_unsignedp
8021 && inter_prec < final_prec)
8022 && ((inter_unsignedp && inter_prec > inside_prec)
8023 == (final_unsignedp && final_prec > inter_prec))
8024 && ! (inside_ptr && inter_prec != final_prec)
8025 && ! (final_ptr && inside_prec != inter_prec)
8026 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8027 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8028 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8029 }
8030
8031 /* Handle (T *)&A.B.C for A being of type T and B and C
8032 living at offset zero. This occurs frequently in
8033 C++ upcasting and then accessing the base. */
8034 if (TREE_CODE (op0) == ADDR_EXPR
8035 && POINTER_TYPE_P (type)
8036 && handled_component_p (TREE_OPERAND (op0, 0)))
8037 {
8038 HOST_WIDE_INT bitsize, bitpos;
8039 tree offset;
8040 enum machine_mode mode;
8041 int unsignedp, volatilep;
8042 tree base = TREE_OPERAND (op0, 0);
8043 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8044 &mode, &unsignedp, &volatilep, false);
8045 /* If the reference was to a (constant) zero offset, we can use
8046 the address of the base if it has the same base type
8047 as the result type and the pointer type is unqualified. */
8048 if (! offset && bitpos == 0
8049 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8050 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8051 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8052 return fold_convert_loc (loc, type,
8053 build_fold_addr_expr_loc (loc, base));
8054 }
8055
8056 if (TREE_CODE (op0) == MODIFY_EXPR
8057 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8058 /* Detect assigning a bitfield. */
8059 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8060 && DECL_BIT_FIELD
8061 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8062 {
8063 /* Don't leave an assignment inside a conversion
8064 unless assigning a bitfield. */
8065 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8066 /* First do the assignment, then return converted constant. */
8067 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8068 TREE_NO_WARNING (tem) = 1;
8069 TREE_USED (tem) = 1;
8070 return tem;
8071 }
8072
8073 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8074 constants (if x has signed type, the sign bit cannot be set
8075 in c). This folds extension into the BIT_AND_EXPR.
8076 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8077 very likely don't have maximal range for their precision and this
8078 transformation effectively doesn't preserve non-maximal ranges. */
8079 if (TREE_CODE (type) == INTEGER_TYPE
8080 && TREE_CODE (op0) == BIT_AND_EXPR
8081 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8082 {
8083 tree and_expr = op0;
8084 tree and0 = TREE_OPERAND (and_expr, 0);
8085 tree and1 = TREE_OPERAND (and_expr, 1);
8086 int change = 0;
8087
8088 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8089 || (TYPE_PRECISION (type)
8090 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8091 change = 1;
8092 else if (TYPE_PRECISION (TREE_TYPE (and1))
8093 <= HOST_BITS_PER_WIDE_INT
8094 && host_integerp (and1, 1))
8095 {
8096 unsigned HOST_WIDE_INT cst;
8097
8098 cst = tree_low_cst (and1, 1);
8099 cst &= (HOST_WIDE_INT) -1
8100 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8101 change = (cst == 0);
8102 #ifdef LOAD_EXTEND_OP
8103 if (change
8104 && !flag_syntax_only
8105 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8106 == ZERO_EXTEND))
8107 {
8108 tree uns = unsigned_type_for (TREE_TYPE (and0));
8109 and0 = fold_convert_loc (loc, uns, and0);
8110 and1 = fold_convert_loc (loc, uns, and1);
8111 }
8112 #endif
8113 }
8114 if (change)
8115 {
8116 tem = force_fit_type_double (type, tree_to_double_int (and1),
8117 0, TREE_OVERFLOW (and1));
8118 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8119 fold_convert_loc (loc, type, and0), tem);
8120 }
8121 }
8122
8123 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8124 when one of the new casts will fold away. Conservatively we assume
8125 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8126 if (POINTER_TYPE_P (type)
8127 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8128 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8129 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8130 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8131 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8132 {
8133 tree arg00 = TREE_OPERAND (arg0, 0);
8134 tree arg01 = TREE_OPERAND (arg0, 1);
8135
8136 return fold_build_pointer_plus_loc
8137 (loc, fold_convert_loc (loc, type, arg00), arg01);
8138 }
8139
8140 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8141 of the same precision, and X is an integer type not narrower than
8142 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8143 if (INTEGRAL_TYPE_P (type)
8144 && TREE_CODE (op0) == BIT_NOT_EXPR
8145 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8146 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8147 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8148 {
8149 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8150 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8151 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8152 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8153 fold_convert_loc (loc, type, tem));
8154 }
8155
8156 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8157 type of X and Y (integer types only). */
8158 if (INTEGRAL_TYPE_P (type)
8159 && TREE_CODE (op0) == MULT_EXPR
8160 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8161 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8162 {
8163 /* Be careful not to introduce new overflows. */
8164 tree mult_type;
8165 if (TYPE_OVERFLOW_WRAPS (type))
8166 mult_type = type;
8167 else
8168 mult_type = unsigned_type_for (type);
8169
8170 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8171 {
8172 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8173 fold_convert_loc (loc, mult_type,
8174 TREE_OPERAND (op0, 0)),
8175 fold_convert_loc (loc, mult_type,
8176 TREE_OPERAND (op0, 1)));
8177 return fold_convert_loc (loc, type, tem);
8178 }
8179 }
8180
8181 tem = fold_convert_const (code, type, op0);
8182 return tem ? tem : NULL_TREE;
8183
8184 case ADDR_SPACE_CONVERT_EXPR:
8185 if (integer_zerop (arg0))
8186 return fold_convert_const (code, type, arg0);
8187 return NULL_TREE;
8188
8189 case FIXED_CONVERT_EXPR:
8190 tem = fold_convert_const (code, type, arg0);
8191 return tem ? tem : NULL_TREE;
8192
8193 case VIEW_CONVERT_EXPR:
8194 if (TREE_TYPE (op0) == type)
8195 return op0;
8196 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8197 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8198 type, TREE_OPERAND (op0, 0));
8199 if (TREE_CODE (op0) == MEM_REF)
8200 return fold_build2_loc (loc, MEM_REF, type,
8201 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8202
8203 /* For integral conversions with the same precision or pointer
8204 conversions use a NOP_EXPR instead. */
8205 if ((INTEGRAL_TYPE_P (type)
8206 || POINTER_TYPE_P (type))
8207 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8208 || POINTER_TYPE_P (TREE_TYPE (op0)))
8209 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8210 return fold_convert_loc (loc, type, op0);
8211
8212 /* Strip inner integral conversions that do not change the precision. */
8213 if (CONVERT_EXPR_P (op0)
8214 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8215 || POINTER_TYPE_P (TREE_TYPE (op0)))
8216 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8217 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8218 && (TYPE_PRECISION (TREE_TYPE (op0))
8219 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8220 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8221 type, TREE_OPERAND (op0, 0));
8222
8223 return fold_view_convert_expr (type, op0);
8224
8225 case NEGATE_EXPR:
8226 tem = fold_negate_expr (loc, arg0);
8227 if (tem)
8228 return fold_convert_loc (loc, type, tem);
8229 return NULL_TREE;
8230
8231 case ABS_EXPR:
8232 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8233 return fold_abs_const (arg0, type);
8234 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8235 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8236 /* Convert fabs((double)float) into (double)fabsf(float). */
8237 else if (TREE_CODE (arg0) == NOP_EXPR
8238 && TREE_CODE (type) == REAL_TYPE)
8239 {
8240 tree targ0 = strip_float_extensions (arg0);
8241 if (targ0 != arg0)
8242 return fold_convert_loc (loc, type,
8243 fold_build1_loc (loc, ABS_EXPR,
8244 TREE_TYPE (targ0),
8245 targ0));
8246 }
8247 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8248 else if (TREE_CODE (arg0) == ABS_EXPR)
8249 return arg0;
8250 else if (tree_expr_nonnegative_p (arg0))
8251 return arg0;
8252
8253 /* Strip sign ops from argument. */
8254 if (TREE_CODE (type) == REAL_TYPE)
8255 {
8256 tem = fold_strip_sign_ops (arg0);
8257 if (tem)
8258 return fold_build1_loc (loc, ABS_EXPR, type,
8259 fold_convert_loc (loc, type, tem));
8260 }
8261 return NULL_TREE;
8262
8263 case CONJ_EXPR:
8264 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8265 return fold_convert_loc (loc, type, arg0);
8266 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8267 {
8268 tree itype = TREE_TYPE (type);
8269 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8270 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8271 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8272 negate_expr (ipart));
8273 }
8274 if (TREE_CODE (arg0) == COMPLEX_CST)
8275 {
8276 tree itype = TREE_TYPE (type);
8277 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8278 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8279 return build_complex (type, rpart, negate_expr (ipart));
8280 }
8281 if (TREE_CODE (arg0) == CONJ_EXPR)
8282 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8283 return NULL_TREE;
8284
8285 case BIT_NOT_EXPR:
8286 if (TREE_CODE (arg0) == INTEGER_CST)
8287 return fold_not_const (arg0, type);
8288 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8289 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8290 /* Convert ~ (-A) to A - 1. */
8291 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8292 return fold_build2_loc (loc, MINUS_EXPR, type,
8293 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8294 build_int_cst (type, 1));
8295 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8296 else if (INTEGRAL_TYPE_P (type)
8297 && ((TREE_CODE (arg0) == MINUS_EXPR
8298 && integer_onep (TREE_OPERAND (arg0, 1)))
8299 || (TREE_CODE (arg0) == PLUS_EXPR
8300 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8301 return fold_build1_loc (loc, NEGATE_EXPR, type,
8302 fold_convert_loc (loc, type,
8303 TREE_OPERAND (arg0, 0)));
8304 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8305 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8306 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8307 fold_convert_loc (loc, type,
8308 TREE_OPERAND (arg0, 0)))))
8309 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8310 fold_convert_loc (loc, type,
8311 TREE_OPERAND (arg0, 1)));
8312 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8313 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8314 fold_convert_loc (loc, type,
8315 TREE_OPERAND (arg0, 1)))))
8316 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 0)), tem);
8319 /* Perform BIT_NOT_EXPR on each element individually. */
8320 else if (TREE_CODE (arg0) == VECTOR_CST)
8321 {
8322 tree *elements;
8323 tree elem;
8324 unsigned count = VECTOR_CST_NELTS (arg0), i;
8325
8326 elements = XALLOCAVEC (tree, count);
8327 for (i = 0; i < count; i++)
8328 {
8329 elem = VECTOR_CST_ELT (arg0, i);
8330 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8331 if (elem == NULL_TREE)
8332 break;
8333 elements[i] = elem;
8334 }
8335 if (i == count)
8336 return build_vector (type, elements);
8337 }
8338 else if (COMPARISON_CLASS_P (arg0)
8339 && (VECTOR_TYPE_P (type)
8340 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8341 {
8342 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8343 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8344 HONOR_NANS (TYPE_MODE (op_type)));
8345 if (subcode != ERROR_MARK)
8346 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8347 TREE_OPERAND (arg0, 1));
8348 }
8349
8350
8351 return NULL_TREE;
8352
8353 case TRUTH_NOT_EXPR:
8354 /* Note that the operand of this must be an int
8355 and its values must be 0 or 1.
8356 ("true" is a fixed value perhaps depending on the language,
8357 but we don't handle values other than 1 correctly yet.) */
8358 tem = fold_truth_not_expr (loc, arg0);
8359 if (!tem)
8360 return NULL_TREE;
8361 return fold_convert_loc (loc, type, tem);
8362
8363 case REALPART_EXPR:
8364 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8365 return fold_convert_loc (loc, type, arg0);
8366 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8367 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8368 TREE_OPERAND (arg0, 1));
8369 if (TREE_CODE (arg0) == COMPLEX_CST)
8370 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8371 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8372 {
8373 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8374 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8375 fold_build1_loc (loc, REALPART_EXPR, itype,
8376 TREE_OPERAND (arg0, 0)),
8377 fold_build1_loc (loc, REALPART_EXPR, itype,
8378 TREE_OPERAND (arg0, 1)));
8379 return fold_convert_loc (loc, type, tem);
8380 }
8381 if (TREE_CODE (arg0) == CONJ_EXPR)
8382 {
8383 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8384 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8385 TREE_OPERAND (arg0, 0));
8386 return fold_convert_loc (loc, type, tem);
8387 }
8388 if (TREE_CODE (arg0) == CALL_EXPR)
8389 {
8390 tree fn = get_callee_fndecl (arg0);
8391 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8392 switch (DECL_FUNCTION_CODE (fn))
8393 {
8394 CASE_FLT_FN (BUILT_IN_CEXPI):
8395 fn = mathfn_built_in (type, BUILT_IN_COS);
8396 if (fn)
8397 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8398 break;
8399
8400 default:
8401 break;
8402 }
8403 }
8404 return NULL_TREE;
8405
8406 case IMAGPART_EXPR:
8407 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8408 return build_zero_cst (type);
8409 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8410 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8411 TREE_OPERAND (arg0, 0));
8412 if (TREE_CODE (arg0) == COMPLEX_CST)
8413 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8414 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8415 {
8416 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8417 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8418 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8419 TREE_OPERAND (arg0, 0)),
8420 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8421 TREE_OPERAND (arg0, 1)));
8422 return fold_convert_loc (loc, type, tem);
8423 }
8424 if (TREE_CODE (arg0) == CONJ_EXPR)
8425 {
8426 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8427 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8428 return fold_convert_loc (loc, type, negate_expr (tem));
8429 }
8430 if (TREE_CODE (arg0) == CALL_EXPR)
8431 {
8432 tree fn = get_callee_fndecl (arg0);
8433 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8434 switch (DECL_FUNCTION_CODE (fn))
8435 {
8436 CASE_FLT_FN (BUILT_IN_CEXPI):
8437 fn = mathfn_built_in (type, BUILT_IN_SIN);
8438 if (fn)
8439 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8440 break;
8441
8442 default:
8443 break;
8444 }
8445 }
8446 return NULL_TREE;
8447
8448 case INDIRECT_REF:
8449 /* Fold *&X to X if X is an lvalue. */
8450 if (TREE_CODE (op0) == ADDR_EXPR)
8451 {
8452 tree op00 = TREE_OPERAND (op0, 0);
8453 if ((TREE_CODE (op00) == VAR_DECL
8454 || TREE_CODE (op00) == PARM_DECL
8455 || TREE_CODE (op00) == RESULT_DECL)
8456 && !TREE_READONLY (op00))
8457 return op00;
8458 }
8459 return NULL_TREE;
8460
8461 case VEC_UNPACK_LO_EXPR:
8462 case VEC_UNPACK_HI_EXPR:
8463 case VEC_UNPACK_FLOAT_LO_EXPR:
8464 case VEC_UNPACK_FLOAT_HI_EXPR:
8465 {
8466 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8467 tree *elts;
8468 enum tree_code subcode;
8469
8470 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8471 if (TREE_CODE (arg0) != VECTOR_CST)
8472 return NULL_TREE;
8473
8474 elts = XALLOCAVEC (tree, nelts * 2);
8475 if (!vec_cst_ctor_to_array (arg0, elts))
8476 return NULL_TREE;
8477
8478 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8479 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8480 elts += nelts;
8481
8482 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8483 subcode = NOP_EXPR;
8484 else
8485 subcode = FLOAT_EXPR;
8486
8487 for (i = 0; i < nelts; i++)
8488 {
8489 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8490 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8491 return NULL_TREE;
8492 }
8493
8494 return build_vector (type, elts);
8495 }
8496
8497 case REDUC_MIN_EXPR:
8498 case REDUC_MAX_EXPR:
8499 case REDUC_PLUS_EXPR:
8500 {
8501 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8502 tree *elts;
8503 enum tree_code subcode;
8504
8505 if (TREE_CODE (op0) != VECTOR_CST)
8506 return NULL_TREE;
8507
8508 elts = XALLOCAVEC (tree, nelts);
8509 if (!vec_cst_ctor_to_array (op0, elts))
8510 return NULL_TREE;
8511
8512 switch (code)
8513 {
8514 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8515 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8516 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8517 default: gcc_unreachable ();
8518 }
8519
8520 for (i = 1; i < nelts; i++)
8521 {
8522 elts[0] = const_binop (subcode, elts[0], elts[i]);
8523 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8524 return NULL_TREE;
8525 elts[i] = build_zero_cst (TREE_TYPE (type));
8526 }
8527
8528 return build_vector (type, elts);
8529 }
8530
8531 default:
8532 return NULL_TREE;
8533 } /* switch (code) */
8534 }
8535
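/* For example, a caller can constant-fold a unary node with

     tree t = fold_unary_loc (loc, ABS_EXPR, integer_type_node,
                              build_int_cst (integer_type_node, -5));

   which yields the INTEGER_CST 5; when no simplification applies,
   fold_unary_loc returns NULL_TREE and the caller builds the
   expression itself.  */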
8536
8537 /* If the operation was a conversion do _not_ mark a resulting constant
8538 with TREE_OVERFLOW if the original constant was not. These conversions
8539 have implementation defined behavior and retaining the TREE_OVERFLOW
8540 flag here would confuse later passes such as VRP. */
8541 tree
8542 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8543 tree type, tree op0)
8544 {
8545 tree res = fold_unary_loc (loc, code, type, op0);
8546 if (res
8547 && TREE_CODE (res) == INTEGER_CST
8548 && TREE_CODE (op0) == INTEGER_CST
8549 && CONVERT_EXPR_CODE_P (code))
8550 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8551
8552 return res;
8553 }
8554
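/* For example, when an INTEGER_CST that already carries TREE_OVERFLOW
   is converted with fold_unary_ignore_overflow_loc, the result keeps
   exactly the flag of the operand: any overflow newly flagged by the
   conversion itself is discarded.  */
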
8555 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8556 operands OP0 and OP1. LOC is the location of the resulting expression.
8557 ARG0 and ARG1 are OP0 and OP1 with NOPs stripped.
8558 Return the folded expression if folding is successful. Otherwise,
8559 return NULL_TREE. */
8560 static tree
8561 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8562 tree arg0, tree arg1, tree op0, tree op1)
8563 {
8564 tree tem;
8565
8566 /* We only do these simplifications if we are optimizing. */
8567 if (!optimize)
8568 return NULL_TREE;
8569
8570 /* Check for things like (A || B) && (A || C). We can convert this
8571 to A || (B && C). Note that either operator can be any of the four
8572 truth and/or operations and the transformation will still be
8573 valid. Also note that we only care about order for the
8574 ANDIF and ORIF operators. If B contains side effects, this
8575 might change the truth-value of A. */
8576 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8577 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8578 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8579 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8580 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8581 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8582 {
8583 tree a00 = TREE_OPERAND (arg0, 0);
8584 tree a01 = TREE_OPERAND (arg0, 1);
8585 tree a10 = TREE_OPERAND (arg1, 0);
8586 tree a11 = TREE_OPERAND (arg1, 1);
8587 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8588 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8589 && (code == TRUTH_AND_EXPR
8590 || code == TRUTH_OR_EXPR));
8591
8592 if (operand_equal_p (a00, a10, 0))
8593 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8594 fold_build2_loc (loc, code, type, a01, a11));
8595 else if (commutative && operand_equal_p (a00, a11, 0))
8596 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8597 fold_build2_loc (loc, code, type, a01, a10));
8598 else if (commutative && operand_equal_p (a01, a10, 0))
8599 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8600 fold_build2_loc (loc, code, type, a00, a11));
8601
8602 /* This case is tricky because we must either have commutative
8603 operators or else A10 must not have side-effects. */
8604
8605 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8606 && operand_equal_p (a01, a11, 0))
8607 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8608 fold_build2_loc (loc, code, type, a00, a10),
8609 a01);
8610 }
8611
8612 /* See if we can build a range comparison. */
8613 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8614 return tem;
8615
8616 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8617 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8618 {
8619 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8620 if (tem)
8621 return fold_build2_loc (loc, code, type, tem, arg1);
8622 }
8623
8624 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8625 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8626 {
8627 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8628 if (tem)
8629 return fold_build2_loc (loc, code, type, arg0, tem);
8630 }
8631
8632 /* Check for the possibility of merging component references. If our
8633 lhs is another similar operation, try to merge its rhs with our
8634 rhs. Then try to merge our lhs and rhs. */
8635 if (TREE_CODE (arg0) == code
8636 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8637 TREE_OPERAND (arg0, 1), arg1)))
8638 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8639
8640 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8641 return tem;
8642
8643 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8644 && (code == TRUTH_AND_EXPR
8645 || code == TRUTH_ANDIF_EXPR
8646 || code == TRUTH_OR_EXPR
8647 || code == TRUTH_ORIF_EXPR))
8648 {
8649 enum tree_code ncode, icode;
8650
8651 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8652 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8653 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8654
8655 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8656 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8657 We don't want to pack more than two leaves into a non-IF AND/OR
8658 expression.
8659 If the tree code of the left-hand operand isn't an AND/OR-IF code
8660 and isn't equal to IF-CODE, then we don't want to add the right-hand
8661 operand. If the inner right-hand side of the left-hand operand has
8662 side-effects, or isn't simple, then we can't add to it, as otherwise
8663 we might destroy the if-sequence. */
8664 if (TREE_CODE (arg0) == icode
8665 && simple_operand_p_2 (arg1)
8666 /* Needed for sequence points to handle trapping and
8667 side-effects. */
8668 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8669 {
8670 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8671 arg1);
8672 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8673 tem);
8674 }
8675 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8676 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8677 else if (TREE_CODE (arg1) == icode
8678 && simple_operand_p_2 (arg0)
8679 /* Needed for sequence points to handle trapping and
8680 side-effects. */
8681 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8682 {
8683 tem = fold_build2_loc (loc, ncode, type,
8684 arg0, TREE_OPERAND (arg1, 0));
8685 return fold_build2_loc (loc, icode, type, tem,
8686 TREE_OPERAND (arg1, 1));
8687 }
8688 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8689 into (A OR B).
8690 For sequence point consistency, we need to check for trapping,
8691 and side-effects. */
8692 else if (code == icode && simple_operand_p_2 (arg0)
8693 && simple_operand_p_2 (arg1))
8694 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8695 }
8696
8697 return NULL_TREE;
8698 }
8699
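/* Illustrative example (hypothetical): the distribution performed at
   the top of fold_truth_andor.  Provided b and c are free of side
   effects, both forms below are equivalent for all inputs.  */
static int
example_truth_distribution (int a, int b, int c)
{
  /* (a || b) && (a || c) is folded to a || (b && c).  */
  return a || (b && c);
}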
8700 /* Fold a binary expression of code CODE and type TYPE with operands
8701 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8702 Return the folded expression if folding is successful. Otherwise,
8703 return NULL_TREE. */
8704
8705 static tree
8706 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8707 {
8708 enum tree_code compl_code;
8709
8710 if (code == MIN_EXPR)
8711 compl_code = MAX_EXPR;
8712 else if (code == MAX_EXPR)
8713 compl_code = MIN_EXPR;
8714 else
8715 gcc_unreachable ();
8716
8717 /* MIN (MAX (a, b), b) == b. */
8718 if (TREE_CODE (op0) == compl_code
8719 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8720 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8721
8722 /* MIN (MAX (b, a), b) == b. */
8723 if (TREE_CODE (op0) == compl_code
8724 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8725 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8726 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8727
8728 /* MIN (a, MAX (a, b)) == a. */
8729 if (TREE_CODE (op1) == compl_code
8730 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8731 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8732 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8733
8734 /* MIN (a, MAX (b, a)) == a. */
8735 if (TREE_CODE (op1) == compl_code
8736 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8737 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8738 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8739
8740 return NULL_TREE;
8741 }
8742
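/* Illustrative example (hypothetical): a scalar model of the first
   fold_minmax case.  MIN (MAX (a, b), b) is always b, so the MAX can
   be dropped (keeping a only for its side effects, if any).  */
static int
example_min_of_max (int a, int b)
{
  int max_ab = a > b ? a : b;		/* MAX (a, b) */
  return max_ab < b ? max_ab : b;	/* MIN (MAX (a, b), b) == b */
}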
8743 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8744 by changing CODE to reduce the magnitude of constants involved in
8745 ARG0 of the comparison.
8746 Returns a canonicalized comparison tree if a simplification was
8747 possible, otherwise returns NULL_TREE.
8748 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8749 valid if signed overflow is undefined. */
8750
8751 static tree
8752 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8753 tree arg0, tree arg1,
8754 bool *strict_overflow_p)
8755 {
8756 enum tree_code code0 = TREE_CODE (arg0);
8757 tree t, cst0 = NULL_TREE;
8758 int sgn0;
8759 bool swap = false;
8760
8761 /* Match A +- CST code arg1 and CST code arg1. We can change the
8762 first form only if overflow is undefined. */
8763 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8764 /* In principle pointers also have undefined overflow behavior,
8765 but that causes problems elsewhere. */
8766 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8767 && (code0 == MINUS_EXPR
8768 || code0 == PLUS_EXPR)
8769 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8770 || code0 == INTEGER_CST))
8771 return NULL_TREE;
8772
8773 /* Identify the constant in arg0 and its sign. */
8774 if (code0 == INTEGER_CST)
8775 cst0 = arg0;
8776 else
8777 cst0 = TREE_OPERAND (arg0, 1);
8778 sgn0 = tree_int_cst_sgn (cst0);
8779
8780 /* Overflowed constants and zero will cause problems. */
8781 if (integer_zerop (cst0)
8782 || TREE_OVERFLOW (cst0))
8783 return NULL_TREE;
8784
8785 /* See if we can reduce the magnitude of the constant in
8786 arg0 by changing the comparison code. */
8787 if (code0 == INTEGER_CST)
8788 {
8789 /* CST <= arg1 -> CST-1 < arg1. */
8790 if (code == LE_EXPR && sgn0 == 1)
8791 code = LT_EXPR;
8792 /* -CST < arg1 -> -CST-1 <= arg1. */
8793 else if (code == LT_EXPR && sgn0 == -1)
8794 code = LE_EXPR;
8795 /* CST > arg1 -> CST-1 >= arg1. */
8796 else if (code == GT_EXPR && sgn0 == 1)
8797 code = GE_EXPR;
8798 /* -CST >= arg1 -> -CST-1 > arg1. */
8799 else if (code == GE_EXPR && sgn0 == -1)
8800 code = GT_EXPR;
8801 else
8802 return NULL_TREE;
8803 /* arg1 code' CST' might be more canonical. */
8804 swap = true;
8805 }
8806 else
8807 {
8808 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8809 if (code == LT_EXPR
8810 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8811 code = LE_EXPR;
8812 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8813 else if (code == GT_EXPR
8814 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8815 code = GE_EXPR;
8816 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8817 else if (code == LE_EXPR
8818 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8819 code = LT_EXPR;
8820 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8821 else if (code == GE_EXPR
8822 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8823 code = GT_EXPR;
8824 else
8825 return NULL_TREE;
8826 *strict_overflow_p = true;
8827 }
8828
8829 /* Now build the constant reduced in magnitude. But not if that
8830 would produce one outside of its type's range. */
8831 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8832 && ((sgn0 == 1
8833 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8834 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8835 || (sgn0 == -1
8836 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8837 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8838 /* We cannot swap the comparison here as that would cause us to
8839 endlessly recurse. */
8840 return NULL_TREE;
8841
8842 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8843 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8844 if (code0 != INTEGER_CST)
8845 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8846 t = fold_convert (TREE_TYPE (arg1), t);
8847
8848 /* If swapping might yield a more canonical form, do so. */
8849 if (swap)
8850 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8851 else
8852 return fold_build2_loc (loc, code, type, t, arg1);
8853 }
8854
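/* Illustrative examples (hypothetical) of the canonicalization above,
   valid only when signed overflow is undefined:

     a + 2 >  b   becomes   a + 1 >= b
     3 <= b       becomes   b > 2	(constant form, then swapped)

   each step reducing the magnitude of the constant by one.  */
static int
example_canonical_cmp (int a, int b)
{
  return a + 1 >= b;	/* canonicalized form of a + 2 > b */
}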
8855 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8856 overflow further. Try to decrease the magnitude of constants involved
8857 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8858 and put sole constants at the second argument position.
8859 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8860
8861 static tree
8862 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8863 tree arg0, tree arg1)
8864 {
8865 tree t;
8866 bool strict_overflow_p;
8867 const char * const warnmsg = G_("assuming signed overflow does not occur "
8868 "when reducing constant in comparison");
8869
8870 /* Try canonicalization by simplifying arg0. */
8871 strict_overflow_p = false;
8872 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8873 &strict_overflow_p);
8874 if (t)
8875 {
8876 if (strict_overflow_p)
8877 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8878 return t;
8879 }
8880
8881 /* Try canonicalization by simplifying arg1 using the swapped
8882 comparison. */
8883 code = swap_tree_comparison (code);
8884 strict_overflow_p = false;
8885 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8886 &strict_overflow_p);
8887 if (t && strict_overflow_p)
8888 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8889 return t;
8890 }
8891
8892 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8893 space. This is used to avoid issuing overflow warnings for
8894 expressions like &p->x which cannot wrap. */
8895
8896 static bool
8897 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8898 {
8899 double_int di_offset, total;
8900
8901 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8902 return true;
8903
8904 if (bitpos < 0)
8905 return true;
8906
8907 if (offset == NULL_TREE)
8908 di_offset = double_int_zero;
8909 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8910 return true;
8911 else
8912 di_offset = TREE_INT_CST (offset);
8913
8914 bool overflow;
8915 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8916 total = di_offset.add_with_sign (units, true, &overflow);
8917 if (overflow)
8918 return true;
8919
8920 if (total.high != 0)
8921 return true;
8922
8923 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8924 if (size <= 0)
8925 return true;
8926
8927 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8928 array. */
8929 if (TREE_CODE (base) == ADDR_EXPR)
8930 {
8931 HOST_WIDE_INT base_size;
8932
8933 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8934 if (base_size > 0 && size < base_size)
8935 size = base_size;
8936 }
8937
8938 return total.low > (unsigned HOST_WIDE_INT) size;
8939 }
8940
8941 /* Subroutine of fold_binary. This routine performs all of the
8942 transformations that are common to the equality/inequality
8943 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8944 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8945 fold_binary should call fold_binary. Fold a comparison with
8946 tree code CODE and type TYPE with operands OP0 and OP1. Return
8947 the folded comparison or NULL_TREE. */
8948
8949 static tree
8950 fold_comparison (location_t loc, enum tree_code code, tree type,
8951 tree op0, tree op1)
8952 {
8953 tree arg0, arg1, tem;
8954
8955 arg0 = op0;
8956 arg1 = op1;
8957
8958 STRIP_SIGN_NOPS (arg0);
8959 STRIP_SIGN_NOPS (arg1);
8960
8961 tem = fold_relational_const (code, type, arg0, arg1);
8962 if (tem != NULL_TREE)
8963 return tem;
8964
8965 /* If one arg is a real or integer constant, put it last. */
8966 if (tree_swap_operands_p (arg0, arg1, true))
8967 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8968
8969 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8970 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8971 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8972 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8973 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8974 && (TREE_CODE (arg1) == INTEGER_CST
8975 && !TREE_OVERFLOW (arg1)))
8976 {
8977 tree const1 = TREE_OPERAND (arg0, 1);
8978 tree const2 = arg1;
8979 tree variable = TREE_OPERAND (arg0, 0);
8980 tree lhs;
8981 int lhs_add;
8982 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8983
8984 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8985 TREE_TYPE (arg1), const2, const1);
8986
8987 /* If the constant operation overflowed this can be
8988 simplified as a comparison against INT_MAX/INT_MIN. */
8989 if (TREE_CODE (lhs) == INTEGER_CST
8990 && TREE_OVERFLOW (lhs))
8991 {
8992 int const1_sgn = tree_int_cst_sgn (const1);
8993 enum tree_code code2 = code;
8994
8995 /* Get the sign of the constant on the lhs if the
8996 operation were VARIABLE + CONST1. */
8997 if (TREE_CODE (arg0) == MINUS_EXPR)
8998 const1_sgn = -const1_sgn;
8999
9000 /* The sign of the constant determines if we overflowed
9001 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9002 Canonicalize to the INT_MIN overflow by swapping the comparison
9003 if necessary. */
9004 if (const1_sgn == -1)
9005 code2 = swap_tree_comparison (code);
9006
9007 /* We now can look at the canonicalized case
9008 VARIABLE + 1 CODE2 INT_MIN
9009 and decide on the result. */
9010 if (code2 == LT_EXPR
9011 || code2 == LE_EXPR
9012 || code2 == EQ_EXPR)
9013 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9014 else if (code2 == NE_EXPR
9015 || code2 == GE_EXPR
9016 || code2 == GT_EXPR)
9017 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9018 }
9019
9020 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9021 && (TREE_CODE (lhs) != INTEGER_CST
9022 || !TREE_OVERFLOW (lhs)))
9023 {
9024 if (code != EQ_EXPR && code != NE_EXPR)
9025 fold_overflow_warning ("assuming signed overflow does not occur "
9026 "when changing X +- C1 cmp C2 to "
9027 "X cmp C1 +- C2",
9028 WARN_STRICT_OVERFLOW_COMPARISON);
9029 return fold_build2_loc (loc, code, type, variable, lhs);
9030 }
9031 }
9032
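/* Illustrative example (hypothetical): the transformation above turns

     x + 10 < 20	into	x < 10

   moving the constant to the right-hand side; if the combined constant
   overflows, the comparison instead folds to constant true/false via
   the INT_MIN/INT_MAX reasoning above.  */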
9033 /* For comparisons of pointers we can decompose it to a compile time
9034 comparison of the base objects and the offsets into the object.
9035 This requires at least one operand being an ADDR_EXPR or a
9036 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9037 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9038 && (TREE_CODE (arg0) == ADDR_EXPR
9039 || TREE_CODE (arg1) == ADDR_EXPR
9040 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9041 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9042 {
9043 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9044 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9045 enum machine_mode mode;
9046 int volatilep, unsignedp;
9047 bool indirect_base0 = false, indirect_base1 = false;
9048
9049 /* Get base and offset for the access. Strip ADDR_EXPR for
9050 get_inner_reference, but put it back by stripping INDIRECT_REF
9051 off the base object if possible. indirect_baseN will be true
9052 if baseN is not an address but refers to the object itself. */
9053 base0 = arg0;
9054 if (TREE_CODE (arg0) == ADDR_EXPR)
9055 {
9056 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9057 &bitsize, &bitpos0, &offset0, &mode,
9058 &unsignedp, &volatilep, false);
9059 if (TREE_CODE (base0) == INDIRECT_REF)
9060 base0 = TREE_OPERAND (base0, 0);
9061 else
9062 indirect_base0 = true;
9063 }
9064 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9065 {
9066 base0 = TREE_OPERAND (arg0, 0);
9067 STRIP_SIGN_NOPS (base0);
9068 if (TREE_CODE (base0) == ADDR_EXPR)
9069 {
9070 base0 = TREE_OPERAND (base0, 0);
9071 indirect_base0 = true;
9072 }
9073 offset0 = TREE_OPERAND (arg0, 1);
9074 if (host_integerp (offset0, 0))
9075 {
9076 HOST_WIDE_INT off = size_low_cst (offset0);
9077 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9078 * BITS_PER_UNIT)
9079 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9080 {
9081 bitpos0 = off * BITS_PER_UNIT;
9082 offset0 = NULL_TREE;
9083 }
9084 }
9085 }
9086
9087 base1 = arg1;
9088 if (TREE_CODE (arg1) == ADDR_EXPR)
9089 {
9090 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9091 &bitsize, &bitpos1, &offset1, &mode,
9092 &unsignedp, &volatilep, false);
9093 if (TREE_CODE (base1) == INDIRECT_REF)
9094 base1 = TREE_OPERAND (base1, 0);
9095 else
9096 indirect_base1 = true;
9097 }
9098 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9099 {
9100 base1 = TREE_OPERAND (arg1, 0);
9101 STRIP_SIGN_NOPS (base1);
9102 if (TREE_CODE (base1) == ADDR_EXPR)
9103 {
9104 base1 = TREE_OPERAND (base1, 0);
9105 indirect_base1 = true;
9106 }
9107 offset1 = TREE_OPERAND (arg1, 1);
9108 if (host_integerp (offset1, 0))
9109 {
9110 HOST_WIDE_INT off = size_low_cst (offset1);
9111 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9112 * BITS_PER_UNIT)
9113 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9114 {
9115 bitpos1 = off * BITS_PER_UNIT;
9116 offset1 = NULL_TREE;
9117 }
9118 }
9119 }
9120
9121 /* A local variable can never be pointed to by
9122 the default SSA name of an incoming parameter. */
9123 if ((TREE_CODE (arg0) == ADDR_EXPR
9124 && indirect_base0
9125 && TREE_CODE (base0) == VAR_DECL
9126 && auto_var_in_fn_p (base0, current_function_decl)
9127 && !indirect_base1
9128 && TREE_CODE (base1) == SSA_NAME
9129 && SSA_NAME_IS_DEFAULT_DEF (base1)
9130 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9131 || (TREE_CODE (arg1) == ADDR_EXPR
9132 && indirect_base1
9133 && TREE_CODE (base1) == VAR_DECL
9134 && auto_var_in_fn_p (base1, current_function_decl)
9135 && !indirect_base0
9136 && TREE_CODE (base0) == SSA_NAME
9137 && SSA_NAME_IS_DEFAULT_DEF (base0)
9138 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9139 {
9140 if (code == NE_EXPR)
9141 return constant_boolean_node (1, type);
9142 else if (code == EQ_EXPR)
9143 return constant_boolean_node (0, type);
9144 }
9145 /* If we have equivalent bases we might be able to simplify. */
9146 else if (indirect_base0 == indirect_base1
9147 && operand_equal_p (base0, base1, 0))
9148 {
9149 /* We can fold this expression to a constant if the non-constant
9150 offset parts are equal. */
9151 if ((offset0 == offset1
9152 || (offset0 && offset1
9153 && operand_equal_p (offset0, offset1, 0)))
9154 && (code == EQ_EXPR
9155 || code == NE_EXPR
9156 || (indirect_base0 && DECL_P (base0))
9157 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9158
9159 {
9160 if (code != EQ_EXPR
9161 && code != NE_EXPR
9162 && bitpos0 != bitpos1
9163 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9164 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9165 fold_overflow_warning (("assuming pointer wraparound does not "
9166 "occur when comparing P +- C1 with "
9167 "P +- C2"),
9168 WARN_STRICT_OVERFLOW_CONDITIONAL);
9169
9170 switch (code)
9171 {
9172 case EQ_EXPR:
9173 return constant_boolean_node (bitpos0 == bitpos1, type);
9174 case NE_EXPR:
9175 return constant_boolean_node (bitpos0 != bitpos1, type);
9176 case LT_EXPR:
9177 return constant_boolean_node (bitpos0 < bitpos1, type);
9178 case LE_EXPR:
9179 return constant_boolean_node (bitpos0 <= bitpos1, type);
9180 case GE_EXPR:
9181 return constant_boolean_node (bitpos0 >= bitpos1, type);
9182 case GT_EXPR:
9183 return constant_boolean_node (bitpos0 > bitpos1, type);
9184 default:;
9185 }
9186 }
9187 /* We can simplify the comparison to a comparison of the variable
9188 offset parts if the constant offset parts are equal.
9189 Be careful to use signed sizetype here because otherwise we
9190 mess with array offsets in the wrong way. This is possible
9191 because pointer arithmetic is restricted to remain within an
9192 object and overflow on pointer differences is undefined as of
9193 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9194 else if (bitpos0 == bitpos1
9195 && ((code == EQ_EXPR || code == NE_EXPR)
9196 || (indirect_base0 && DECL_P (base0))
9197 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9198 {
9199 /* By converting to signed sizetype we cover middle-end pointer
9200 arithmetic which operates on unsigned pointer types of size
9201 type size and ARRAY_REF offsets which are properly sign or
9202 zero extended from their type in case it is narrower than
9203 sizetype. */
9204 if (offset0 == NULL_TREE)
9205 offset0 = build_int_cst (ssizetype, 0);
9206 else
9207 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9208 if (offset1 == NULL_TREE)
9209 offset1 = build_int_cst (ssizetype, 0);
9210 else
9211 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9212
9213 if (code != EQ_EXPR
9214 && code != NE_EXPR
9215 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9216 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9217 fold_overflow_warning (("assuming pointer wraparound does not "
9218 "occur when comparing P +- C1 with "
9219 "P +- C2"),
9220 WARN_STRICT_OVERFLOW_COMPARISON);
9221
9222 return fold_build2_loc (loc, code, type, offset0, offset1);
9223 }
9224 }
9225 /* For non-equal bases we can simplify if they are addresses
9226 of local binding decls or constants. */
9227 else if (indirect_base0 && indirect_base1
9228 /* We know that !operand_equal_p (base0, base1, 0)
9229 because the if condition was false. But make
9230 sure two decls are not the same. */
9231 && base0 != base1
9232 && TREE_CODE (arg0) == ADDR_EXPR
9233 && TREE_CODE (arg1) == ADDR_EXPR
9234 && (((TREE_CODE (base0) == VAR_DECL
9235 || TREE_CODE (base0) == PARM_DECL)
9236 && (targetm.binds_local_p (base0)
9237 || CONSTANT_CLASS_P (base1)))
9238 || CONSTANT_CLASS_P (base0))
9239 && (((TREE_CODE (base1) == VAR_DECL
9240 || TREE_CODE (base1) == PARM_DECL)
9241 && (targetm.binds_local_p (base1)
9242 || CONSTANT_CLASS_P (base0)))
9243 || CONSTANT_CLASS_P (base1)))
9244 {
9245 if (code == EQ_EXPR)
9246 return omit_two_operands_loc (loc, type, boolean_false_node,
9247 arg0, arg1);
9248 else if (code == NE_EXPR)
9249 return omit_two_operands_loc (loc, type, boolean_true_node,
9250 arg0, arg1);
9251 }
9252 /* For equal offsets we can simplify to a comparison of the
9253 base addresses. */
9254 else if (bitpos0 == bitpos1
9255 && (indirect_base0
9256 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9257 && (indirect_base1
9258 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9259 && ((offset0 == offset1)
9260 || (offset0 && offset1
9261 && operand_equal_p (offset0, offset1, 0))))
9262 {
9263 if (indirect_base0)
9264 base0 = build_fold_addr_expr_loc (loc, base0);
9265 if (indirect_base1)
9266 base1 = build_fold_addr_expr_loc (loc, base1);
9267 return fold_build2_loc (loc, code, type, base0, base1);
9268 }
9269 }
9270
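/* Illustrative example (hypothetical): given

     static int a[10];
     ... &a[2] < &a[5] ...

   both operands decompose to the common base "a" plus constant byte
   offsets (8 and 20, assuming 4-byte int), so the comparison folds to
   the compile-time constant 1.  */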
9271 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9272 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9273 the resulting offset is smaller in absolute value than the
9274 original one. */
9275 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9276 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9277 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9278 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9279 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9280 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9281 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9282 {
9283 tree const1 = TREE_OPERAND (arg0, 1);
9284 tree const2 = TREE_OPERAND (arg1, 1);
9285 tree variable1 = TREE_OPERAND (arg0, 0);
9286 tree variable2 = TREE_OPERAND (arg1, 0);
9287 tree cst;
9288 const char * const warnmsg = G_("assuming signed overflow does not "
9289 "occur when combining constants around "
9290 "a comparison");
9291
9292 /* Put the constant on the side where it doesn't overflow and is
9293 of lower absolute value than before. */
9294 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9295 ? MINUS_EXPR : PLUS_EXPR,
9296 const2, const1);
9297 if (!TREE_OVERFLOW (cst)
9298 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9299 {
9300 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9301 return fold_build2_loc (loc, code, type,
9302 variable1,
9303 fold_build2_loc (loc,
9304 TREE_CODE (arg1), TREE_TYPE (arg1),
9305 variable2, cst));
9306 }
9307
9308 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9309 ? MINUS_EXPR : PLUS_EXPR,
9310 const1, const2);
9311 if (!TREE_OVERFLOW (cst)
9312 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9313 {
9314 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9315 return fold_build2_loc (loc, code, type,
9316 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9317 variable1, cst),
9318 variable2);
9319 }
9320 }
9321
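/* Illustrative example (hypothetical): for signed x and y,

     x + 100 < y + 1	becomes		x < y + -99

   replacing the two constants by a single one of smaller absolute
   value, so no overflow is introduced.  */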
9322 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9323 signed arithmetic case. That form is created by the compiler
9324 often enough for folding it to be of value. One example is in
9325 computing loop trip counts after Operator Strength Reduction. */
9326 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9327 && TREE_CODE (arg0) == MULT_EXPR
9328 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9329 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9330 && integer_zerop (arg1))
9331 {
9332 tree const1 = TREE_OPERAND (arg0, 1);
9333 tree const2 = arg1; /* zero */
9334 tree variable1 = TREE_OPERAND (arg0, 0);
9335 enum tree_code cmp_code = code;
9336
9337 /* Handle unfolded multiplication by zero. */
9338 if (integer_zerop (const1))
9339 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9340
9341 fold_overflow_warning (("assuming signed overflow does not occur when "
9342 "eliminating multiplication in comparison "
9343 "with zero"),
9344 WARN_STRICT_OVERFLOW_COMPARISON);
9345
9346 /* If const1 is negative we swap the sense of the comparison. */
9347 if (tree_int_cst_sgn (const1) < 0)
9348 cmp_code = swap_tree_comparison (cmp_code);
9349
9350 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9351 }
9352
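/* Illustrative examples (hypothetical): for signed x,

     x * 4 > 0		becomes		x > 0
     x * -4 > 0		becomes		x < 0

   the negative multiplier swapping the sense of the comparison.  */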
9353 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9354 if (tem)
9355 return tem;
9356
9357 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9358 {
9359 tree targ0 = strip_float_extensions (arg0);
9360 tree targ1 = strip_float_extensions (arg1);
9361 tree newtype = TREE_TYPE (targ0);
9362
9363 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9364 newtype = TREE_TYPE (targ1);
9365
9366 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9367 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9368 return fold_build2_loc (loc, code, type,
9369 fold_convert_loc (loc, newtype, targ0),
9370 fold_convert_loc (loc, newtype, targ1));
9371
9372 /* (-a) CMP (-b) -> b CMP a */
9373 if (TREE_CODE (arg0) == NEGATE_EXPR
9374 && TREE_CODE (arg1) == NEGATE_EXPR)
9375 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9376 TREE_OPERAND (arg0, 0));
9377
9378 if (TREE_CODE (arg1) == REAL_CST)
9379 {
9380 REAL_VALUE_TYPE cst;
9381 cst = TREE_REAL_CST (arg1);
9382
9383 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9384 if (TREE_CODE (arg0) == NEGATE_EXPR)
9385 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9386 TREE_OPERAND (arg0, 0),
9387 build_real (TREE_TYPE (arg1),
9388 real_value_negate (&cst)));
9389
9390 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9391 /* a CMP (-0) -> a CMP 0 */
9392 if (REAL_VALUE_MINUS_ZERO (cst))
9393 return fold_build2_loc (loc, code, type, arg0,
9394 build_real (TREE_TYPE (arg1), dconst0));
9395
9396 /* x != NaN is always true, other ops are always false. */
9397 if (REAL_VALUE_ISNAN (cst)
9398 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9399 {
9400 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9401 return omit_one_operand_loc (loc, type, tem, arg0);
9402 }
9403
9404 /* Fold comparisons against infinity. */
9405 if (REAL_VALUE_ISINF (cst)
9406 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9407 {
9408 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9409 if (tem != NULL_TREE)
9410 return tem;
9411 }
9412 }
9413
9414 /* If this is a comparison of a real constant with a PLUS_EXPR
9415 or a MINUS_EXPR of a real constant, we can convert it into a
9416 comparison with a revised real constant as long as no overflow
9417 occurs when unsafe_math_optimizations are enabled. */
9418 if (flag_unsafe_math_optimizations
9419 && TREE_CODE (arg1) == REAL_CST
9420 && (TREE_CODE (arg0) == PLUS_EXPR
9421 || TREE_CODE (arg0) == MINUS_EXPR)
9422 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9423 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9424 ? MINUS_EXPR : PLUS_EXPR,
9425 arg1, TREE_OPERAND (arg0, 1)))
9426 && !TREE_OVERFLOW (tem))
9427 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9428
9429 /* Likewise, we can simplify a comparison of a real constant with
9430 a MINUS_EXPR whose first operand is also a real constant, i.e.
9431 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9432 floating-point types only if -fassociative-math is set. */
9433 if (flag_associative_math
9434 && TREE_CODE (arg1) == REAL_CST
9435 && TREE_CODE (arg0) == MINUS_EXPR
9436 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9437 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9438 arg1))
9439 && !TREE_OVERFLOW (tem))
9440 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9441 TREE_OPERAND (arg0, 1), tem);
9442
9443 /* Fold comparisons against built-in math functions. */
9444 if (TREE_CODE (arg1) == REAL_CST
9445 && flag_unsafe_math_optimizations
9446 && ! flag_errno_math)
9447 {
9448 enum built_in_function fcode = builtin_mathfn_code (arg0);
9449
9450 if (fcode != END_BUILTINS)
9451 {
9452 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9453 if (tem != NULL_TREE)
9454 return tem;
9455 }
9456 }
9457 }
9458
9459 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9460 && CONVERT_EXPR_P (arg0))
9461 {
9462 /* If we are widening one operand of an integer comparison,
9463 see if the other operand is similarly being widened. Perhaps we
9464 can do the comparison in the narrower type. */
9465 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9466 if (tem)
9467 return tem;
9468
9469 /* Or if we are changing signedness. */
9470 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9471 if (tem)
9472 return tem;
9473 }
9474
9475 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9476 constant, we can simplify it. */
9477 if (TREE_CODE (arg1) == INTEGER_CST
9478 && (TREE_CODE (arg0) == MIN_EXPR
9479 || TREE_CODE (arg0) == MAX_EXPR)
9480 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9481 {
9482 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9483 if (tem)
9484 return tem;
9485 }
9486
9487 /* Simplify comparison of something with itself. (For IEEE
9488 floating-point, we can only do some of these simplifications.) */
9489 if (operand_equal_p (arg0, arg1, 0))
9490 {
9491 switch (code)
9492 {
9493 case EQ_EXPR:
9494 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9495 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9496 return constant_boolean_node (1, type);
9497 break;
9498
9499 case GE_EXPR:
9500 case LE_EXPR:
9501 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9502 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9503 return constant_boolean_node (1, type);
9504 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9505
9506 case NE_EXPR:
9507 /* For NE, we can only do this simplification if integer
9508 or we don't honor IEEE floating point NaNs. */
9509 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9510 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9511 break;
9512 /* ... fall through ... */
9513 case GT_EXPR:
9514 case LT_EXPR:
9515 return constant_boolean_node (0, type);
9516 default:
9517 gcc_unreachable ();
9518 }
9519 }
9520
9521 /* If we are comparing an expression that just has comparisons
9522 of two integer values, arithmetic expressions of those comparisons,
9523 and constants, we can simplify it. There are only three cases
9524 to check: the two values can either be equal, the first can be
9525 greater, or the second can be greater. Fold the expression for
9526 those three values. Since each value must be 0 or 1, we have
9527 eight possibilities, each of which corresponds to the constant 0
9528 or 1 or one of the six possible comparisons.
9529
9530 This handles common cases like (a > b) == 0 but also handles
9531 expressions like ((x > y) - (y > x)) > 0, which supposedly
9532 occur in macroized code. */
9533
9534 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9535 {
9536 tree cval1 = 0, cval2 = 0;
9537 int save_p = 0;
9538
9539 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9540 /* Don't handle degenerate cases here; they should already
9541 have been handled anyway. */
9542 && cval1 != 0 && cval2 != 0
9543 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9544 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9545 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9546 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9547 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9548 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9549 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9550 {
9551 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9552 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9553
9554 /* We can't just pass T to eval_subst in case cval1 or cval2
9555 was the same as ARG1. */
9556
9557 tree high_result
9558 = fold_build2_loc (loc, code, type,
9559 eval_subst (loc, arg0, cval1, maxval,
9560 cval2, minval),
9561 arg1);
9562 tree equal_result
9563 = fold_build2_loc (loc, code, type,
9564 eval_subst (loc, arg0, cval1, maxval,
9565 cval2, maxval),
9566 arg1);
9567 tree low_result
9568 = fold_build2_loc (loc, code, type,
9569 eval_subst (loc, arg0, cval1, minval,
9570 cval2, maxval),
9571 arg1);
9572
9573 /* All three of these results should be 0 or 1. Confirm they are.
9574 Then use those values to select the proper code to use. */
9575
9576 if (TREE_CODE (high_result) == INTEGER_CST
9577 && TREE_CODE (equal_result) == INTEGER_CST
9578 && TREE_CODE (low_result) == INTEGER_CST)
9579 {
9580 /* Make a 3-bit mask with the high-order bit being the
9581 value for `>', the next for '=', and the low for '<'. */
9582 switch ((integer_onep (high_result) * 4)
9583 + (integer_onep (equal_result) * 2)
9584 + integer_onep (low_result))
9585 {
9586 case 0:
9587 /* Always false. */
9588 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9589 case 1:
9590 code = LT_EXPR;
9591 break;
9592 case 2:
9593 code = EQ_EXPR;
9594 break;
9595 case 3:
9596 code = LE_EXPR;
9597 break;
9598 case 4:
9599 code = GT_EXPR;
9600 break;
9601 case 5:
9602 code = NE_EXPR;
9603 break;
9604 case 6:
9605 code = GE_EXPR;
9606 break;
9607 case 7:
9608 /* Always true. */
9609 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9610 }
9611
9612 if (save_p)
9613 {
9614 tem = save_expr (build2 (code, type, cval1, cval2));
9615 SET_EXPR_LOCATION (tem, loc);
9616 return tem;
9617 }
9618 return fold_build2_loc (loc, code, type, cval1, cval2);
9619 }
9620 }
9621 }
9622
9623 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9624 into a single range test. */
9625 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9626 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9627 && TREE_CODE (arg1) == INTEGER_CST
9628 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9629 && !integer_zerop (TREE_OPERAND (arg0, 1))
9630 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9631 && !TREE_OVERFLOW (arg1))
9632 {
9633 tem = fold_div_compare (loc, code, type, arg0, arg1);
9634 if (tem != NULL_TREE)
9635 return tem;
9636 }
9637
9638 /* Fold ~X op ~Y as Y op X. */
9639 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9640 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9641 {
9642 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9643 return fold_build2_loc (loc, code, type,
9644 fold_convert_loc (loc, cmp_type,
9645 TREE_OPERAND (arg1, 0)),
9646 TREE_OPERAND (arg0, 0));
9647 }
9648
9649 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9650 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9651 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9652 {
9653 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9654 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9655 TREE_OPERAND (arg0, 0),
9656 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9657 fold_convert_loc (loc, cmp_type, arg1)));
9658 }
9659
9660 return NULL_TREE;
9661 }
9662
9663
9664 /* Subroutine of fold_binary. Optimize complex multiplications of the
9665 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9666 argument EXPR represents the expression "z" of type TYPE. */
9667
9668 static tree
9669 fold_mult_zconjz (location_t loc, tree type, tree expr)
9670 {
9671 tree itype = TREE_TYPE (type);
9672 tree rpart, ipart, tem;
9673
9674 if (TREE_CODE (expr) == COMPLEX_EXPR)
9675 {
9676 rpart = TREE_OPERAND (expr, 0);
9677 ipart = TREE_OPERAND (expr, 1);
9678 }
9679 else if (TREE_CODE (expr) == COMPLEX_CST)
9680 {
9681 rpart = TREE_REALPART (expr);
9682 ipart = TREE_IMAGPART (expr);
9683 }
9684 else
9685 {
9686 expr = save_expr (expr);
9687 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9688 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9689 }
9690
9691 rpart = save_expr (rpart);
9692 ipart = save_expr (ipart);
9693 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9694 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9695 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9696 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9697 build_zero_cst (itype));
9698 }
9699
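/* Illustrative example (hypothetical, using the GNU __real__/__imag__
   extensions): z * conj(z) computed the way fold_mult_zconjz builds
   it, as realpart(z)^2 + imagpart(z)^2 with a zero imaginary part.  */
static double _Complex
example_zconjz (double _Complex z)
{
  double re = __real__ z, im = __imag__ z;
  return re * re + im * im;	/* imaginary part is zero */
}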
9700
9701 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9702 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9703 guarantees that P and N have the same least significant log2(M) bits.
9704 N is not otherwise constrained. In particular, N is not normalized to
9705 0 <= N < M as is common. In general, the precise value of P is unknown.
9706 M is chosen as large as possible such that constant N can be determined.
9707
9708 Returns M and sets *RESIDUE to N.
9709
9710 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9711 account. This is not always possible due to PR 35705.
9712 */
9713
9714 static unsigned HOST_WIDE_INT
9715 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9716 bool allow_func_align)
9717 {
9718 enum tree_code code;
9719
9720 *residue = 0;
9721
9722 code = TREE_CODE (expr);
9723 if (code == ADDR_EXPR)
9724 {
9725 unsigned int bitalign;
9726 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9727 *residue /= BITS_PER_UNIT;
9728 return bitalign / BITS_PER_UNIT;
9729 }
9730 else if (code == POINTER_PLUS_EXPR)
9731 {
9732 tree op0, op1;
9733 unsigned HOST_WIDE_INT modulus;
9734 enum tree_code inner_code;
9735
9736 op0 = TREE_OPERAND (expr, 0);
9737 STRIP_NOPS (op0);
9738 modulus = get_pointer_modulus_and_residue (op0, residue,
9739 allow_func_align);
9740
9741 op1 = TREE_OPERAND (expr, 1);
9742 STRIP_NOPS (op1);
9743 inner_code = TREE_CODE (op1);
9744 if (inner_code == INTEGER_CST)
9745 {
9746 *residue += TREE_INT_CST_LOW (op1);
9747 return modulus;
9748 }
9749 else if (inner_code == MULT_EXPR)
9750 {
9751 op1 = TREE_OPERAND (op1, 1);
9752 if (TREE_CODE (op1) == INTEGER_CST)
9753 {
9754 unsigned HOST_WIDE_INT align;
9755
9756 /* Compute the greatest power-of-2 divisor of op1. */
9757 align = TREE_INT_CST_LOW (op1);
9758 align &= -align;
9759
9760 /* If align is non-zero and less than *modulus, replace
9761 *modulus with align. If align is 0, then either op1 is 0
9762 or the greatest power-of-2 divisor of op1 doesn't fit in an
9763 unsigned HOST_WIDE_INT. In either case, no additional
9764 constraint is imposed. */
9765 if (align)
9766 modulus = MIN (modulus, align);
9767
9768 return modulus;
9769 }
9770 }
9771 }
9772
9773 /* If we get here, we were unable to determine anything useful about the
9774 expression. */
9775 return 1;
9776 }
9777
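/* Illustrative example (hypothetical): for a 16-byte-aligned object,
   base + 4 + i * 8 has modulus 8 (the MIN of the base alignment 16 and
   the greatest power-of-2 divisor of the multiplier 8) and residue 4,
   so the address is congruent to 4 modulo 8 for every i.  */
static unsigned long
example_modulus_residue (char *base16, long i)
{
  return (unsigned long) (base16 + 4 + i * 8) % 8;	/* always 4 */
}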
9778 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9779 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9780
9781 static bool
9782 vec_cst_ctor_to_array (tree arg, tree *elts)
9783 {
9784 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9785
9786 if (TREE_CODE (arg) == VECTOR_CST)
9787 {
9788 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9789 elts[i] = VECTOR_CST_ELT (arg, i);
9790 }
9791 else if (TREE_CODE (arg) == CONSTRUCTOR)
9792 {
9793 constructor_elt *elt;
9794
9795 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9796 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9797 return false;
9798 else
9799 elts[i] = elt->value;
9800 }
9801 else
9802 return false;
9803 for (; i < nelts; i++)
9804 elts[i]
9805 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9806 return true;
9807 }
9808
9809 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9810 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9811 NULL_TREE otherwise. */
9812
9813 static tree
9814 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9815 {
9816 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9817 tree *elts;
9818 bool need_ctor = false;
9819
9820 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9821 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9822 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9823 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9824 return NULL_TREE;
9825
9826 elts = XALLOCAVEC (tree, nelts * 3);
9827 if (!vec_cst_ctor_to_array (arg0, elts)
9828 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9829 return NULL_TREE;
9830
9831 for (i = 0; i < nelts; i++)
9832 {
9833 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9834 need_ctor = true;
9835 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9836 }
9837
9838 if (need_ctor)
9839 {
9840 vec<constructor_elt, va_gc> *v;
9841 vec_alloc (v, nelts);
9842 for (i = 0; i < nelts; i++)
9843 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9844 return build_constructor (type, v);
9845 }
9846 else
9847 return build_vector (type, &elts[2 * nelts]);
9848 }
9849
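/* Illustrative sketch (hypothetical helper): a scalar model of
   fold_vec_perm.  The ELTS array above conceptually holds ARG0
   followed by ARG1, and SEL indexes into that concatenation.  */
static void
example_vec_perm (const int *arg0, const int *arg1,
		  const unsigned char *sel, int *out, unsigned int nelts)
{
  unsigned int i;
  for (i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}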
9850 /* Try to fold a pointer difference of type TYPE between two address
9851 expressions of array references AREF0 and AREF1 using location LOC.
9852 Return a simplified expression for the difference or NULL_TREE. */
9853
9854 static tree
9855 fold_addr_of_array_ref_difference (location_t loc, tree type,
9856 tree aref0, tree aref1)
9857 {
9858 tree base0 = TREE_OPERAND (aref0, 0);
9859 tree base1 = TREE_OPERAND (aref1, 0);
9860 tree base_offset = build_int_cst (type, 0);
9861
9862 /* If the bases are array references as well, recurse. If the bases
9863 are pointer indirections compute the difference of the pointers.
9864 If the bases are equal, we are set. */
9865 if ((TREE_CODE (base0) == ARRAY_REF
9866 && TREE_CODE (base1) == ARRAY_REF
9867 && (base_offset
9868 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9869 || (INDIRECT_REF_P (base0)
9870 && INDIRECT_REF_P (base1)
9871 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9872 TREE_OPERAND (base0, 0),
9873 TREE_OPERAND (base1, 0))))
9874 || operand_equal_p (base0, base1, 0))
9875 {
9876 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9877 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9878 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9879 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9880 return fold_build2_loc (loc, PLUS_EXPR, type,
9881 base_offset,
9882 fold_build2_loc (loc, MULT_EXPR, type,
9883 diff, esz));
9884 }
9885 return NULL_TREE;
9886 }
9887
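/* Illustrative example (hypothetical): &a[i] - &a[j] is folded via
   fold_addr_of_array_ref_difference to (i - j) scaled by the element
   size, which the later pointer-difference division by
   sizeof (double) cancels.  */
static long
example_aref_difference (long i, long j)
{
  static double a[100];
  return &a[i] - &a[j];		/* folds to i - j */
}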
9888 /* If the real or vector real constant CST of type TYPE has an exact
9889 inverse, return it, else return NULL. */
9890
9891 static tree
9892 exact_inverse (tree type, tree cst)
9893 {
9894 REAL_VALUE_TYPE r;
9895 tree unit_type, *elts;
9896 enum machine_mode mode;
9897 unsigned vec_nelts, i;
9898
9899 switch (TREE_CODE (cst))
9900 {
9901 case REAL_CST:
9902 r = TREE_REAL_CST (cst);
9903
9904 if (exact_real_inverse (TYPE_MODE (type), &r))
9905 return build_real (type, r);
9906
9907 return NULL_TREE;
9908
9909 case VECTOR_CST:
9910 vec_nelts = VECTOR_CST_NELTS (cst);
9911 elts = XALLOCAVEC (tree, vec_nelts);
9912 unit_type = TREE_TYPE (type);
9913 mode = TYPE_MODE (unit_type);
9914
9915 for (i = 0; i < vec_nelts; i++)
9916 {
9917 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9918 if (!exact_real_inverse (mode, &r))
9919 return NULL_TREE;
9920 elts[i] = build_real (unit_type, r);
9921 }
9922
9923 return build_vector (type, elts);
9924
9925 default:
9926 return NULL_TREE;
9927 }
9928 }
9929
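/* Illustrative example (hypothetical): 4.0 has the exact binary
   reciprocal 0.25, so x / 4.0 may be rewritten as x * 0.25 without
   changing any result; 3.0 has no exact inverse, so x / 3.0 must be
   left as a division.  */
static double
example_exact_inverse (double x)
{
  return x * 0.25;	/* exact rewrite of x / 4.0 */
}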
9930 /* Fold a binary expression of code CODE and type TYPE with operands
9931 OP0 and OP1. LOC is the location of the resulting expression.
9932 Return the folded expression if folding is successful. Otherwise,
9933 return NULL_TREE. */
9934
9935 tree
9936 fold_binary_loc (location_t loc,
9937 enum tree_code code, tree type, tree op0, tree op1)
9938 {
9939 enum tree_code_class kind = TREE_CODE_CLASS (code);
9940 tree arg0, arg1, tem;
9941 tree t1 = NULL_TREE;
9942 bool strict_overflow_p;
9943 unsigned int prec;
9944
9945 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9946 && TREE_CODE_LENGTH (code) == 2
9947 && op0 != NULL_TREE
9948 && op1 != NULL_TREE);
9949
9950 arg0 = op0;
9951 arg1 = op1;
9952
9953 /* Strip any conversions that don't change the mode. This is
9954 safe for every expression, except for a comparison expression
9955 because its signedness is derived from its operands. So, in
9956 the latter case, only strip conversions that don't change the
9957 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9958 preserved.
9959
9960 Note that this is done as an internal manipulation within the
9961 constant folder, in order to find the simplest representation
9962 of the arguments so that their form can be studied. In any
9963 cases, the appropriate type conversions should be put back in
9964 the tree that will get out of the constant folder. */
9965
9966 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9967 {
9968 STRIP_SIGN_NOPS (arg0);
9969 STRIP_SIGN_NOPS (arg1);
9970 }
9971 else
9972 {
9973 STRIP_NOPS (arg0);
9974 STRIP_NOPS (arg1);
9975 }
9976
9977 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9978 constant but we can't do arithmetic on them. */
9979 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9980 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9981 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9982 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9983 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9984 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9985 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9986 {
9987 if (kind == tcc_binary)
9988 {
9989 /* Make sure type and arg0 have the same saturating flag. */
9990 gcc_assert (TYPE_SATURATING (type)
9991 == TYPE_SATURATING (TREE_TYPE (arg0)));
9992 tem = const_binop (code, arg0, arg1);
9993 }
9994 else if (kind == tcc_comparison)
9995 tem = fold_relational_const (code, type, arg0, arg1);
9996 else
9997 tem = NULL_TREE;
9998
9999 if (tem != NULL_TREE)
10000 {
10001 if (TREE_TYPE (tem) != type)
10002 tem = fold_convert_loc (loc, type, tem);
10003 return tem;
10004 }
10005 }
10006
10007 /* If this is a commutative operation, and ARG0 is a constant, move it
10008 to ARG1 to reduce the number of tests below. */
10009 if (commutative_tree_code (code)
10010 && tree_swap_operands_p (arg0, arg1, true))
10011 return fold_build2_loc (loc, code, type, op1, op0);
10012
10013 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10014
10015 First check for cases where an arithmetic operation is applied to a
10016 compound, conditional, or comparison operation. Push the arithmetic
10017 operation inside the compound or conditional to see if any folding
10018 can then be done. Convert comparison to conditional for this purpose.
10019 This also optimizes non-constant cases that used to be done in
10020 expand_expr.
10021
10022 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
10023 one of the operands is a comparison and the other is a comparison, a
10024 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10025 code below would make the expression more complex. Change it to a
10026 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10027 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10028
10029 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10030 || code == EQ_EXPR || code == NE_EXPR)
10031 && TREE_CODE (type) != VECTOR_TYPE
10032 && ((truth_value_p (TREE_CODE (arg0))
10033 && (truth_value_p (TREE_CODE (arg1))
10034 || (TREE_CODE (arg1) == BIT_AND_EXPR
10035 && integer_onep (TREE_OPERAND (arg1, 1)))))
10036 || (truth_value_p (TREE_CODE (arg1))
10037 && (truth_value_p (TREE_CODE (arg0))
10038 || (TREE_CODE (arg0) == BIT_AND_EXPR
10039 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10040 {
10041 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10042 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10043 : TRUTH_XOR_EXPR,
10044 boolean_type_node,
10045 fold_convert_loc (loc, boolean_type_node, arg0),
10046 fold_convert_loc (loc, boolean_type_node, arg1));
10047
10048 if (code == EQ_EXPR)
10049 tem = invert_truthvalue_loc (loc, tem);
10050
10051 return fold_convert_loc (loc, type, tem);
10052 }
10053
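/* Illustrative example (hypothetical): in (a > b) & (c > d) both
   operands are truth values, so the BIT_AND_EXPR is rewritten above
   as (a > b) && (c > d); likewise (a > b) == (c > d) becomes the
   inversion of a TRUTH_XOR_EXPR.  */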
10054 if (TREE_CODE_CLASS (code) == tcc_binary
10055 || TREE_CODE_CLASS (code) == tcc_comparison)
10056 {
10057 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10058 {
10059 tem = fold_build2_loc (loc, code, type,
10060 fold_convert_loc (loc, TREE_TYPE (op0),
10061 TREE_OPERAND (arg0, 1)), op1);
10062 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10063 tem);
10064 }
10065 if (TREE_CODE (arg1) == COMPOUND_EXPR
10066 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10067 {
10068 tem = fold_build2_loc (loc, code, type, op0,
10069 fold_convert_loc (loc, TREE_TYPE (op1),
10070 TREE_OPERAND (arg1, 1)));
10071 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10072 tem);
10073 }
10074
10075 if (TREE_CODE (arg0) == COND_EXPR
10076 || TREE_CODE (arg0) == VEC_COND_EXPR
10077 || COMPARISON_CLASS_P (arg0))
10078 {
10079 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10080 arg0, arg1,
10081 /*cond_first_p=*/1);
10082 if (tem != NULL_TREE)
10083 return tem;
10084 }
10085
10086 if (TREE_CODE (arg1) == COND_EXPR
10087 || TREE_CODE (arg1) == VEC_COND_EXPR
10088 || COMPARISON_CLASS_P (arg1))
10089 {
10090 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10091 arg1, arg0,
10092 /*cond_first_p=*/0);
10093 if (tem != NULL_TREE)
10094 return tem;
10095 }
10096 }
10097
10098 switch (code)
10099 {
10100 case MEM_REF:
10101 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10102 if (TREE_CODE (arg0) == ADDR_EXPR
10103 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10104 {
10105 tree iref = TREE_OPERAND (arg0, 0);
10106 return fold_build2 (MEM_REF, type,
10107 TREE_OPERAND (iref, 0),
10108 int_const_binop (PLUS_EXPR, arg1,
10109 TREE_OPERAND (iref, 1)));
10110 }
10111
10112 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10113 if (TREE_CODE (arg0) == ADDR_EXPR
10114 && handled_component_p (TREE_OPERAND (arg0, 0)))
10115 {
10116 tree base;
10117 HOST_WIDE_INT coffset;
10118 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10119 &coffset);
10120 if (!base)
10121 return NULL_TREE;
10122 return fold_build2 (MEM_REF, type,
10123 build_fold_addr_expr (base),
10124 int_const_binop (PLUS_EXPR, arg1,
10125 size_int (coffset)));
10126 }
10127
10128 return NULL_TREE;
10129
10130 case POINTER_PLUS_EXPR:
10131 /* 0 +p index -> (type)index */
10132 if (integer_zerop (arg0))
10133 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10134
10135 /* PTR +p 0 -> PTR */
10136 if (integer_zerop (arg1))
10137 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10138
10139 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10140 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10141 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10142 return fold_convert_loc (loc, type,
10143 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10144 fold_convert_loc (loc, sizetype,
10145 arg1),
10146 fold_convert_loc (loc, sizetype,
10147 arg0)));
10148
10149 /* (PTR +p B) +p A -> PTR +p (B + A) */
10150 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10151 {
10152 tree inner;
10153 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10154 tree arg00 = TREE_OPERAND (arg0, 0);
10155 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10156 arg01, fold_convert_loc (loc, sizetype, arg1));
10157 return fold_convert_loc (loc, type,
10158 fold_build_pointer_plus_loc (loc,
10159 arg00, inner));
10160 }
10161
10162 /* PTR_CST +p CST -> CST1 */
10163 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10164 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10165 fold_convert_loc (loc, type, arg1));
10166
10167 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10168 of the array. The loop optimizer sometimes produces this type of
10169 expression. */
10170 if (TREE_CODE (arg0) == ADDR_EXPR)
10171 {
10172 tem = try_move_mult_to_index (loc, arg0,
10173 fold_convert_loc (loc,
10174 ssizetype, arg1));
10175 if (tem)
10176 return fold_convert_loc (loc, type, tem);
10177 }
10178
10179 return NULL_TREE;
10180
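/* Illustrative example (hypothetical) of the POINTER_PLUS_EXPR
   re-association above: (p + b) + a becomes p + (b + a), so nested
   pointer adjustments collapse into a single offset computation.  */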
10181 case PLUS_EXPR:
10182 /* A + (-B) -> A - B */
10183 if (TREE_CODE (arg1) == NEGATE_EXPR)
10184 return fold_build2_loc (loc, MINUS_EXPR, type,
10185 fold_convert_loc (loc, type, arg0),
10186 fold_convert_loc (loc, type,
10187 TREE_OPERAND (arg1, 0)));
10188 /* (-A) + B -> B - A */
10189 if (TREE_CODE (arg0) == NEGATE_EXPR
10190 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10191 return fold_build2_loc (loc, MINUS_EXPR, type,
10192 fold_convert_loc (loc, type, arg1),
10193 fold_convert_loc (loc, type,
10194 TREE_OPERAND (arg0, 0)));
10195
10196 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10197 {
10198 /* Convert ~A + 1 to -A. */
10199 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10200 && integer_onep (arg1))
10201 return fold_build1_loc (loc, NEGATE_EXPR, type,
10202 fold_convert_loc (loc, type,
10203 TREE_OPERAND (arg0, 0)));
10204
10205 /* ~X + X is -1. */
10206 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10207 && !TYPE_OVERFLOW_TRAPS (type))
10208 {
10209 tree tem = TREE_OPERAND (arg0, 0);
10210
10211 STRIP_NOPS (tem);
10212 if (operand_equal_p (tem, arg1, 0))
10213 {
10214 t1 = build_all_ones_cst (type);
10215 return omit_one_operand_loc (loc, type, t1, arg1);
10216 }
10217 }
10218
10219 /* X + ~X is -1. */
10220 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10221 && !TYPE_OVERFLOW_TRAPS (type))
10222 {
10223 tree tem = TREE_OPERAND (arg1, 0);
10224
10225 STRIP_NOPS (tem);
10226 if (operand_equal_p (arg0, tem, 0))
10227 {
10228 t1 = build_all_ones_cst (type);
10229 return omit_one_operand_loc (loc, type, t1, arg0);
10230 }
10231 }
10232
10233 /* X + (X / CST) * -CST is X % CST. */
10234 if (TREE_CODE (arg1) == MULT_EXPR
10235 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10236 && operand_equal_p (arg0,
10237 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10238 {
10239 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10240 tree cst1 = TREE_OPERAND (arg1, 1);
10241 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10242 cst1, cst0);
10243 if (sum && integer_zerop (sum))
10244 return fold_convert_loc (loc, type,
10245 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10246 TREE_TYPE (arg0), arg0,
10247 cst0));
10248 }
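/* Editorial note (hypothetical example): with truncating division,
   x == (x / 16) * 16 + x % 16, so x + (x / 16) * -16 folds to
   x % 16; the check above verifies exactly that cst1 == -cst0. */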
10249 }
10250
10251 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10252 one. Make sure the type is not saturating and has the signedness of
10253 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10254 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10255 if ((TREE_CODE (arg0) == MULT_EXPR
10256 || TREE_CODE (arg1) == MULT_EXPR)
10257 && !TYPE_SATURATING (type)
10258 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10259 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10260 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10261 {
10262 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10263 if (tem)
10264 return tem;
10265 }
10266
10267 if (! FLOAT_TYPE_P (type))
10268 {
10269 if (integer_zerop (arg1))
10270 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10271
10272 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10273 with a constant, and the two constants have no bits in common,
10274 we should treat this as a BIT_IOR_EXPR since this may produce more
10275 simplifications. */
10276 if (TREE_CODE (arg0) == BIT_AND_EXPR
10277 && TREE_CODE (arg1) == BIT_AND_EXPR
10278 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10279 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10280 && integer_zerop (const_binop (BIT_AND_EXPR,
10281 TREE_OPERAND (arg0, 1),
10282 TREE_OPERAND (arg1, 1))))
10283 {
10284 code = BIT_IOR_EXPR;
10285 goto bit_ior;
10286 }
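/* Editorial note (hypothetical example): in (a & 0xF0) + (b & 0x0F)
   the two mask constants share no bits, so no bit position of the
   addition can carry and the PLUS_EXPR is safely rerouted to the
   BIT_IOR_EXPR case. */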
10287
10288 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10289 (plus (plus (mult) (mult)) (foo)) so that we can
10290 take advantage of the factoring cases below. */
10291 if (TYPE_OVERFLOW_WRAPS (type)
10292 && (((TREE_CODE (arg0) == PLUS_EXPR
10293 || TREE_CODE (arg0) == MINUS_EXPR)
10294 && TREE_CODE (arg1) == MULT_EXPR)
10295 || ((TREE_CODE (arg1) == PLUS_EXPR
10296 || TREE_CODE (arg1) == MINUS_EXPR)
10297 && TREE_CODE (arg0) == MULT_EXPR)))
10298 {
10299 tree parg0, parg1, parg, marg;
10300 enum tree_code pcode;
10301
10302 if (TREE_CODE (arg1) == MULT_EXPR)
10303 parg = arg0, marg = arg1;
10304 else
10305 parg = arg1, marg = arg0;
10306 pcode = TREE_CODE (parg);
10307 parg0 = TREE_OPERAND (parg, 0);
10308 parg1 = TREE_OPERAND (parg, 1);
10309 STRIP_NOPS (parg0);
10310 STRIP_NOPS (parg1);
10311
10312 if (TREE_CODE (parg0) == MULT_EXPR
10313 && TREE_CODE (parg1) != MULT_EXPR)
10314 return fold_build2_loc (loc, pcode, type,
10315 fold_build2_loc (loc, PLUS_EXPR, type,
10316 fold_convert_loc (loc, type,
10317 parg0),
10318 fold_convert_loc (loc, type,
10319 marg)),
10320 fold_convert_loc (loc, type, parg1));
10321 if (TREE_CODE (parg0) != MULT_EXPR
10322 && TREE_CODE (parg1) == MULT_EXPR)
10323 return
10324 fold_build2_loc (loc, PLUS_EXPR, type,
10325 fold_convert_loc (loc, type, parg0),
10326 fold_build2_loc (loc, pcode, type,
10327 fold_convert_loc (loc, type, marg),
10328 fold_convert_loc (loc, type,
10329 parg1)));
10330 }
10331 }
10332 else
10333 {
10334 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10335 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10336 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10337
10338 /* Likewise if the operands are reversed. */
10339 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10340 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10341
10342 /* Convert X + -C into X - C. */
10343 if (TREE_CODE (arg1) == REAL_CST
10344 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10345 {
10346 tem = fold_negate_const (arg1, type);
10347 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10348 return fold_build2_loc (loc, MINUS_EXPR, type,
10349 fold_convert_loc (loc, type, arg0),
10350 fold_convert_loc (loc, type, tem));
10351 }
10352
10353 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10354 to __complex__ ( x, y ). This is not the same for SNaNs or
10355 if signed zeros are involved. */
10356 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10357 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10358 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10359 {
10360 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10361 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10362 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10363 bool arg0rz = false, arg0iz = false;
10364 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10365 || (arg0i && (arg0iz = real_zerop (arg0i))))
10366 {
10367 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10368 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10369 if (arg0rz && arg1i && real_zerop (arg1i))
10370 {
10371 tree rp = arg1r ? arg1r
10372 : build1 (REALPART_EXPR, rtype, arg1);
10373 tree ip = arg0i ? arg0i
10374 : build1 (IMAGPART_EXPR, rtype, arg0);
10375 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10376 }
10377 else if (arg0iz && arg1r && real_zerop (arg1r))
10378 {
10379 tree rp = arg0r ? arg0r
10380 : build1 (REALPART_EXPR, rtype, arg0);
10381 tree ip = arg1i ? arg1i
10382 : build1 (IMAGPART_EXPR, rtype, arg1);
10383 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10384 }
10385 }
10386 }
10387
10388 if (flag_unsafe_math_optimizations
10389 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10390 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10391 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10392 return tem;
10393
10394 /* Convert x+x into x*2.0. */
10395 if (operand_equal_p (arg0, arg1, 0)
10396 && SCALAR_FLOAT_TYPE_P (type))
10397 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10398 build_real (type, dconst2));
10399
10400 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10401 We associate floats only if the user has specified
10402 -fassociative-math. */
10403 if (flag_associative_math
10404 && TREE_CODE (arg1) == PLUS_EXPR
10405 && TREE_CODE (arg0) != MULT_EXPR)
10406 {
10407 tree tree10 = TREE_OPERAND (arg1, 0);
10408 tree tree11 = TREE_OPERAND (arg1, 1);
10409 if (TREE_CODE (tree11) == MULT_EXPR
10410 && TREE_CODE (tree10) == MULT_EXPR)
10411 {
10412 tree tree0;
10413 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10414 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10415 }
10416 }
10417 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10418 We associate floats only if the user has specified
10419 -fassociative-math. */
10420 if (flag_associative_math
10421 && TREE_CODE (arg0) == PLUS_EXPR
10422 && TREE_CODE (arg1) != MULT_EXPR)
10423 {
10424 tree tree00 = TREE_OPERAND (arg0, 0);
10425 tree tree01 = TREE_OPERAND (arg0, 1);
10426 if (TREE_CODE (tree01) == MULT_EXPR
10427 && TREE_CODE (tree00) == MULT_EXPR)
10428 {
10429 tree tree0;
10430 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10431 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10432 }
10433 }
10434 }
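/* Editorial note (hypothetical examples): under -fassociative-math
   the two rewrites above turn a + (b*c + d*e) into (a + b*c) + d*e
   and (b*c + d*e) + a into b*c + (d*e + a), exposing multiply-add
   shapes to later folding at the cost of possibly changed rounding. */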
10435
10436 bit_rotate:
10437 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10438 is a rotate of A by C1 bits. */
10439 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10440 is a rotate of A by B bits. */
10441 {
10442 enum tree_code code0, code1;
10443 tree rtype;
10444 code0 = TREE_CODE (arg0);
10445 code1 = TREE_CODE (arg1);
10446 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10447 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10448 && operand_equal_p (TREE_OPERAND (arg0, 0),
10449 TREE_OPERAND (arg1, 0), 0)
10450 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10451 TYPE_UNSIGNED (rtype))
10452 /* Only create rotates in complete modes. Other cases are not
10453 expanded properly. */
10454 && (element_precision (rtype)
10455 == element_precision (TYPE_MODE (rtype))))
10456 {
10457 tree tree01, tree11;
10458 enum tree_code code01, code11;
10459
10460 tree01 = TREE_OPERAND (arg0, 1);
10461 tree11 = TREE_OPERAND (arg1, 1);
10462 STRIP_NOPS (tree01);
10463 STRIP_NOPS (tree11);
10464 code01 = TREE_CODE (tree01);
10465 code11 = TREE_CODE (tree11);
10466 if (code01 == INTEGER_CST
10467 && code11 == INTEGER_CST
10468 && TREE_INT_CST_HIGH (tree01) == 0
10469 && TREE_INT_CST_HIGH (tree11) == 0
10470 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10471 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10472 {
10473 tem = build2_loc (loc, LROTATE_EXPR,
10474 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10475 TREE_OPERAND (arg0, 0),
10476 code0 == LSHIFT_EXPR ? tree01 : tree11);
10477 return fold_convert_loc (loc, type, tem);
10478 }
10479 else if (code11 == MINUS_EXPR)
10480 {
10481 tree tree110, tree111;
10482 tree110 = TREE_OPERAND (tree11, 0);
10483 tree111 = TREE_OPERAND (tree11, 1);
10484 STRIP_NOPS (tree110);
10485 STRIP_NOPS (tree111);
10486 if (TREE_CODE (tree110) == INTEGER_CST
10487 && 0 == compare_tree_int (tree110,
10488 element_precision
10489 (TREE_TYPE (TREE_OPERAND
10490 (arg0, 0))))
10491 && operand_equal_p (tree01, tree111, 0))
10492 return
10493 fold_convert_loc (loc, type,
10494 build2 ((code0 == LSHIFT_EXPR
10495 ? LROTATE_EXPR
10496 : RROTATE_EXPR),
10497 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10498 TREE_OPERAND (arg0, 0), tree01));
10499 }
10500 else if (code01 == MINUS_EXPR)
10501 {
10502 tree tree010, tree011;
10503 tree010 = TREE_OPERAND (tree01, 0);
10504 tree011 = TREE_OPERAND (tree01, 1);
10505 STRIP_NOPS (tree010);
10506 STRIP_NOPS (tree011);
10507 if (TREE_CODE (tree010) == INTEGER_CST
10508 && 0 == compare_tree_int (tree010,
10509 element_precision
10510 (TREE_TYPE (TREE_OPERAND
10511 (arg0, 0))))
10512 && operand_equal_p (tree11, tree011, 0))
10513 return fold_convert_loc
10514 (loc, type,
10515 build2 ((code0 != LSHIFT_EXPR
10516 ? LROTATE_EXPR
10517 : RROTATE_EXPR),
10518 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10519 TREE_OPERAND (arg0, 0), tree11));
10520 }
10521 }
10522 }
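/* Editorial note -- hypothetical examples of the rotate recognition
   above, assuming a 32-bit unsigned int: both
     (x << 3) + (x >> 29)   and   (x << n) | (x >> (32 - n))
   pass the checks (unsigned operand, shift counts summing to the
   precision) and are rewritten as left rotates of x by 3 and by n. */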
10523
10524 associate:
10525 /* In most languages, we can't associate operations on floats through
10526 parentheses. Rather than remember where the parentheses were, we
10527 don't associate floats at all, unless the user has specified
10528 -fassociative-math.
10529 And, we need to make sure the type is not saturating. */
10530
10531 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10532 && !TYPE_SATURATING (type))
10533 {
10534 tree var0, con0, lit0, minus_lit0;
10535 tree var1, con1, lit1, minus_lit1;
10536 tree atype = type;
10537 bool ok = true;
10538
10539 /* Split both trees into variables, constants, and literals. Then
10540 associate each group together, the constants with literals,
10541 then the result with variables. This increases the chances of
10542 literals being recombined later and of generating relocatable
10543 expressions for the sum of a constant and literal. */
10544 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10545 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10546 code == MINUS_EXPR);
10547
10548 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10549 if (code == MINUS_EXPR)
10550 code = PLUS_EXPR;
10551
10552 /* With undefined overflow prefer doing association in a type
10553 which wraps on overflow, if that is one of the operand types. */
10554 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10555 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10556 {
10557 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10558 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10559 atype = TREE_TYPE (arg0);
10560 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10561 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10562 atype = TREE_TYPE (arg1);
10563 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10564 }
10565
10566 /* With undefined overflow we can only associate constants with one
10567 variable, and constants whose association doesn't overflow. */
10568 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10569 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10570 {
10571 if (var0 && var1)
10572 {
10573 tree tmp0 = var0;
10574 tree tmp1 = var1;
10575
10576 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10577 tmp0 = TREE_OPERAND (tmp0, 0);
10578 if (CONVERT_EXPR_P (tmp0)
10579 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10580 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10581 <= TYPE_PRECISION (atype)))
10582 tmp0 = TREE_OPERAND (tmp0, 0);
10583 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10584 tmp1 = TREE_OPERAND (tmp1, 0);
10585 if (CONVERT_EXPR_P (tmp1)
10586 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10587 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10588 <= TYPE_PRECISION (atype)))
10589 tmp1 = TREE_OPERAND (tmp1, 0);
10590 /* The only case we can still associate with two variables
10591 is if they are the same, modulo negation and bit-pattern
10592 preserving conversions. */
10593 if (!operand_equal_p (tmp0, tmp1, 0))
10594 ok = false;
10595 }
10596 }
10597
10598 /* Only do something if we found more than two objects. Otherwise,
10599 nothing has changed and we risk infinite recursion. */
10600 if (ok
10601 && (2 < ((var0 != 0) + (var1 != 0)
10602 + (con0 != 0) + (con1 != 0)
10603 + (lit0 != 0) + (lit1 != 0)
10604 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10605 {
10606 bool any_overflows = false;
10607 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10608 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10609 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10610 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10611 var0 = associate_trees (loc, var0, var1, code, atype);
10612 con0 = associate_trees (loc, con0, con1, code, atype);
10613 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10614 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10615 code, atype);
10616
10617 /* Preserve the MINUS_EXPR if the negative part of the literal is
10618 greater than the positive part. Otherwise, the multiplicative
10619 folding code (i.e. extract_muldiv) may be fooled when
10620 unsigned constants are subtracted, as in the following
10621 example: ((X*2 + 4) - 8U)/2. */
10622 if (minus_lit0 && lit0)
10623 {
10624 if (TREE_CODE (lit0) == INTEGER_CST
10625 && TREE_CODE (minus_lit0) == INTEGER_CST
10626 && tree_int_cst_lt (lit0, minus_lit0))
10627 {
10628 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10629 MINUS_EXPR, atype);
10630 lit0 = 0;
10631 }
10632 else
10633 {
10634 lit0 = associate_trees (loc, lit0, minus_lit0,
10635 MINUS_EXPR, atype);
10636 minus_lit0 = 0;
10637 }
10638 }
10639
10640 /* Don't introduce overflows through reassociation. */
10641 if (!any_overflows
10642 && ((lit0 && TREE_OVERFLOW (lit0))
10643 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10644 return NULL_TREE;
10645
10646 if (minus_lit0)
10647 {
10648 if (con0 == 0)
10649 return
10650 fold_convert_loc (loc, type,
10651 associate_trees (loc, var0, minus_lit0,
10652 MINUS_EXPR, atype));
10653 else
10654 {
10655 con0 = associate_trees (loc, con0, minus_lit0,
10656 MINUS_EXPR, atype);
10657 return
10658 fold_convert_loc (loc, type,
10659 associate_trees (loc, var0, con0,
10660 PLUS_EXPR, atype));
10661 }
10662 }
10663
10664 con0 = associate_trees (loc, con0, lit0, code, atype);
10665 return
10666 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10667 code, atype));
10668 }
10669 }
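/* Editorial note (hypothetical example): for (x + 4) + 5, split_tree
   produces the variable x and the literals 4 and 5 -- three objects,
   so the reassociation above fires and yields x + 9. With only two
   objects nothing would change and folding could recurse forever,
   hence the count check. */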
10670
10671 return NULL_TREE;
10672
10673 case MINUS_EXPR:
10674 /* Pointer simplifications for subtraction, simple reassociations. */
10675 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10676 {
10677 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10678 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10679 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10680 {
10681 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10682 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10683 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10684 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10685 return fold_build2_loc (loc, PLUS_EXPR, type,
10686 fold_build2_loc (loc, MINUS_EXPR, type,
10687 arg00, arg10),
10688 fold_build2_loc (loc, MINUS_EXPR, type,
10689 arg01, arg11));
10690 }
10691 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10692 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10693 {
10694 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10695 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10696 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10697 fold_convert_loc (loc, type, arg1));
10698 if (tmp)
10699 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10700 }
10701 }
10702 /* A - (-B) -> A + B */
10703 if (TREE_CODE (arg1) == NEGATE_EXPR)
10704 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10705 fold_convert_loc (loc, type,
10706 TREE_OPERAND (arg1, 0)));
10707 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10708 if (TREE_CODE (arg0) == NEGATE_EXPR
10709 && negate_expr_p (arg1)
10710 && reorder_operands_p (arg0, arg1))
10711 return fold_build2_loc (loc, MINUS_EXPR, type,
10712 fold_convert_loc (loc, type,
10713 negate_expr (arg1)),
10714 fold_convert_loc (loc, type,
10715 TREE_OPERAND (arg0, 0)));
10716 /* Convert -A - 1 to ~A. */
10717 if (TREE_CODE (type) != COMPLEX_TYPE
10718 && TREE_CODE (arg0) == NEGATE_EXPR
10719 && integer_onep (arg1)
10720 && !TYPE_OVERFLOW_TRAPS (type))
10721 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10722 fold_convert_loc (loc, type,
10723 TREE_OPERAND (arg0, 0)));
10724
10725 /* Convert -1 - A to ~A. */
10726 if (TREE_CODE (type) != COMPLEX_TYPE
10727 && integer_all_onesp (arg0))
10728 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10729
10730
10731 /* X - (X / Y) * Y is X % Y. */
10732 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10733 && TREE_CODE (arg1) == MULT_EXPR
10734 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10735 && operand_equal_p (arg0,
10736 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10737 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10738 TREE_OPERAND (arg1, 1), 0))
10739 return
10740 fold_convert_loc (loc, type,
10741 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10742 arg0, TREE_OPERAND (arg1, 1)));
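/* Editorial note (hypothetical example): truncating division
   satisfies x == (x / y) * y + x % y, so the fold above turns
   x - (x / y) * y directly into x % y. */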
10743
10744 if (! FLOAT_TYPE_P (type))
10745 {
10746 if (integer_zerop (arg0))
10747 return negate_expr (fold_convert_loc (loc, type, arg1));
10748 if (integer_zerop (arg1))
10749 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10750
10751 /* Fold A - (A & B) into ~B & A. */
10752 if (!TREE_SIDE_EFFECTS (arg0)
10753 && TREE_CODE (arg1) == BIT_AND_EXPR)
10754 {
10755 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10756 {
10757 tree arg10 = fold_convert_loc (loc, type,
10758 TREE_OPERAND (arg1, 0));
10759 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10760 fold_build1_loc (loc, BIT_NOT_EXPR,
10761 type, arg10),
10762 fold_convert_loc (loc, type, arg0));
10763 }
10764 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10765 {
10766 tree arg11 = fold_convert_loc (loc,
10767 type, TREE_OPERAND (arg1, 1));
10768 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10769 fold_build1_loc (loc, BIT_NOT_EXPR,
10770 type, arg11),
10771 fold_convert_loc (loc, type, arg0));
10772 }
10773 }
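/* Editorial note (hypothetical example): every bit set in (a & b) is
   also set in a, so a - (a & b) cannot borrow and equals clearing
   b's bits from a, i.e. ~b & a. */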
10774
10775 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10776 any power of 2 minus 1. */
10777 if (TREE_CODE (arg0) == BIT_AND_EXPR
10778 && TREE_CODE (arg1) == BIT_AND_EXPR
10779 && operand_equal_p (TREE_OPERAND (arg0, 0),
10780 TREE_OPERAND (arg1, 0), 0))
10781 {
10782 tree mask0 = TREE_OPERAND (arg0, 1);
10783 tree mask1 = TREE_OPERAND (arg1, 1);
10784 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10785
10786 if (operand_equal_p (tem, mask1, 0))
10787 {
10788 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10789 TREE_OPERAND (arg0, 0), mask1);
10790 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10791 }
10792 }
10793 }
10794
10795 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10796 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10797 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10798
10799 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10800 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10801 (-ARG1 + ARG0) reduces to -ARG1. */
10802 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10803 return negate_expr (fold_convert_loc (loc, type, arg1));
10804
10805 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10806 __complex__ ( x, -y ). This is not the same for SNaNs or if
10807 signed zeros are involved. */
10808 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10809 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10810 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10811 {
10812 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10813 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10814 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10815 bool arg0rz = false, arg0iz = false;
10816 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10817 || (arg0i && (arg0iz = real_zerop (arg0i))))
10818 {
10819 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10820 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10821 if (arg0rz && arg1i && real_zerop (arg1i))
10822 {
10823 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10824 arg1r ? arg1r
10825 : build1 (REALPART_EXPR, rtype, arg1));
10826 tree ip = arg0i ? arg0i
10827 : build1 (IMAGPART_EXPR, rtype, arg0);
10828 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10829 }
10830 else if (arg0iz && arg1r && real_zerop (arg1r))
10831 {
10832 tree rp = arg0r ? arg0r
10833 : build1 (REALPART_EXPR, rtype, arg0);
10834 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10835 arg1i ? arg1i
10836 : build1 (IMAGPART_EXPR, rtype, arg1));
10837 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10838 }
10839 }
10840 }
10841
10842 /* Fold &x - &x. This can happen from &x.foo - &x.
10843 This is unsafe for certain floats even in non-IEEE formats.
10844 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10845 Also note that operand_equal_p is always false if an operand
10846 is volatile. */
10847
10848 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10849 && operand_equal_p (arg0, arg1, 0))
10850 return build_zero_cst (type);
10851
10852 /* A - B -> A + (-B) if B is easily negatable. */
10853 if (negate_expr_p (arg1)
10854 && ((FLOAT_TYPE_P (type)
10855 /* Avoid this transformation if B is a positive REAL_CST. */
10856 && (TREE_CODE (arg1) != REAL_CST
10857 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10858 || INTEGRAL_TYPE_P (type)))
10859 return fold_build2_loc (loc, PLUS_EXPR, type,
10860 fold_convert_loc (loc, type, arg0),
10861 fold_convert_loc (loc, type,
10862 negate_expr (arg1)));
10863
10864 /* Try folding difference of addresses. */
10865 {
10866 HOST_WIDE_INT diff;
10867
10868 if ((TREE_CODE (arg0) == ADDR_EXPR
10869 || TREE_CODE (arg1) == ADDR_EXPR)
10870 && ptr_difference_const (arg0, arg1, &diff))
10871 return build_int_cst_type (type, diff);
10872 }
10873
10874 /* Fold &a[i] - &a[j] to i-j. */
10875 if (TREE_CODE (arg0) == ADDR_EXPR
10876 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10877 && TREE_CODE (arg1) == ADDR_EXPR
10878 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10879 {
10880 tree tem = fold_addr_of_array_ref_difference (loc, type,
10881 TREE_OPERAND (arg0, 0),
10882 TREE_OPERAND (arg1, 0));
10883 if (tem)
10884 return tem;
10885 }
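/* Editorial note (hypothetical example): for `int a[10];', the
   difference of the two ADDR_EXPRs in &a[i] - &a[j] folds here to
   (i - j) scaled by the element size, leaving no address
   computation in the result. */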
10886
10887 if (FLOAT_TYPE_P (type)
10888 && flag_unsafe_math_optimizations
10889 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10890 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10891 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10892 return tem;
10893
10894 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10895 one. Make sure the type is not saturating and has the signedness of
10896 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10897 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10898 if ((TREE_CODE (arg0) == MULT_EXPR
10899 || TREE_CODE (arg1) == MULT_EXPR)
10900 && !TYPE_SATURATING (type)
10901 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10902 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10903 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10904 {
10905 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10906 if (tem)
10907 return tem;
10908 }
10909
10910 goto associate;
10911
10912 case MULT_EXPR:
10913 /* (-A) * (-B) -> A * B */
10914 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10915 return fold_build2_loc (loc, MULT_EXPR, type,
10916 fold_convert_loc (loc, type,
10917 TREE_OPERAND (arg0, 0)),
10918 fold_convert_loc (loc, type,
10919 negate_expr (arg1)));
10920 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10921 return fold_build2_loc (loc, MULT_EXPR, type,
10922 fold_convert_loc (loc, type,
10923 negate_expr (arg0)),
10924 fold_convert_loc (loc, type,
10925 TREE_OPERAND (arg1, 0)));
10926
10927 if (! FLOAT_TYPE_P (type))
10928 {
10929 if (integer_zerop (arg1))
10930 return omit_one_operand_loc (loc, type, arg1, arg0);
10931 if (integer_onep (arg1))
10932 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10933 /* Transform x * -1 into -x. Make sure to do the negation
10934 on the original operand with conversions not stripped
10935 because we can only strip non-sign-changing conversions. */
10936 if (integer_minus_onep (arg1))
10937 return fold_convert_loc (loc, type, negate_expr (op0));
10938 /* Transform x * -C into -x * C if x is easily negatable. */
10939 if (TREE_CODE (arg1) == INTEGER_CST
10940 && tree_int_cst_sgn (arg1) == -1
10941 && negate_expr_p (arg0)
10942 && (tem = negate_expr (arg1)) != arg1
10943 && !TREE_OVERFLOW (tem))
10944 return fold_build2_loc (loc, MULT_EXPR, type,
10945 fold_convert_loc (loc, type,
10946 negate_expr (arg0)),
10947 tem);
10948
10949 /* (a * (1 << b)) is (a << b) */
10950 if (TREE_CODE (arg1) == LSHIFT_EXPR
10951 && integer_onep (TREE_OPERAND (arg1, 0)))
10952 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10953 TREE_OPERAND (arg1, 1));
10954 if (TREE_CODE (arg0) == LSHIFT_EXPR
10955 && integer_onep (TREE_OPERAND (arg0, 0)))
10956 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10957 TREE_OPERAND (arg0, 1));
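/* Editorial note (hypothetical example): x * (1 << n) and
   (1 << n) * x both become x << n here, trading the multiply
   for a shift. */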
10958
10959 /* (A + A) * C -> A * 2 * C */
10960 if (TREE_CODE (arg0) == PLUS_EXPR
10961 && TREE_CODE (arg1) == INTEGER_CST
10962 && operand_equal_p (TREE_OPERAND (arg0, 0),
10963 TREE_OPERAND (arg0, 1), 0))
10964 return fold_build2_loc (loc, MULT_EXPR, type,
10965 omit_one_operand_loc (loc, type,
10966 TREE_OPERAND (arg0, 0),
10967 TREE_OPERAND (arg0, 1)),
10968 fold_build2_loc (loc, MULT_EXPR, type,
10969 build_int_cst (type, 2), arg1));
10970
10971 strict_overflow_p = false;
10972 if (TREE_CODE (arg1) == INTEGER_CST
10973 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10974 &strict_overflow_p)))
10975 {
10976 if (strict_overflow_p)
10977 fold_overflow_warning (("assuming signed overflow does not "
10978 "occur when simplifying "
10979 "multiplication"),
10980 WARN_STRICT_OVERFLOW_MISC);
10981 return fold_convert_loc (loc, type, tem);
10982 }
10983
10984 /* Optimize z * conj(z) for integer complex numbers. */
10985 if (TREE_CODE (arg0) == CONJ_EXPR
10986 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10987 return fold_mult_zconjz (loc, type, arg1);
10988 if (TREE_CODE (arg1) == CONJ_EXPR
10989 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10990 return fold_mult_zconjz (loc, type, arg0);
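/* Editorial note (hypothetical example): for a complex integer z,
   z * conj(z) equals __real z * __real z + __imag z * __imag z;
   fold_mult_zconjz builds that sum of squares as the real part of
   the result, with a zero imaginary part. */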
10991 }
10992 else
10993 {
10994 /* Maybe fold x * 0 to 0. The expressions aren't the same
10995 when x is NaN, since x * 0 is also NaN. Nor are they the
10996 same in modes with signed zeros, since multiplying a
10997 negative value by 0 gives -0, not +0. */
10998 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10999 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11000 && real_zerop (arg1))
11001 return omit_one_operand_loc (loc, type, arg1, arg0);
11002 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11003 Likewise for complex arithmetic with signed zeros. */
11004 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11005 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11006 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11007 && real_onep (arg1))
11008 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11009
11010 /* Transform x * -1.0 into -x. */
11011 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11012 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11013 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11014 && real_minus_onep (arg1))
11015 return fold_convert_loc (loc, type, negate_expr (arg0));
11016
11017 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
11018 the result for floating point types due to rounding, so it is applied
11019 only if -fassociative-math was specified. */
11020 if (flag_associative_math
11021 && TREE_CODE (arg0) == RDIV_EXPR
11022 && TREE_CODE (arg1) == REAL_CST
11023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11024 {
11025 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11026 arg1);
11027 if (tem)
11028 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11029 TREE_OPERAND (arg0, 1));
11030 }
11031
11032 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11033 if (operand_equal_p (arg0, arg1, 0))
11034 {
11035 tree tem = fold_strip_sign_ops (arg0);
11036 if (tem != NULL_TREE)
11037 {
11038 tem = fold_convert_loc (loc, type, tem);
11039 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11040 }
11041 }
11042
11043 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11044 This is not the same for NaNs or if signed zeros are
11045 involved. */
11046 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11047 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11048 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11049 && TREE_CODE (arg1) == COMPLEX_CST
11050 && real_zerop (TREE_REALPART (arg1)))
11051 {
11052 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11053 if (real_onep (TREE_IMAGPART (arg1)))
11054 return
11055 fold_build2_loc (loc, COMPLEX_EXPR, type,
11056 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11057 rtype, arg0)),
11058 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11059 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11060 return
11061 fold_build2_loc (loc, COMPLEX_EXPR, type,
11062 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11063 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11064 rtype, arg0)));
11065 }
11066
11067 /* Optimize z * conj(z) for floating point complex numbers.
11068 Guarded by flag_unsafe_math_optimizations as non-finite
11069 imaginary components don't produce scalar results. */
11070 if (flag_unsafe_math_optimizations
11071 && TREE_CODE (arg0) == CONJ_EXPR
11072 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11073 return fold_mult_zconjz (loc, type, arg1);
11074 if (flag_unsafe_math_optimizations
11075 && TREE_CODE (arg1) == CONJ_EXPR
11076 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11077 return fold_mult_zconjz (loc, type, arg0);
11078
11079 if (flag_unsafe_math_optimizations)
11080 {
11081 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11082 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11083
11084 /* Optimizations of root(...)*root(...). */
11085 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11086 {
11087 tree rootfn, arg;
11088 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11089 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11090
11091 /* Optimize sqrt(x)*sqrt(x) as x. */
11092 if (BUILTIN_SQRT_P (fcode0)
11093 && operand_equal_p (arg00, arg10, 0)
11094 && ! HONOR_SNANS (TYPE_MODE (type)))
11095 return arg00;
11096
11097 /* Optimize root(x)*root(y) as root(x*y). */
11098 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11099 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11100 return build_call_expr_loc (loc, rootfn, 1, arg);
11101 }
11102
11103 /* Optimize expN(x)*expN(y) as expN(x+y). */
11104 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11105 {
11106 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11107 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11108 CALL_EXPR_ARG (arg0, 0),
11109 CALL_EXPR_ARG (arg1, 0));
11110 return build_call_expr_loc (loc, expfn, 1, arg);
11111 }
11112
11113 /* Optimizations of pow(...)*pow(...). */
11114 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11115 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11116 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11117 {
11118 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11119 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11120 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11121 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11122
11123 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11124 if (operand_equal_p (arg01, arg11, 0))
11125 {
11126 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11127 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11128 arg00, arg10);
11129 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11130 }
11131
11132 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11133 if (operand_equal_p (arg00, arg10, 0))
11134 {
11135 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11136 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11137 arg01, arg11);
11138 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11139 }
11140 }
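/* Editorial note (hypothetical examples): under
   -funsafe-math-optimizations the folds above rewrite
   pow (x, y) * pow (z, y) as pow (x * z, y) and
   pow (x, y) * pow (x, z) as pow (x, y + z), either of which can
   change rounding or overflow behavior, hence the flag guard. */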
11141
11142 /* Optimize tan(x)*cos(x) as sin(x). */
11143 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11144 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11145 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11146 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11147 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11148 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11149 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11150 CALL_EXPR_ARG (arg1, 0), 0))
11151 {
11152 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11153
11154 if (sinfn != NULL_TREE)
11155 return build_call_expr_loc (loc, sinfn, 1,
11156 CALL_EXPR_ARG (arg0, 0));
11157 }
11158
11159 /* Optimize x*pow(x,c) as pow(x,c+1). */
11160 if (fcode1 == BUILT_IN_POW
11161 || fcode1 == BUILT_IN_POWF
11162 || fcode1 == BUILT_IN_POWL)
11163 {
11164 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11165 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11166 if (TREE_CODE (arg11) == REAL_CST
11167 && !TREE_OVERFLOW (arg11)
11168 && operand_equal_p (arg0, arg10, 0))
11169 {
11170 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11171 REAL_VALUE_TYPE c;
11172 tree arg;
11173
11174 c = TREE_REAL_CST (arg11);
11175 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11176 arg = build_real (type, c);
11177 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11178 }
11179 }
11180
11181 /* Optimize pow(x,c)*x as pow(x,c+1). */
11182 if (fcode0 == BUILT_IN_POW
11183 || fcode0 == BUILT_IN_POWF
11184 || fcode0 == BUILT_IN_POWL)
11185 {
11186 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11187 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11188 if (TREE_CODE (arg01) == REAL_CST
11189 && !TREE_OVERFLOW (arg01)
11190 && operand_equal_p (arg1, arg00, 0))
11191 {
11192 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11193 REAL_VALUE_TYPE c;
11194 tree arg;
11195
11196 c = TREE_REAL_CST (arg01);
11197 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11198 arg = build_real (type, c);
11199 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11200 }
11201 }
11202
11203 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11204 if (!in_gimple_form
11205 && optimize
11206 && operand_equal_p (arg0, arg1, 0))
11207 {
11208 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11209
11210 if (powfn)
11211 {
11212 tree arg = build_real (type, dconst2);
11213 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11214 }
11215 }
11216 }
11217 }
11218 goto associate;
11219
11220 case BIT_IOR_EXPR:
11221 bit_ior:
11222 if (integer_all_onesp (arg1))
11223 return omit_one_operand_loc (loc, type, arg1, arg0);
11224 if (integer_zerop (arg1))
11225 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11226 if (operand_equal_p (arg0, arg1, 0))
11227 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11228
11229 /* ~X | X is -1. */
11230 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11231 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11232 {
11233 t1 = build_zero_cst (type);
11234 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11235 return omit_one_operand_loc (loc, type, t1, arg1);
11236 }
11237
11238 /* X | ~X is -1. */
11239 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11241 {
11242 t1 = build_zero_cst (type);
11243 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11244 return omit_one_operand_loc (loc, type, t1, arg0);
11245 }
11246
11247 /* Canonicalize (X & C1) | C2. */
11248 if (TREE_CODE (arg0) == BIT_AND_EXPR
11249 && TREE_CODE (arg1) == INTEGER_CST
11250 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11251 {
11252 double_int c1, c2, c3, msk;
11253 int width = TYPE_PRECISION (type), w;
11254 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11255 c2 = tree_to_double_int (arg1);
11256
11257 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11258 if ((c1 & c2) == c1)
11259 return omit_one_operand_loc (loc, type, arg1,
11260 TREE_OPERAND (arg0, 0));
11261
11262 msk = double_int::mask (width);
11263
11264 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11265 if (msk.and_not (c1 | c2).is_zero ())
11266 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11267 TREE_OPERAND (arg0, 0), arg1);
11268
11269 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11270 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11271 mode which allows further optimizations. */
11272 c1 &= msk;
11273 c2 &= msk;
11274 c3 = c1.and_not (c2);
11275 for (w = BITS_PER_UNIT;
11276 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11277 w <<= 1)
11278 {
11279 unsigned HOST_WIDE_INT mask
11280 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11281 if (((c1.low | c2.low) & mask) == mask
11282 && (c1.low & ~mask) == 0 && c1.high == 0)
11283 {
11284 c3 = double_int::from_uhwi (mask);
11285 break;
11286 }
11287 }
11288 if (c3 != c1)
11289 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11290 fold_build2_loc (loc, BIT_AND_EXPR, type,
11291 TREE_OPERAND (arg0, 0),
11292 double_int_to_tree (type,
11293 c3)),
11294 arg1);
11295 }
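/* Editorial note -- hypothetical 32-bit examples of the
   canonicalization above:
     (x & 3) | 7             -> 7 (C1 covered by C2; x kept only
                                   for side effects)
     (x & 0xFFFFFF00) | 0xFF -> x | 0xFF (C1 | C2 is all ones)
     (x & 0x0F) | 0x06       -> (x & 0x09) | 0x06 (C1 &= ~C2). */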
11296
11297 /* (X & Y) | Y is (X, Y). */
11298 if (TREE_CODE (arg0) == BIT_AND_EXPR
11299 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11300 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11301 /* (X & Y) | X is (Y, X). */
11302 if (TREE_CODE (arg0) == BIT_AND_EXPR
11303 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11304 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11305 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11306 /* X | (X & Y) is (Y, X). */
11307 if (TREE_CODE (arg1) == BIT_AND_EXPR
11308 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11309 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11310 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11311 /* X | (Y & X) is (Y, X). */
11312 if (TREE_CODE (arg1) == BIT_AND_EXPR
11313 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11314 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11315 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11316
11317 /* (X & ~Y) | (~X & Y) is X ^ Y */
11318 if (TREE_CODE (arg0) == BIT_AND_EXPR
11319 && TREE_CODE (arg1) == BIT_AND_EXPR)
11320 {
11321 tree a0, a1, l0, l1, n0, n1;
11322
11323 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11324 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11325
11326 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11327 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11328
11329 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11330 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11331
11332 if ((operand_equal_p (n0, a0, 0)
11333 && operand_equal_p (n1, a1, 0))
11334 || (operand_equal_p (n0, a1, 0)
11335 && operand_equal_p (n1, a0, 0)))
11336 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11337 }
11338
11339 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11340 if (t1 != NULL_TREE)
11341 return t1;
11342
11343 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11344
11345 This results in more efficient code for machines without a NAND
11346 instruction. Combine will canonicalize to the first form
11347 which will allow use of NAND instructions provided by the
11348 backend if they exist. */
11349 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11350 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11351 {
11352 return
11353 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11354 build2 (BIT_AND_EXPR, type,
11355 fold_convert_loc (loc, type,
11356 TREE_OPERAND (arg0, 0)),
11357 fold_convert_loc (loc, type,
11358 TREE_OPERAND (arg1, 0))));
11359 }
11360
11361 /* See if this can be simplified into a rotate first. If that
11362 is unsuccessful continue in the association code. */
11363 goto bit_rotate;
11364
11365 case BIT_XOR_EXPR:
11366 if (integer_zerop (arg1))
11367 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11368 if (integer_all_onesp (arg1))
11369 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11370 if (operand_equal_p (arg0, arg1, 0))
11371 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11372
11373 /* ~X ^ X is -1. */
11374 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11375 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11376 {
11377 t1 = build_zero_cst (type);
11378 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11379 return omit_one_operand_loc (loc, type, t1, arg1);
11380 }
11381
11382 /* X ^ ~X is -1. */
11383 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11384 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11385 {
11386 t1 = build_zero_cst (type);
11387 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11388 return omit_one_operand_loc (loc, type, t1, arg0);
11389 }
11390
11391 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11392 with a constant, and the two constants have no bits in common,
11393 we should treat this as a BIT_IOR_EXPR since this may produce more
11394 simplifications. */
11395 if (TREE_CODE (arg0) == BIT_AND_EXPR
11396 && TREE_CODE (arg1) == BIT_AND_EXPR
11397 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11398 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11399 && integer_zerop (const_binop (BIT_AND_EXPR,
11400 TREE_OPERAND (arg0, 1),
11401 TREE_OPERAND (arg1, 1))))
11402 {
11403 code = BIT_IOR_EXPR;
11404 goto bit_ior;
11405 }
11406
11407 /* (X | Y) ^ X -> Y & ~X */
11408 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11409 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11410 {
11411 tree t2 = TREE_OPERAND (arg0, 1);
11412 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11413 arg1);
11414 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11415 fold_convert_loc (loc, type, t2),
11416 fold_convert_loc (loc, type, t1));
11417 return t1;
11418 }
11419
11420 /* (Y | X) ^ X -> Y & ~X */
11421 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11422 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11423 {
11424 tree t2 = TREE_OPERAND (arg0, 0);
11425 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11426 arg1);
11427 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11428 fold_convert_loc (loc, type, t2),
11429 fold_convert_loc (loc, type, t1));
11430 return t1;
11431 }
11432
11433 /* X ^ (X | Y) -> Y & ~X */
11434 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11435 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11436 {
11437 tree t2 = TREE_OPERAND (arg1, 1);
11438 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11439 arg0);
11440 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11441 fold_convert_loc (loc, type, t2),
11442 fold_convert_loc (loc, type, t1));
11443 return t1;
11444 }
11445
11446 /* X ^ (Y | X) -> Y & ~X */
11447 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11448 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11449 {
11450 tree t2 = TREE_OPERAND (arg1, 0);
11451 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11452 arg0);
11453 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11454 fold_convert_loc (loc, type, t2),
11455 fold_convert_loc (loc, type, t1));
11456 return t1;
11457 }
11458
11459 /* Convert ~X ^ ~Y to X ^ Y. */
11460 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11461 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11462 return fold_build2_loc (loc, code, type,
11463 fold_convert_loc (loc, type,
11464 TREE_OPERAND (arg0, 0)),
11465 fold_convert_loc (loc, type,
11466 TREE_OPERAND (arg1, 0)));
11467
11468 /* Convert ~X ^ C to X ^ ~C. */
11469 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11470 && TREE_CODE (arg1) == INTEGER_CST)
11471 return fold_build2_loc (loc, code, type,
11472 fold_convert_loc (loc, type,
11473 TREE_OPERAND (arg0, 0)),
11474 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11475
11476 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11477 if (TREE_CODE (arg0) == BIT_AND_EXPR
11478 && integer_onep (TREE_OPERAND (arg0, 1))
11479 && integer_onep (arg1))
11480 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11481 build_zero_cst (TREE_TYPE (arg0)));
11482
11483 /* Fold (X & Y) ^ Y as ~X & Y. */
11484 if (TREE_CODE (arg0) == BIT_AND_EXPR
11485 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11486 {
11487 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11488 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11489 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11490 fold_convert_loc (loc, type, arg1));
11491 }
11492 /* Fold (X & Y) ^ X as ~Y & X. */
11493 if (TREE_CODE (arg0) == BIT_AND_EXPR
11494 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11495 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11496 {
11497 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11498 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11499 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11500 fold_convert_loc (loc, type, arg1));
11501 }
11502 /* Fold X ^ (X & Y) as X & ~Y. */
11503 if (TREE_CODE (arg1) == BIT_AND_EXPR
11504 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11505 {
11506 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11507 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11508 fold_convert_loc (loc, type, arg0),
11509 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11510 }
11511 /* Fold X ^ (Y & X) as ~Y & X. */
11512 if (TREE_CODE (arg1) == BIT_AND_EXPR
11513 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11514 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11515 {
11516 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11517 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11518 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11519 fold_convert_loc (loc, type, arg0));
11520 }
11521
11522 /* See if this can be simplified into a rotate first. If that
11523 is unsuccessful continue in the association code. */
11524 goto bit_rotate;
11525
11526 case BIT_AND_EXPR:
11527 if (integer_all_onesp (arg1))
11528 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11529 if (integer_zerop (arg1))
11530 return omit_one_operand_loc (loc, type, arg1, arg0);
11531 if (operand_equal_p (arg0, arg1, 0))
11532 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11533
11534 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11535 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11536 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11537 || (TREE_CODE (arg0) == EQ_EXPR
11538 && integer_zerop (TREE_OPERAND (arg0, 1))))
11539 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11540 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11541
11542 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11543 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11544 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11545 || (TREE_CODE (arg1) == EQ_EXPR
11546 && integer_zerop (TREE_OPERAND (arg1, 1))))
11547 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11548 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11549
11550 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11551 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11552 && TREE_CODE (arg1) == INTEGER_CST
11553 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11554 {
11555 tree tmp1 = fold_convert_loc (loc, type, arg1);
11556 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11557 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11558 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11559 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11560 return
11561 fold_convert_loc (loc, type,
11562 fold_build2_loc (loc, BIT_IOR_EXPR,
11563 type, tmp2, tmp3));
11564 }
11565
11566 /* (X | Y) & Y is (X, Y). */
11567 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11568 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11569 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11570 /* (X | Y) & X is (Y, X). */
11571 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11572 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11573 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11574 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11575 /* X & (X | Y) is (Y, X). */
11576 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11577 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11578 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11579 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11580 /* X & (Y | X) is (Y, X). */
11581 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11582 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11583 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11584 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11585
11586 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11587 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11588 && integer_onep (TREE_OPERAND (arg0, 1))
11589 && integer_onep (arg1))
11590 {
11591 tree tem2;
11592 tem = TREE_OPERAND (arg0, 0);
11593 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11594 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11595 tem, tem2);
11596 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11597 build_zero_cst (TREE_TYPE (tem)));
11598 }
11599 /* Fold ~X & 1 as (X & 1) == 0. */
11600 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11601 && integer_onep (arg1))
11602 {
11603 tree tem2;
11604 tem = TREE_OPERAND (arg0, 0);
11605 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11606 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11607 tem, tem2);
11608 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11609 build_zero_cst (TREE_TYPE (tem)));
11610 }
11611 /* Fold !X & 1 as X == 0. */
11612 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11613 && integer_onep (arg1))
11614 {
11615 tem = TREE_OPERAND (arg0, 0);
11616 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11617 build_zero_cst (TREE_TYPE (tem)));
11618 }
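/* Editorial note (hypothetical examples): the three folds above turn
   low-bit tests into comparisons: (x ^ 1) & 1 and ~x & 1 both
   become (x & 1) == 0, while !x & 1 becomes x == 0. */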
11619
11620 /* Fold (X ^ Y) & Y as ~X & Y. */
11621 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11622 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11623 {
11624 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11625 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11626 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11627 fold_convert_loc (loc, type, arg1));
11628 }
11629 /* Fold (X ^ Y) & X as ~Y & X. */
11630 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11631 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11632 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11633 {
11634 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11635 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11636 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11637 fold_convert_loc (loc, type, arg1));
11638 }
11639 /* Fold X & (X ^ Y) as X & ~Y. */
11640 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11641 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11642 {
11643 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11644 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11645 fold_convert_loc (loc, type, arg0),
11646 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11647 }
11648 /* Fold X & (Y ^ X) as ~Y & X. */
11649 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11650 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11651 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11652 {
11653 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11654 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11655 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11656 fold_convert_loc (loc, type, arg0));
11657 }
11658
11659 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11660 multiple of 1 << CST. */
11661 if (TREE_CODE (arg1) == INTEGER_CST)
11662 {
11663 double_int cst1 = tree_to_double_int (arg1);
11664 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11665 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11666 if ((cst1 & ncst1) == ncst1
11667 && multiple_of_p (type, arg0,
11668 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11669 return fold_convert_loc (loc, type, arg0);
11670 }
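/* Editorial note (hypothetical example): (x * 4) & -4 folds to x * 4
   here, because x * 4 is provably a multiple of 4 and the mask -4
   only clears bits that are already zero. */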
11671
11672 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11673 bits from CST2. */
11674 if (TREE_CODE (arg1) == INTEGER_CST
11675 && TREE_CODE (arg0) == MULT_EXPR
11676 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11677 {
11678 int arg1tz
11679 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11680 if (arg1tz > 0)
11681 {
11682 double_int arg1mask, masked;
11683 arg1mask = ~double_int::mask (arg1tz);
11684 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11685 TYPE_UNSIGNED (type));
11686 masked = arg1mask & tree_to_double_int (arg1);
11687 if (masked.is_zero ())
11688 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11689 arg0, arg1);
11690 else if (masked != tree_to_double_int (arg1))
11691 return fold_build2_loc (loc, code, type, op0,
11692 double_int_to_tree (type, masked));
11693 }
11694 }
11695
11696 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11697 ((A & N) + B) & M -> (A + B) & M
11698 Similarly if (N & M) == 0,
11699 ((A | N) + B) & M -> (A + B) & M
11700 and for - instead of + (or unary - instead of +)
11701 and/or ^ instead of |.
11702 If B is constant and (B & M) == 0, fold into A & M. */
11703 if (host_integerp (arg1, 1))
11704 {
11705 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11706 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11707 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11708 && (TREE_CODE (arg0) == PLUS_EXPR
11709 || TREE_CODE (arg0) == MINUS_EXPR
11710 || TREE_CODE (arg0) == NEGATE_EXPR)
11711 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11712 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11713 {
11714 tree pmop[2];
11715 int which = 0;
11716 unsigned HOST_WIDE_INT cst0;
11717
11718 /* Now we know that arg0 is (C + D) or (C - D) or
11719 -C and arg1 (M) is == (1LL << cst) - 1.
11720 Store C into PMOP[0] and D into PMOP[1]. */
11721 pmop[0] = TREE_OPERAND (arg0, 0);
11722 pmop[1] = NULL;
11723 if (TREE_CODE (arg0) != NEGATE_EXPR)
11724 {
11725 pmop[1] = TREE_OPERAND (arg0, 1);
11726 which = 1;
11727 }
11728
11729 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11730 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11731 & cst1) != cst1)
11732 which = -1;
11733
11734 for (; which >= 0; which--)
11735 switch (TREE_CODE (pmop[which]))
11736 {
11737 case BIT_AND_EXPR:
11738 case BIT_IOR_EXPR:
11739 case BIT_XOR_EXPR:
11740 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11741 != INTEGER_CST)
11742 break;
11743 /* tree_low_cst not used, because we don't care about
11744 the upper bits. */
11745 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11746 cst0 &= cst1;
11747 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11748 {
11749 if (cst0 != cst1)
11750 break;
11751 }
11752 else if (cst0 != 0)
11753 break;
11754 /* If C or D is of the form (A & N) where
11755 (N & M) == M, or of the form (A | N) or
11756 (A ^ N) where (N & M) == 0, replace it with A. */
11757 pmop[which] = TREE_OPERAND (pmop[which], 0);
11758 break;
11759 case INTEGER_CST:
11760 /* If C or D is a constant N where (N & M) == 0, it can be
11761 omitted (assumed 0). */
11762 if ((TREE_CODE (arg0) == PLUS_EXPR
11763 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11764 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11765 pmop[which] = NULL;
11766 break;
11767 default:
11768 break;
11769 }
11770
11771 /* Only build anything new if we optimized one or both arguments
11772 above. */
11773 if (pmop[0] != TREE_OPERAND (arg0, 0)
11774 || (TREE_CODE (arg0) != NEGATE_EXPR
11775 && pmop[1] != TREE_OPERAND (arg0, 1)))
11776 {
11777 tree utype = TREE_TYPE (arg0);
11778 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11779 {
11780 /* Perform the operations in a type that has defined
11781 overflow behavior. */
11782 utype = unsigned_type_for (TREE_TYPE (arg0));
11783 if (pmop[0] != NULL)
11784 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11785 if (pmop[1] != NULL)
11786 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11787 }
11788
11789 if (TREE_CODE (arg0) == NEGATE_EXPR)
11790 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11791 else if (TREE_CODE (arg0) == PLUS_EXPR)
11792 {
11793 if (pmop[0] != NULL && pmop[1] != NULL)
11794 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11795 pmop[0], pmop[1]);
11796 else if (pmop[0] != NULL)
11797 tem = pmop[0];
11798 else if (pmop[1] != NULL)
11799 tem = pmop[1];
11800 else
11801 return build_int_cst (type, 0);
11802 }
11803 else if (pmop[0] == NULL)
11804 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11805 else
11806 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11807 pmop[0], pmop[1]);
11808 /* TEM is now the new binary +, - or unary - replacement. */
11809 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11810 fold_convert_loc (loc, utype, arg1));
11811 return fold_convert_loc (loc, type, tem);
11812 }
11813 }
11814 }
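
	  /* Illustrative sketch (not part of the GCC sources): with
	     hypothetical unsigned int operands a, b and M == 0xff, the
	     fold above yields
	       ((a & 0x1ff) + b) & 0xff      ->  (a + b) & 0xff
	                                         (0x1ff & 0xff == 0xff)
	       ((a | 0x300) - b) & 0xff      ->  (a - b) & 0xff
	                                         (0x300 & 0xff == 0)
	       ((a & 0x1ff) + 0x100) & 0xff  ->  a & 0xff
	                                         (constant B, B & M == 0)  */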
11815
11816 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11817 if (t1 != NULL_TREE)
11818 return t1;
11819 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11820 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11821 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11822 {
11823 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11824
11825 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11826 && (~TREE_INT_CST_LOW (arg1)
11827 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11828 return
11829 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11830 }
11831
11832 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11833
11834 This results in more efficient code for machines without a NOR
11835 instruction. Combine will canonicalize to the first form
11836 which will allow use of NOR instructions provided by the
11837 backend if they exist. */
11838 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11839 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11840 {
11841 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11842 build2 (BIT_IOR_EXPR, type,
11843 fold_convert_loc (loc, type,
11844 TREE_OPERAND (arg0, 0)),
11845 fold_convert_loc (loc, type,
11846 TREE_OPERAND (arg1, 0))));
11847 }
11848
11849 /* If arg0 is derived from the address of an object or function, we may
11850 be able to fold this expression using the object or function's
11851 alignment. */
11852 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11853 {
11854 unsigned HOST_WIDE_INT modulus, residue;
11855 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11856
11857 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11858 integer_onep (arg1));
11859
11860 /* This works because modulus is a power of 2. If this weren't the
11861 case, we'd have to replace it by its greatest power-of-2
11862 divisor: modulus & -modulus. */
11863 if (low < modulus)
11864 return build_int_cst (type, residue & low);
11865 }
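
	  /* Illustrative sketch (not part of the GCC sources), assuming the
	     declared alignment is visible here: given a hypothetical
	       static int v __attribute__ ((aligned (8)));
	     the modulus for &v is 8 and the residue 0, so
	       (unsigned long) &v & 7
	     folds to the constant 0.  */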
11866
11867 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11868 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11869 if the new mask might be further optimized. */
11870 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11871 || TREE_CODE (arg0) == RSHIFT_EXPR)
11872 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11873 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11874 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11875 < TYPE_PRECISION (TREE_TYPE (arg0))
11876 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11877 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11878 {
11879 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11880 unsigned HOST_WIDE_INT mask
11881 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11882 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11883 tree shift_type = TREE_TYPE (arg0);
11884
11885 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11886 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11887 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11888 && TYPE_PRECISION (TREE_TYPE (arg0))
11889 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11890 {
11891 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11892 tree arg00 = TREE_OPERAND (arg0, 0);
11893 /* See if more bits can be proven as zero because of
11894 zero extension. */
11895 if (TREE_CODE (arg00) == NOP_EXPR
11896 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11897 {
11898 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11899 if (TYPE_PRECISION (inner_type)
11900 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11901 && TYPE_PRECISION (inner_type) < prec)
11902 {
11903 prec = TYPE_PRECISION (inner_type);
11904 /* See if we can shorten the right shift. */
11905 if (shiftc < prec)
11906 shift_type = inner_type;
11907 }
11908 }
11909 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11910 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11911 zerobits <<= prec - shiftc;
11912 /* For arithmetic shift if sign bit could be set, zerobits
11913 can contain actually sign bits, so no transformation is
11914 possible, unless MASK masks them all away. In that
11915 case the shift needs to be converted into logical shift. */
11916 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11917 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11918 {
11919 if ((mask & zerobits) == 0)
11920 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11921 else
11922 zerobits = 0;
11923 }
11924 }
11925
11926 /* ((X << 16) & 0xff00) is (X, 0). */
11927 if ((mask & zerobits) == mask)
11928 return omit_one_operand_loc (loc, type,
11929 build_int_cst (type, 0), arg0);
11930
11931 newmask = mask | zerobits;
11932 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11933 {
11934 /* Only do the transformation if NEWMASK is some integer
11935 mode's mask. */
11936 for (prec = BITS_PER_UNIT;
11937 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11938 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11939 break;
11940 if (prec < HOST_BITS_PER_WIDE_INT
11941 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11942 {
11943 tree newmaskt;
11944
11945 if (shift_type != TREE_TYPE (arg0))
11946 {
11947 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11948 fold_convert_loc (loc, shift_type,
11949 TREE_OPERAND (arg0, 0)),
11950 TREE_OPERAND (arg0, 1));
11951 tem = fold_convert_loc (loc, type, tem);
11952 }
11953 else
11954 tem = op0;
11955 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11956 if (!tree_int_cst_equal (newmaskt, arg1))
11957 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11958 }
11959 }
11960 }
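
	  /* Illustrative sketch (not part of the GCC sources): for a
	     hypothetical 32-bit unsigned int x,
	       (x << 2) & 3     ->  0   (the mask covers only known-zero bits)
	       (x >> 24) & 0xff     widens its mask to 0xffffffff, after
	                            which the all-ones mask folds away,
	                            leaving plain x >> 24.  */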
11961
11962 goto associate;
11963
11964 case RDIV_EXPR:
11965 /* Don't touch a floating-point divide by zero unless the mode
11966 of the constant can represent infinity. */
11967 if (TREE_CODE (arg1) == REAL_CST
11968 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11969 && real_zerop (arg1))
11970 return NULL_TREE;
11971
11972 /* Optimize A / A to 1.0 if we don't care about
11973 NaNs or Infinities. Skip the transformation
11974 for non-real operands. */
11975 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11976 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11977 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11978 && operand_equal_p (arg0, arg1, 0))
11979 {
11980 tree r = build_real (TREE_TYPE (arg0), dconst1);
11981
11982 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11983 }
11984
11985 /* The complex version of the above A / A optimization. */
11986 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11987 && operand_equal_p (arg0, arg1, 0))
11988 {
11989 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11990 if (! HONOR_NANS (TYPE_MODE (elem_type))
11991 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11992 {
11993 tree r = build_real (elem_type, dconst1);
11994 /* omit_two_operands will call fold_convert for us. */
11995 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11996 }
11997 }
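
      /* Illustrative sketch (not part of the GCC sources): when NaNs and
	 infinities are ignored (e.g. under -ffinite-math-only), both
	   double f (double a) { return a / a; }
	 and its _Complex double counterpart fold to the constant 1.0,
	 while any side effects in the operand are still evaluated.  */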
11998
11999 /* (-A) / (-B) -> A / B */
12000 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12001 return fold_build2_loc (loc, RDIV_EXPR, type,
12002 TREE_OPERAND (arg0, 0),
12003 negate_expr (arg1));
12004 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12005 return fold_build2_loc (loc, RDIV_EXPR, type,
12006 negate_expr (arg0),
12007 TREE_OPERAND (arg1, 0));
12008
12009 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12010 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12011 && real_onep (arg1))
12012 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12013
12014 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12015 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12016 && real_minus_onep (arg1))
12017 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12018 negate_expr (arg0)));
12019
12020 /* If ARG1 is a constant, we can convert this to a multiply by the
12021 reciprocal. This does not have the same rounding properties,
12022 so only do this if -freciprocal-math. We can actually
12023 always safely do it if ARG1 is a power of two, but it's hard to
12024 tell if it is or not in a portable manner. */
12025 if (optimize
12026 && (TREE_CODE (arg1) == REAL_CST
12027 || (TREE_CODE (arg1) == COMPLEX_CST
12028 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12029 || (TREE_CODE (arg1) == VECTOR_CST
12030 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12031 {
12032 if (flag_reciprocal_math
12033 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12034 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12035 /* Find the reciprocal if optimizing and the result is exact.
12036 TODO: Complex reciprocal not implemented. */
12037 if (TREE_CODE (arg1) != COMPLEX_CST)
12038 {
12039 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12040
12041 if (inverse)
12042 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12043 }
12044 }
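
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical double x,
	   x / 4.0  ->  x * 0.25   whenever optimizing, since 0.25 is exact
	   x / 5.0  ->  x * 0.2    only under -freciprocal-math, since 0.2
				   is not exactly representable in binary.  */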
12045 /* Convert A/B/C to A/(B*C). */
12046 if (flag_reciprocal_math
12047 && TREE_CODE (arg0) == RDIV_EXPR)
12048 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12049 fold_build2_loc (loc, MULT_EXPR, type,
12050 TREE_OPERAND (arg0, 1), arg1));
12051
12052 /* Convert A/(B/C) to (A/B)*C. */
12053 if (flag_reciprocal_math
12054 && TREE_CODE (arg1) == RDIV_EXPR)
12055 return fold_build2_loc (loc, MULT_EXPR, type,
12056 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12057 TREE_OPERAND (arg1, 0)),
12058 TREE_OPERAND (arg1, 1));
12059
12060 /* Convert C1/(X*C2) into (C1/C2)/X. */
12061 if (flag_reciprocal_math
12062 && TREE_CODE (arg1) == MULT_EXPR
12063 && TREE_CODE (arg0) == REAL_CST
12064 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12065 {
12066 tree tem = const_binop (RDIV_EXPR, arg0,
12067 TREE_OPERAND (arg1, 1));
12068 if (tem)
12069 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12070 TREE_OPERAND (arg1, 0));
12071 }
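
      /* Illustrative sketch (not part of the GCC sources): under
	 -freciprocal-math, for hypothetical doubles a, b, c, x,
	   a / b / c         ->  a / (b * c)
	   a / (b / c)       ->  (a / b) * c
	   10.0 / (x * 5.0)  ->  2.0 / x  */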
12072
12073 if (flag_unsafe_math_optimizations)
12074 {
12075 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12076 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12077
12078 /* Optimize sin(x)/cos(x) as tan(x). */
12079 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12080 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12081 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12082 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12083 CALL_EXPR_ARG (arg1, 0), 0))
12084 {
12085 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12086
12087 if (tanfn != NULL_TREE)
12088 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12089 }
12090
12091 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12092 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12093 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12094 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12095 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12096 CALL_EXPR_ARG (arg1, 0), 0))
12097 {
12098 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12099
12100 if (tanfn != NULL_TREE)
12101 {
12102 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12103 CALL_EXPR_ARG (arg0, 0));
12104 return fold_build2_loc (loc, RDIV_EXPR, type,
12105 build_real (type, dconst1), tmp);
12106 }
12107 }
12108
12109 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12110 NaNs or Infinities. */
12111 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12112 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12113 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12114 {
12115 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12116 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12117
12118 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12119 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12120 && operand_equal_p (arg00, arg01, 0))
12121 {
12122 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12123
12124 if (cosfn != NULL_TREE)
12125 return build_call_expr_loc (loc, cosfn, 1, arg00);
12126 }
12127 }
12128
12129 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12130 NaNs or Infinities. */
12131 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12132 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12133 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12134 {
12135 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12136 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12137
12138 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12139 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12140 && operand_equal_p (arg00, arg01, 0))
12141 {
12142 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12143
12144 if (cosfn != NULL_TREE)
12145 {
12146 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12147 return fold_build2_loc (loc, RDIV_EXPR, type,
12148 build_real (type, dconst1),
12149 tmp);
12150 }
12151 }
12152 }
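
	  /* Illustrative sketch (not part of the GCC sources): under
	     -funsafe-math-optimizations,
	       sin (x) / cos (x)  ->  tan (x)
	       cos (x) / sin (x)  ->  1.0 / tan (x)
	     and, when NaNs and infinities are additionally ignored,
	       sin (x) / tan (x)  ->  cos (x)
	       tan (x) / sin (x)  ->  1.0 / cos (x)  */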
12153
12154 /* Optimize pow(x,c)/x as pow(x,c-1). */
12155 if (fcode0 == BUILT_IN_POW
12156 || fcode0 == BUILT_IN_POWF
12157 || fcode0 == BUILT_IN_POWL)
12158 {
12159 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12160 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12161 if (TREE_CODE (arg01) == REAL_CST
12162 && !TREE_OVERFLOW (arg01)
12163 && operand_equal_p (arg1, arg00, 0))
12164 {
12165 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12166 REAL_VALUE_TYPE c;
12167 tree arg;
12168
12169 c = TREE_REAL_CST (arg01);
12170 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12171 arg = build_real (type, c);
12172 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12173 }
12174 }
12175
12176 /* Optimize a/root(b/c) into a*root(c/b). */
12177 if (BUILTIN_ROOT_P (fcode1))
12178 {
12179 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12180
12181 if (TREE_CODE (rootarg) == RDIV_EXPR)
12182 {
12183 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12184 tree b = TREE_OPERAND (rootarg, 0);
12185 tree c = TREE_OPERAND (rootarg, 1);
12186
12187 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12188
12189 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12190 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12191 }
12192 }
12193
12194 /* Optimize x/expN(y) into x*expN(-y). */
12195 if (BUILTIN_EXPONENT_P (fcode1))
12196 {
12197 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12198 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12199 arg1 = build_call_expr_loc (loc,
12200 expfn, 1,
12201 fold_convert_loc (loc, type, arg));
12202 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12203 }
12204
12205 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12206 if (fcode1 == BUILT_IN_POW
12207 || fcode1 == BUILT_IN_POWF
12208 || fcode1 == BUILT_IN_POWL)
12209 {
12210 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12211 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12212 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12213 tree neg11 = fold_convert_loc (loc, type,
12214 negate_expr (arg11));
12215 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12216 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12217 }
12218 }
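
      /* Illustrative sketch (not part of the GCC sources): under
	 -funsafe-math-optimizations,
	   pow (x, 3.0) / x  ->  pow (x, 2.0)
	   x / exp (y)       ->  x * exp (-y)
	   x / pow (y, z)    ->  x * pow (y, -z)  */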
12219 return NULL_TREE;
12220
12221 case TRUNC_DIV_EXPR:
12222 /* Optimize (X & (-A)) / A where A is a power of 2,
12223 to X >> log2(A) */
12224 if (TREE_CODE (arg0) == BIT_AND_EXPR
12225 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12226 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12227 {
12228 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12229 arg1, TREE_OPERAND (arg0, 1));
12230 if (sum && integer_zerop (sum)) {
12231 unsigned long pow2;
12232
12233 if (TREE_INT_CST_LOW (arg1))
12234 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12235 else
12236 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12237 + HOST_BITS_PER_WIDE_INT;
12238
12239 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12240 TREE_OPERAND (arg0, 0),
12241 build_int_cst (integer_type_node, pow2));
12242 }
12243 }
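
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical signed int x,
	   (x & -8) / 8  ->  x >> 3
	 which is exact because x & -8 is always a multiple of 8.  */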
12244
12245 /* Fall through */
12246
12247 case FLOOR_DIV_EXPR:
12248 /* Simplify A / (B << N) where A and B are positive and B is
12249 a power of 2, to A >> (N + log2(B)). */
12250 strict_overflow_p = false;
12251 if (TREE_CODE (arg1) == LSHIFT_EXPR
12252 && (TYPE_UNSIGNED (type)
12253 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12254 {
12255 tree sval = TREE_OPERAND (arg1, 0);
12256 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12257 {
12258 tree sh_cnt = TREE_OPERAND (arg1, 1);
12259 unsigned long pow2;
12260
12261 if (TREE_INT_CST_LOW (sval))
12262 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12263 else
12264 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12265 + HOST_BITS_PER_WIDE_INT;
12266
12267 if (strict_overflow_p)
12268 fold_overflow_warning (("assuming signed overflow does not "
12269 "occur when simplifying A / (B << N)"),
12270 WARN_STRICT_OVERFLOW_MISC);
12271
12272 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12273 sh_cnt,
12274 build_int_cst (TREE_TYPE (sh_cnt),
12275 pow2));
12276 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12277 fold_convert_loc (loc, type, arg0), sh_cnt);
12278 }
12279 }
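
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical unsigned int a,
	   a / (2U << n)  ->  a >> (n + 1)  */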
12280
12281 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12282 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12283 if (INTEGRAL_TYPE_P (type)
12284 && TYPE_UNSIGNED (type)
12285 && code == FLOOR_DIV_EXPR)
12286 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12287
12288 /* Fall through */
12289
12290 case ROUND_DIV_EXPR:
12291 case CEIL_DIV_EXPR:
12292 case EXACT_DIV_EXPR:
12293 if (integer_onep (arg1))
12294 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12295 if (integer_zerop (arg1))
12296 return NULL_TREE;
12297 /* X / -1 is -X. */
12298 if (!TYPE_UNSIGNED (type)
12299 && TREE_CODE (arg1) == INTEGER_CST
12300 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12301 && TREE_INT_CST_HIGH (arg1) == -1)
12302 return fold_convert_loc (loc, type, negate_expr (arg0));
12303
12304 /* Convert -A / -B to A / B when the type is signed and overflow is
12305 undefined. */
12306 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12307 && TREE_CODE (arg0) == NEGATE_EXPR
12308 && negate_expr_p (arg1))
12309 {
12310 if (INTEGRAL_TYPE_P (type))
12311 fold_overflow_warning (("assuming signed overflow does not occur "
12312 "when distributing negation across "
12313 "division"),
12314 WARN_STRICT_OVERFLOW_MISC);
12315 return fold_build2_loc (loc, code, type,
12316 fold_convert_loc (loc, type,
12317 TREE_OPERAND (arg0, 0)),
12318 fold_convert_loc (loc, type,
12319 negate_expr (arg1)));
12320 }
12321 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12322 && TREE_CODE (arg1) == NEGATE_EXPR
12323 && negate_expr_p (arg0))
12324 {
12325 if (INTEGRAL_TYPE_P (type))
12326 fold_overflow_warning (("assuming signed overflow does not occur "
12327 "when distributing negation across "
12328 "division"),
12329 WARN_STRICT_OVERFLOW_MISC);
12330 return fold_build2_loc (loc, code, type,
12331 fold_convert_loc (loc, type,
12332 negate_expr (arg0)),
12333 fold_convert_loc (loc, type,
12334 TREE_OPERAND (arg1, 0)));
12335 }
12336
12337 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12338 operation, EXACT_DIV_EXPR.
12339
12340 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12341 At one time others generated faster code, but it's not clear whether
12342 they still do after the last round of changes to the DIV code in expmed.c.  */
12343 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12344 && multiple_of_p (type, arg0, arg1))
12345 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12346
12347 strict_overflow_p = false;
12348 if (TREE_CODE (arg1) == INTEGER_CST
12349 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12350 &strict_overflow_p)))
12351 {
12352 if (strict_overflow_p)
12353 fold_overflow_warning (("assuming signed overflow does not occur "
12354 "when simplifying division"),
12355 WARN_STRICT_OVERFLOW_MISC);
12356 return fold_convert_loc (loc, type, tem);
12357 }
12358
12359 return NULL_TREE;
12360
12361 case CEIL_MOD_EXPR:
12362 case FLOOR_MOD_EXPR:
12363 case ROUND_MOD_EXPR:
12364 case TRUNC_MOD_EXPR:
12365 /* X % 1 is always zero, but be sure to preserve any side
12366 effects in X. */
12367 if (integer_onep (arg1))
12368 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12369
12370 /* For X % 0, return X % 0 unchanged so that we can issue
12371 the proper warnings and errors.  */
12372 if (integer_zerop (arg1))
12373 return NULL_TREE;
12374
12375 /* 0 % X is always zero, but be sure to preserve any side
12376 effects in X. Place this after checking for X == 0. */
12377 if (integer_zerop (arg0))
12378 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12379
12380 /* X % -1 is zero. */
12381 if (!TYPE_UNSIGNED (type)
12382 && TREE_CODE (arg1) == INTEGER_CST
12383 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12384 && TREE_INT_CST_HIGH (arg1) == -1)
12385 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12386
12387 /* X % -C is the same as X % C. */
12388 if (code == TRUNC_MOD_EXPR
12389 && !TYPE_UNSIGNED (type)
12390 && TREE_CODE (arg1) == INTEGER_CST
12391 && !TREE_OVERFLOW (arg1)
12392 && TREE_INT_CST_HIGH (arg1) < 0
12393 && !TYPE_OVERFLOW_TRAPS (type)
12394 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12395 && !sign_bit_p (arg1, arg1))
12396 return fold_build2_loc (loc, code, type,
12397 fold_convert_loc (loc, type, arg0),
12398 fold_convert_loc (loc, type,
12399 negate_expr (arg1)));
12400
12401 /* X % -Y is the same as X % Y. */
12402 if (code == TRUNC_MOD_EXPR
12403 && !TYPE_UNSIGNED (type)
12404 && TREE_CODE (arg1) == NEGATE_EXPR
12405 && !TYPE_OVERFLOW_TRAPS (type))
12406 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12407 fold_convert_loc (loc, type,
12408 TREE_OPERAND (arg1, 0)));
12409
12410 strict_overflow_p = false;
12411 if (TREE_CODE (arg1) == INTEGER_CST
12412 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12413 &strict_overflow_p)))
12414 {
12415 if (strict_overflow_p)
12416 fold_overflow_warning (("assuming signed overflow does not occur "
12417 "when simplifying modulus"),
12418 WARN_STRICT_OVERFLOW_MISC);
12419 return fold_convert_loc (loc, type, tem);
12420 }
12421
12422 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12423 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
12424 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12425 && (TYPE_UNSIGNED (type)
12426 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12427 {
12428 tree c = arg1;
12429 /* Also optimize A % (C << N) where C is a power of 2,
12430 to A & ((C << N) - 1). */
12431 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12432 c = TREE_OPERAND (arg1, 0);
12433
12434 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12435 {
12436 tree mask
12437 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12438 build_int_cst (TREE_TYPE (arg1), 1));
12439 if (strict_overflow_p)
12440 fold_overflow_warning (("assuming signed overflow does not "
12441 "occur when simplifying "
12442 "X % (power of two)"),
12443 WARN_STRICT_OVERFLOW_MISC);
12444 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12445 fold_convert_loc (loc, type, arg0),
12446 fold_convert_loc (loc, type, mask));
12447 }
12448 }
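
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical unsigned int x,
	   x % 8          ->  x & 7
	   x % (4U << n)  ->  x & ((4U << n) - 1)  */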
12449
12450 return NULL_TREE;
12451
12452 case LROTATE_EXPR:
12453 case RROTATE_EXPR:
12454 if (integer_all_onesp (arg0))
12455 return omit_one_operand_loc (loc, type, arg0, arg1);
12456 goto shift;
12457
12458 case RSHIFT_EXPR:
12459 /* Optimize -1 >> x for arithmetic right shifts. */
12460 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12461 && tree_expr_nonnegative_p (arg1))
12462 return omit_one_operand_loc (loc, type, arg0, arg1);
12463 /* ... fall through ... */
12464
12465 case LSHIFT_EXPR:
12466 shift:
12467 if (integer_zerop (arg1))
12468 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12469 if (integer_zerop (arg0))
12470 return omit_one_operand_loc (loc, type, arg0, arg1);
12471
12472 /* Prefer vector1 << scalar to vector1 << vector2
12473 if vector2 is uniform. */
12474 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12475 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12476 return fold_build2_loc (loc, code, type, op0, tem);
12477
12478 /* Since a negative shift count is not well-defined,
12479 don't try to compute it in the compiler. */
12480 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12481 return NULL_TREE;
12482
12483 prec = element_precision (type);
12484
12485 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12486 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12487 && TREE_INT_CST_LOW (arg1) < prec
12488 && host_integerp (TREE_OPERAND (arg0, 1), true)
12489 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12490 {
12491 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12492 + TREE_INT_CST_LOW (arg1));
12493
12494 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12495 being well defined. */
12496 if (low >= prec)
12497 {
12498 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12499 low = low % prec;
12500 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12501 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12502 TREE_OPERAND (arg0, 0));
12503 else
12504 low = prec - 1;
12505 }
12506
12507 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12508 build_int_cst (TREE_TYPE (arg1), low));
12509 }
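
	  /* Illustrative sketch (not part of the GCC sources): for a
	     hypothetical 32-bit unsigned int x,
	       (x << 3) << 5    ->  x << 8
	       (x << 20) << 20  ->  0   (total shift reaches the precision)
	     and two rotate counts combine modulo the precision.  */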
12510
12511 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12512 into x & ((unsigned)-1 >> c) for unsigned types. */
12513 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12514 || (TYPE_UNSIGNED (type)
12515 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12516 && host_integerp (arg1, false)
12517 && TREE_INT_CST_LOW (arg1) < prec
12518 && host_integerp (TREE_OPERAND (arg0, 1), false)
12519 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12520 {
12521 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12522 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12523 tree lshift;
12524 tree arg00;
12525
12526 if (low0 == low1)
12527 {
12528 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12529
12530 lshift = build_minus_one_cst (type);
12531 lshift = const_binop (code, lshift, arg1);
12532
12533 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12534 }
12535 }
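
	  /* Illustrative sketch (not part of the GCC sources), at the
	     tree level: for a hypothetical 32-bit int x,
	       (x >> 4) << 4  ->  x & -16
	     and for unsigned x,
	       (x << 4) >> 4  ->  x & 0x0fffffff  */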
12536
12537 /* Rewrite an LROTATE_EXPR by a constant into an
12538 RROTATE_EXPR by a new constant. */
12539 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12540 {
12541 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12542 tem = const_binop (MINUS_EXPR, tem, arg1);
12543 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12544 }
12545
12546 /* If we have a rotate of a bit operation with the rotate count and
12547 the second operand of the bit operation both constant,
12548 permute the two operations. */
12549 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12550 && (TREE_CODE (arg0) == BIT_AND_EXPR
12551 || TREE_CODE (arg0) == BIT_IOR_EXPR
12552 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12553 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12554 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12555 fold_build2_loc (loc, code, type,
12556 TREE_OPERAND (arg0, 0), arg1),
12557 fold_build2_loc (loc, code, type,
12558 TREE_OPERAND (arg0, 1), arg1));
12559
12560 /* Two consecutive rotates adding up to the precision of the
12561 type can be ignored. */
12562 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12563 && TREE_CODE (arg0) == RROTATE_EXPR
12564 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12565 && TREE_INT_CST_HIGH (arg1) == 0
12566 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12567 && ((TREE_INT_CST_LOW (arg1)
12568 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12569 == prec))
12570 return TREE_OPERAND (arg0, 0);
12571
12572 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12573 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12574 if the latter can be further optimized. */
12575 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12576 && TREE_CODE (arg0) == BIT_AND_EXPR
12577 && TREE_CODE (arg1) == INTEGER_CST
12578 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12579 {
12580 tree mask = fold_build2_loc (loc, code, type,
12581 fold_convert_loc (loc, type,
12582 TREE_OPERAND (arg0, 1)),
12583 arg1);
12584 tree shift = fold_build2_loc (loc, code, type,
12585 fold_convert_loc (loc, type,
12586 TREE_OPERAND (arg0, 0)),
12587 arg1);
12588 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12589 if (tem)
12590 return tem;
12591 }
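
	  /* Illustrative sketch (not part of the GCC sources): for a
	     hypothetical unsigned int x,
	       (x & 0xff) << 8    ->  (x << 8) & 0xff00
	       (x & 0xff00) >> 8  ->  (x >> 8) & 0xff  */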
12592
12593 return NULL_TREE;
12594
12595 case MIN_EXPR:
12596 if (operand_equal_p (arg0, arg1, 0))
12597 return omit_one_operand_loc (loc, type, arg0, arg1);
12598 if (INTEGRAL_TYPE_P (type)
12599 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12600 return omit_one_operand_loc (loc, type, arg1, arg0);
12601 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12602 if (tem)
12603 return tem;
12604 goto associate;
12605
12606 case MAX_EXPR:
12607 if (operand_equal_p (arg0, arg1, 0))
12608 return omit_one_operand_loc (loc, type, arg0, arg1);
12609 if (INTEGRAL_TYPE_P (type)
12610 && TYPE_MAX_VALUE (type)
12611 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12612 return omit_one_operand_loc (loc, type, arg1, arg0);
12613 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12614 if (tem)
12615 return tem;
12616 goto associate;
12617
12618 case TRUTH_ANDIF_EXPR:
12619 /* Note that the operands of this must be ints
12620 and their values must be 0 or 1.
12621 ("true" is a fixed value perhaps depending on the language.) */
12622 /* If first arg is constant zero, return it. */
12623 if (integer_zerop (arg0))
12624 return fold_convert_loc (loc, type, arg0);
12625 case TRUTH_AND_EXPR:
12626 /* If either arg is constant true, drop it. */
12627 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12628 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12629 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12630 /* Preserve sequence points. */
12631 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12632 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12633 /* If second arg is constant zero, result is zero, but first arg
12634 must be evaluated. */
12635 if (integer_zerop (arg1))
12636 return omit_one_operand_loc (loc, type, arg1, arg0);
12637 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12638 case will be handled here. */
12639 if (integer_zerop (arg0))
12640 return omit_one_operand_loc (loc, type, arg0, arg1);
12641
12642 /* !X && X is always false. */
12643 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12644 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12645 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12646 /* X && !X is always false. */
12647 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12648 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12649 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12650
12651 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12652 means A >= Y && A != MAX, but in this case we know that
12653 A < X <= MAX. */
12654
12655 if (!TREE_SIDE_EFFECTS (arg0)
12656 && !TREE_SIDE_EFFECTS (arg1))
12657 {
12658 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12659 if (tem && !operand_equal_p (tem, arg0, 0))
12660 return fold_build2_loc (loc, code, type, tem, arg1);
12661
12662 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12663 if (tem && !operand_equal_p (tem, arg1, 0))
12664 return fold_build2_loc (loc, code, type, arg0, tem);
12665 }
12666
12667 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12668 != NULL_TREE)
12669 return tem;
12670
12671 return NULL_TREE;
12672
12673 case TRUTH_ORIF_EXPR:
12674 /* Note that the operands of this must be ints
12675 and their values must be 0 or true.
12676 ("true" is a fixed value perhaps depending on the language.) */
12677 /* If first arg is constant true, return it. */
12678 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12679 return fold_convert_loc (loc, type, arg0);
12680 case TRUTH_OR_EXPR:
12681 /* If either arg is constant zero, drop it. */
12682 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12683 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12684 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12685 /* Preserve sequence points. */
12686 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12687 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12688 /* If second arg is constant true, result is true, but we must
12689 evaluate first arg. */
12690 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12691 return omit_one_operand_loc (loc, type, arg1, arg0);
12692 /* Likewise for first arg, but note this only occurs here for
12693 TRUTH_OR_EXPR. */
12694 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12695 return omit_one_operand_loc (loc, type, arg0, arg1);
12696
12697 /* !X || X is always true. */
12698 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12699 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12700 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12701 /* X || !X is always true. */
12702 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12703 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12704 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12705
12706 /* (X && !Y) || (!X && Y) is X ^ Y */
12707 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12708 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12709 {
12710 tree a0, a1, l0, l1, n0, n1;
12711
12712 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12713 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12714
12715 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12716 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12717
12718 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12719 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12720
12721 if ((operand_equal_p (n0, a0, 0)
12722 && operand_equal_p (n1, a1, 0))
12723 || (operand_equal_p (n0, a1, 0)
12724 && operand_equal_p (n1, a0, 0)))
12725 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12726 }
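
      /* Illustrative sketch (not part of the GCC sources): for
	 hypothetical truth values a and b,
	   (a && !b) || (!a && b)  ->  a ^ b  */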
12727
12728 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12729 != NULL_TREE)
12730 return tem;
12731
12732 return NULL_TREE;
12733
12734 case TRUTH_XOR_EXPR:
12735 /* If the second arg is constant zero, drop it. */
12736 if (integer_zerop (arg1))
12737 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12738 /* If the second arg is constant true, this is a logical inversion. */
12739 if (integer_onep (arg1))
12740 {
12741 tem = invert_truthvalue_loc (loc, arg0);
12742 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12743 }
12744 /* Identical arguments cancel to zero. */
12745 if (operand_equal_p (arg0, arg1, 0))
12746 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12747
12748 /* !X ^ X is always true. */
12749 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12750 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12751 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12752
12753 /* X ^ !X is always true. */
12754 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12755 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12756 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12757
12758 return NULL_TREE;
12759
12760 case EQ_EXPR:
12761 case NE_EXPR:
12762 STRIP_NOPS (arg0);
12763 STRIP_NOPS (arg1);
12764
12765 tem = fold_comparison (loc, code, type, op0, op1);
12766 if (tem != NULL_TREE)
12767 return tem;
12768
12769 /* bool_var != 0 becomes bool_var. */
12770 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12771 && code == NE_EXPR)
12772 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12773
12774 /* bool_var == 1 becomes bool_var. */
12775 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12776 && code == EQ_EXPR)
12777 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12778
12779 /* bool_var != 1 becomes !bool_var. */
12780 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12781 && code == NE_EXPR)
12782 return fold_convert_loc (loc, type,
12783 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12784 TREE_TYPE (arg0), arg0));
12785
12786 /* bool_var == 0 becomes !bool_var. */
12787 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12788 && code == EQ_EXPR)
12789 return fold_convert_loc (loc, type,
12790 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12791 TREE_TYPE (arg0), arg0));
12792
12793 /* !exp != 0 becomes !exp */
12794 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12795 && code == NE_EXPR)
12796 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12797
12798 /* If this is an equality comparison of the address of two non-weak,
12799 unaliased symbols neither of which are extern (since we do not
12800 have access to attributes for externs), then we know the result. */
12801 if (TREE_CODE (arg0) == ADDR_EXPR
12802 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12803 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12804 && ! lookup_attribute ("alias",
12805 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12806 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12807 && TREE_CODE (arg1) == ADDR_EXPR
12808 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12809 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12810 && ! lookup_attribute ("alias",
12811 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12812 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12813 {
12814 /* We know that we're looking at the address of two
12815 non-weak, unaliased, static _DECL nodes.
12816
12817 It is both wasteful and incorrect to call operand_equal_p
12818 to compare the two ADDR_EXPR nodes. It is wasteful in that
12819 all we need to do is test pointer equality for the arguments
12820 to the two ADDR_EXPR nodes. It is incorrect to use
12821 operand_equal_p as that function is NOT equivalent to a
12822 C equality test. It can in fact return false for two
12823 objects which would test as equal using the C equality
12824 operator. */
12825 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12826 return constant_boolean_node (equal
12827 ? code == EQ_EXPR : code != EQ_EXPR,
12828 type);
12829 }
12830
12831 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12832 a MINUS_EXPR of a constant, we can convert it into a comparison with
12833 a revised constant as long as no overflow occurs. */
12834 if (TREE_CODE (arg1) == INTEGER_CST
12835 && (TREE_CODE (arg0) == PLUS_EXPR
12836 || TREE_CODE (arg0) == MINUS_EXPR)
12837 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12838 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12839 ? MINUS_EXPR : PLUS_EXPR,
12840 fold_convert_loc (loc, TREE_TYPE (arg0),
12841 arg1),
12842 TREE_OPERAND (arg0, 1)))
12843 && !TREE_OVERFLOW (tem))
12844 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
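
      /* Illustrative sketch (not part of the GCC sources): assuming the
	 adjusted constant does not overflow,
	   x + 5 == 7  ->  x == 2
	   x - 3 != 4  ->  x != 7  */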
12845
12846 /* Similarly for a NEGATE_EXPR. */
12847 if (TREE_CODE (arg0) == NEGATE_EXPR
12848 && TREE_CODE (arg1) == INTEGER_CST
12849 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12850 arg1)))
12851 && TREE_CODE (tem) == INTEGER_CST
12852 && !TREE_OVERFLOW (tem))
12853 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12854
12855 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12856 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12857 && TREE_CODE (arg1) == INTEGER_CST
12858 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12859 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12860 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12861 fold_convert_loc (loc,
12862 TREE_TYPE (arg0),
12863 arg1),
12864 TREE_OPERAND (arg0, 1)));
12865
12866 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12867 if ((TREE_CODE (arg0) == PLUS_EXPR
12868 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12869 || TREE_CODE (arg0) == MINUS_EXPR)
12870 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12871 0)),
12872 arg1, 0)
12873 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12874 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12875 {
12876 tree val = TREE_OPERAND (arg0, 1);
12877 return omit_two_operands_loc (loc, type,
12878 fold_build2_loc (loc, code, type,
12879 val,
12880 build_int_cst (TREE_TYPE (val),
12881 0)),
12882 TREE_OPERAND (arg0, 0), arg1);
12883 }
12884
12885 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12886 if (TREE_CODE (arg0) == MINUS_EXPR
12887 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12888 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12889 1)),
12890 arg1, 0)
12891 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12892 {
12893 return omit_two_operands_loc (loc, type,
12894 code == NE_EXPR
12895 ? boolean_true_node : boolean_false_node,
12896 TREE_OPERAND (arg0, 1), arg1);
12897 }
12898
12899 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12900 for !=. Don't do this for ordered comparisons due to overflow. */
12901 if (TREE_CODE (arg0) == MINUS_EXPR
12902 && integer_zerop (arg1))
12903 return fold_build2_loc (loc, code, type,
12904 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12905
12906 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12907 if (TREE_CODE (arg0) == ABS_EXPR
12908 && (integer_zerop (arg1) || real_zerop (arg1)))
12909 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12910
12911 /* If this is an EQ or NE comparison with zero and ARG0 is
12912 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12913 two operations, but the latter can be done in one less insn
12914 on machines that have only two-operand insns or on which a
12915 constant cannot be the first operand. */
12916 if (TREE_CODE (arg0) == BIT_AND_EXPR
12917 && integer_zerop (arg1))
12918 {
12919 tree arg00 = TREE_OPERAND (arg0, 0);
12920 tree arg01 = TREE_OPERAND (arg0, 1);
12921 if (TREE_CODE (arg00) == LSHIFT_EXPR
12922 && integer_onep (TREE_OPERAND (arg00, 0)))
12923 {
12924 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12925 arg01, TREE_OPERAND (arg00, 1));
12926 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12927 build_int_cst (TREE_TYPE (arg0), 1));
12928 return fold_build2_loc (loc, code, type,
12929 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12930 arg1);
12931 }
12932 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12933 && integer_onep (TREE_OPERAND (arg01, 0)))
12934 {
12935 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12936 arg00, TREE_OPERAND (arg01, 1));
12937 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12938 build_int_cst (TREE_TYPE (arg0), 1));
12939 return fold_build2_loc (loc, code, type,
12940 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12941 arg1);
12942 }
12943 }
12944
12945 /* If this is an NE or EQ comparison of zero against the result of a
12946 signed MOD operation whose second operand is a power of 2, make
12947 the MOD operation unsigned since it is simpler and equivalent. */
12948 if (integer_zerop (arg1)
12949 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12950 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12951 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12952 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12953 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12954 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12955 {
12956 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12957 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12958 fold_convert_loc (loc, newtype,
12959 TREE_OPERAND (arg0, 0)),
12960 fold_convert_loc (loc, newtype,
12961 TREE_OPERAND (arg0, 1)));
12962
12963 return fold_build2_loc (loc, code, type, newmod,
12964 fold_convert_loc (loc, newtype, arg1));
12965 }
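
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical signed int x,
	   x % 4 == 0  ->  (unsigned int) x % 4U == 0U
	 which can in turn become a simple mask test, since an unsigned
	 modulus by a power of two only keeps the low bits.  */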
12966
12967 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12968 C1 is a valid shift constant, and C2 is a power of two, i.e.
12969 a single bit. */
12970 if (TREE_CODE (arg0) == BIT_AND_EXPR
12971 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12972 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12973 == INTEGER_CST
12974 && integer_pow2p (TREE_OPERAND (arg0, 1))
12975 && integer_zerop (arg1))
12976 {
12977 tree itype = TREE_TYPE (arg0);
12978 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12979 prec = TYPE_PRECISION (itype);
12980
12981 /* Check for a valid shift count. */
12982 if (TREE_INT_CST_HIGH (arg001) == 0
12983 && TREE_INT_CST_LOW (arg001) < prec)
12984 {
12985 tree arg01 = TREE_OPERAND (arg0, 1);
12986 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12987 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12988 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12989 can be rewritten as (X & (C2 << C1)) != 0. */
12990 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12991 {
12992 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12993 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12994 return fold_build2_loc (loc, code, type, tem,
12995 fold_convert_loc (loc, itype, arg1));
12996 }
12997 /* Otherwise, for signed (arithmetic) shifts,
12998 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12999 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13000 else if (!TYPE_UNSIGNED (itype))
13001 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13002 arg000, build_int_cst (itype, 0));
13003 /* Otherwise, for unsigned (logical) shifts,
13004 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13005 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13006 else
13007 return omit_one_operand_loc (loc, type,
13008 code == EQ_EXPR ? integer_one_node
13009 : integer_zero_node,
13010 arg000);
13011 }
13012 }
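
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical 32-bit int x,
	   ((x >> 3) & 4) != 0   ->  (x & 32) != 0   (4 << 3 fits in the type)
	   ((x >> 29) & 8) != 0  ->  x < 0           (the tested bit is a
						      copy of the sign bit)  */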
13013
13014 /* If we have (A & C) == C where C is a power of 2, convert this into
13015 (A & C) != 0. Similarly for NE_EXPR. */
13016 if (TREE_CODE (arg0) == BIT_AND_EXPR
13017 && integer_pow2p (TREE_OPERAND (arg0, 1))
13018 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13019 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13020 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13021 integer_zero_node));
13022
13023 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13024 bit, then fold the expression into A < 0 or A >= 0. */
13025 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13026 if (tem)
13027 return tem;
13028
13029 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13030 Similarly for NE_EXPR. */
13031 if (TREE_CODE (arg0) == BIT_AND_EXPR
13032 && TREE_CODE (arg1) == INTEGER_CST
13033 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13034 {
13035 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13036 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13037 TREE_OPERAND (arg0, 1));
13038 tree dandnotc
13039 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13040 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13041 notc);
13042 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13043 if (integer_nonzerop (dandnotc))
13044 return omit_one_operand_loc (loc, type, rslt, arg0);
13045 }
13046
13047 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13048 Similarly for NE_EXPR. */
13049 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13050 && TREE_CODE (arg1) == INTEGER_CST
13051 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13052 {
13053 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13054 tree candnotd
13055 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13056 TREE_OPERAND (arg0, 1),
13057 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13058 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13059 if (integer_nonzerop (candnotd))
13060 return omit_one_operand_loc (loc, type, rslt, arg0);
13061 }
13062
13063 /* If this is a comparison of a field, we may be able to simplify it. */
13064 if ((TREE_CODE (arg0) == COMPONENT_REF
13065 || TREE_CODE (arg0) == BIT_FIELD_REF)
13066 /* Handle the constant case even without -O
13067 to make sure the warnings are given. */
13068 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13069 {
13070 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13071 if (t1)
13072 return t1;
13073 }
13074
13075 /* Optimize comparisons of strlen vs zero to a compare of the
13076 first character of the string vs zero. To wit,
13077 strlen(ptr) == 0 => *ptr == 0
13078 strlen(ptr) != 0 => *ptr != 0
13079 Other cases should reduce to one of these two (or a constant)
13080 due to the return value of strlen being unsigned. */
13081 if (TREE_CODE (arg0) == CALL_EXPR
13082 && integer_zerop (arg1))
13083 {
13084 tree fndecl = get_callee_fndecl (arg0);
13085
13086 if (fndecl
13087 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13088 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13089 && call_expr_nargs (arg0) == 1
13090 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13091 {
13092 tree iref = build_fold_indirect_ref_loc (loc,
13093 CALL_EXPR_ARG (arg0, 0));
13094 return fold_build2_loc (loc, code, type, iref,
13095 build_int_cst (TREE_TYPE (iref), 0));
13096 }
13097 }
13098
13099 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13100 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13101 if (TREE_CODE (arg0) == RSHIFT_EXPR
13102 && integer_zerop (arg1)
13103 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13104 {
13105 tree arg00 = TREE_OPERAND (arg0, 0);
13106 tree arg01 = TREE_OPERAND (arg0, 1);
13107 tree itype = TREE_TYPE (arg00);
13108 if (TREE_INT_CST_HIGH (arg01) == 0
13109 && TREE_INT_CST_LOW (arg01)
13110 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13111 {
13112 if (TYPE_UNSIGNED (itype))
13113 {
13114 itype = signed_type_for (itype);
13115 arg00 = fold_convert_loc (loc, itype, arg00);
13116 }
13117 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13118 type, arg00, build_zero_cst (itype));
13119 }
13120 }
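
      /* Illustrative sketch (not part of the GCC sources): for a
	 hypothetical 32-bit unsigned int x,
	   (x >> 31) != 0  ->  (int) x < 0
	   (x >> 31) == 0  ->  (int) x >= 0  */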
13121
13122 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13123 if (integer_zerop (arg1)
13124 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13125 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13126 TREE_OPERAND (arg0, 1));
13127
13128 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13129 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13130 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13131 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13132 build_zero_cst (TREE_TYPE (arg0)));
13133 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13134 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13135 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13136 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13137 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13138 build_zero_cst (TREE_TYPE (arg0)));
13139
13140 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13141 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13142 && TREE_CODE (arg1) == INTEGER_CST
13143 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13144 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13145 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13146 TREE_OPERAND (arg0, 1), arg1));
13147
13148 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13149 (X & C) == 0 when C is a single bit. */
13150 if (TREE_CODE (arg0) == BIT_AND_EXPR
13151 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13152 && integer_zerop (arg1)
13153 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13154 {
13155 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13156 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13157 TREE_OPERAND (arg0, 1));
13158 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13159 type, tem,
13160 fold_convert_loc (loc, TREE_TYPE (arg0),
13161 arg1));
13162 }
13163
13164 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13165 constant C is a power of two, i.e. a single bit. */
13166 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13167 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13168 && integer_zerop (arg1)
13169 && integer_pow2p (TREE_OPERAND (arg0, 1))
13170 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13171 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13172 {
13173 tree arg00 = TREE_OPERAND (arg0, 0);
13174 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13175 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13176 }
13177
13178 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13179 when C is a power of two, i.e. a single bit.
13180 if (TREE_CODE (arg0) == BIT_AND_EXPR
13181 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13182 && integer_zerop (arg1)
13183 && integer_pow2p (TREE_OPERAND (arg0, 1))
13184 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13185 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13186 {
13187 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13188 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13189 arg000, TREE_OPERAND (arg0, 1));
13190 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13191 tem, build_int_cst (TREE_TYPE (tem), 0));
13192 }
13193
13194 if (integer_zerop (arg1)
13195 && tree_expr_nonzero_p (arg0))
13196 {
13197 tree res = constant_boolean_node (code == NE_EXPR, type);
13198 return omit_one_operand_loc (loc, type, res, arg0);
13199 }
13200
13201 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13202 if (TREE_CODE (arg0) == NEGATE_EXPR
13203 && TREE_CODE (arg1) == NEGATE_EXPR)
13204 return fold_build2_loc (loc, code, type,
13205 TREE_OPERAND (arg0, 0),
13206 fold_convert_loc (loc, TREE_TYPE (arg0),
13207 TREE_OPERAND (arg1, 0)));
13208
13209 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13210 if (TREE_CODE (arg0) == BIT_AND_EXPR
13211 && TREE_CODE (arg1) == BIT_AND_EXPR)
13212 {
13213 tree arg00 = TREE_OPERAND (arg0, 0);
13214 tree arg01 = TREE_OPERAND (arg0, 1);
13215 tree arg10 = TREE_OPERAND (arg1, 0);
13216 tree arg11 = TREE_OPERAND (arg1, 1);
13217 tree itype = TREE_TYPE (arg0);
13218
13219 if (operand_equal_p (arg01, arg11, 0))
13220 return fold_build2_loc (loc, code, type,
13221 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13222 fold_build2_loc (loc,
13223 BIT_XOR_EXPR, itype,
13224 arg00, arg10),
13225 arg01),
13226 build_zero_cst (itype));
13227
13228 if (operand_equal_p (arg01, arg10, 0))
13229 return fold_build2_loc (loc, code, type,
13230 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13231 fold_build2_loc (loc,
13232 BIT_XOR_EXPR, itype,
13233 arg00, arg11),
13234 arg01),
13235 build_zero_cst (itype));
13236
13237 if (operand_equal_p (arg00, arg11, 0))
13238 return fold_build2_loc (loc, code, type,
13239 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13240 fold_build2_loc (loc,
13241 BIT_XOR_EXPR, itype,
13242 arg01, arg10),
13243 arg00),
13244 build_zero_cst (itype));
13245
13246 if (operand_equal_p (arg00, arg10, 0))
13247 return fold_build2_loc (loc, code, type,
13248 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13249 fold_build2_loc (loc,
13250 BIT_XOR_EXPR, itype,
13251 arg01, arg11),
13252 arg00),
13253 build_zero_cst (itype));
13254 }
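	  /* Worked example (added commentary): masked equality compares
	     only the bits under the mask, so with C == 0xff
	       (x & 0xff) == (y & 0xff)  folds to  ((x ^ y) & 0xff) == 0
	     i.e. "no bit under the mask differs between x and y".  */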
13255
13256 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13257 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13258 {
13259 tree arg00 = TREE_OPERAND (arg0, 0);
13260 tree arg01 = TREE_OPERAND (arg0, 1);
13261 tree arg10 = TREE_OPERAND (arg1, 0);
13262 tree arg11 = TREE_OPERAND (arg1, 1);
13263 tree itype = TREE_TYPE (arg0);
13264
13265 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13266 operand_equal_p guarantees no side-effects so we don't need
13267 to use omit_one_operand on Z. */
13268 if (operand_equal_p (arg01, arg11, 0))
13269 return fold_build2_loc (loc, code, type, arg00,
13270 fold_convert_loc (loc, TREE_TYPE (arg00),
13271 arg10));
13272 if (operand_equal_p (arg01, arg10, 0))
13273 return fold_build2_loc (loc, code, type, arg00,
13274 fold_convert_loc (loc, TREE_TYPE (arg00),
13275 arg11));
13276 if (operand_equal_p (arg00, arg11, 0))
13277 return fold_build2_loc (loc, code, type, arg01,
13278 fold_convert_loc (loc, TREE_TYPE (arg01),
13279 arg10));
13280 if (operand_equal_p (arg00, arg10, 0))
13281 return fold_build2_loc (loc, code, type, arg01,
13282 fold_convert_loc (loc, TREE_TYPE (arg01),
13283 arg11));
13284
13285 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13286 if (TREE_CODE (arg01) == INTEGER_CST
13287 && TREE_CODE (arg11) == INTEGER_CST)
13288 {
13289 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13290 fold_convert_loc (loc, itype, arg11));
13291 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13292 return fold_build2_loc (loc, code, type, tem,
13293 fold_convert_loc (loc, itype, arg10));
13294 }
13295 }
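	  /* Worked example (added commentary): XOR by a common term
	     cancels, and two XORed constants combine, e.g.
	       (x ^ z) == (y ^ z)   folds to   x == y
	       (x ^ 5) == (y ^ 3)   folds to   (x ^ (5 ^ 3)) == y,
	     i.e. (x ^ 6) == y.  */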
13296
13297 /* Attempt to simplify equality/inequality comparisons of complex
13298 values. Only lower the comparison if the result is known or
13299 can be simplified to a single scalar comparison. */
13300 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13301 || TREE_CODE (arg0) == COMPLEX_CST)
13302 && (TREE_CODE (arg1) == COMPLEX_EXPR
13303 || TREE_CODE (arg1) == COMPLEX_CST))
13304 {
13305 tree real0, imag0, real1, imag1;
13306 tree rcond, icond;
13307
13308 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13309 {
13310 real0 = TREE_OPERAND (arg0, 0);
13311 imag0 = TREE_OPERAND (arg0, 1);
13312 }
13313 else
13314 {
13315 real0 = TREE_REALPART (arg0);
13316 imag0 = TREE_IMAGPART (arg0);
13317 }
13318
13319 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13320 {
13321 real1 = TREE_OPERAND (arg1, 0);
13322 imag1 = TREE_OPERAND (arg1, 1);
13323 }
13324 else
13325 {
13326 real1 = TREE_REALPART (arg1);
13327 imag1 = TREE_IMAGPART (arg1);
13328 }
13329
13330 rcond = fold_binary_loc (loc, code, type, real0, real1);
13331 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13332 {
13333 if (integer_zerop (rcond))
13334 {
13335 if (code == EQ_EXPR)
13336 return omit_two_operands_loc (loc, type, boolean_false_node,
13337 imag0, imag1);
13338 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13339 }
13340 else
13341 {
13342 if (code == NE_EXPR)
13343 return omit_two_operands_loc (loc, type, boolean_true_node,
13344 imag0, imag1);
13345 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13346 }
13347 }
13348
13349 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13350 if (icond && TREE_CODE (icond) == INTEGER_CST)
13351 {
13352 if (integer_zerop (icond))
13353 {
13354 if (code == EQ_EXPR)
13355 return omit_two_operands_loc (loc, type, boolean_false_node,
13356 real0, real1);
13357 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13358 }
13359 else
13360 {
13361 if (code == NE_EXPR)
13362 return omit_two_operands_loc (loc, type, boolean_true_node,
13363 real0, real1);
13364 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13365 }
13366 }
13367 }
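	  /* Sketch of the lowering (added commentary): for complex a
	     and b, a == b holds iff both the real and the imaginary
	     parts compare equal.  If the real-part comparison folds to
	     a constant we can decide the whole comparison or reduce it
	     to the imaginary parts, e.g. with a = COMPLEX_EXPR <1, x>
	     and b = COMPLEX_EXPR <1, y>, a == b folds to x == y; with
	     unequal constant real parts it folds to false, keeping
	     imag0/imag1 around for their side effects.  */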
13368
13369 return NULL_TREE;
13370
13371 case LT_EXPR:
13372 case GT_EXPR:
13373 case LE_EXPR:
13374 case GE_EXPR:
13375 tem = fold_comparison (loc, code, type, op0, op1);
13376 if (tem != NULL_TREE)
13377 return tem;
13378
13379 /* Transform comparisons of the form X +- C CMP X. */
13380 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13381 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13382 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13383 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13384 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13385 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13386 {
13387 tree arg01 = TREE_OPERAND (arg0, 1);
13388 enum tree_code code0 = TREE_CODE (arg0);
13389 int is_positive;
13390
13391 if (TREE_CODE (arg01) == REAL_CST)
13392 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13393 else
13394 is_positive = tree_int_cst_sgn (arg01);
13395
13396 /* (X - c) > X becomes false. */
13397 if (code == GT_EXPR
13398 && ((code0 == MINUS_EXPR && is_positive >= 0)
13399 || (code0 == PLUS_EXPR && is_positive <= 0)))
13400 {
13401 if (TREE_CODE (arg01) == INTEGER_CST
13402 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13403 fold_overflow_warning (("assuming signed overflow does not "
13404 "occur when assuming that (X - c) > X "
13405 "is always false"),
13406 WARN_STRICT_OVERFLOW_ALL);
13407 return constant_boolean_node (0, type);
13408 }
13409
13410 /* Likewise (X + c) < X becomes false. */
13411 if (code == LT_EXPR
13412 && ((code0 == PLUS_EXPR && is_positive >= 0)
13413 || (code0 == MINUS_EXPR && is_positive <= 0)))
13414 {
13415 if (TREE_CODE (arg01) == INTEGER_CST
13416 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13417 fold_overflow_warning (("assuming signed overflow does not "
13418 "occur when assuming that "
13419 "(X + c) < X is always false"),
13420 WARN_STRICT_OVERFLOW_ALL);
13421 return constant_boolean_node (0, type);
13422 }
13423
13424 /* Convert (X - c) <= X to true. */
13425 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13426 && code == LE_EXPR
13427 && ((code0 == MINUS_EXPR && is_positive >= 0)
13428 || (code0 == PLUS_EXPR && is_positive <= 0)))
13429 {
13430 if (TREE_CODE (arg01) == INTEGER_CST
13431 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13432 fold_overflow_warning (("assuming signed overflow does not "
13433 "occur when assuming that "
13434 "(X - c) <= X is always true"),
13435 WARN_STRICT_OVERFLOW_ALL);
13436 return constant_boolean_node (1, type);
13437 }
13438
13439 /* Convert (X + c) >= X to true. */
13440 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13441 && code == GE_EXPR
13442 && ((code0 == PLUS_EXPR && is_positive >= 0)
13443 || (code0 == MINUS_EXPR && is_positive <= 0)))
13444 {
13445 if (TREE_CODE (arg01) == INTEGER_CST
13446 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13447 fold_overflow_warning (("assuming signed overflow does not "
13448 "occur when assuming that "
13449 "(X + c) >= X is always true"),
13450 WARN_STRICT_OVERFLOW_ALL);
13451 return constant_boolean_node (1, type);
13452 }
13453
13454 if (TREE_CODE (arg01) == INTEGER_CST)
13455 {
13456 /* Convert X + c > X and X - c < X to true for integers. */
13457 if (code == GT_EXPR
13458 && ((code0 == PLUS_EXPR && is_positive > 0)
13459 || (code0 == MINUS_EXPR && is_positive < 0)))
13460 {
13461 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13462 fold_overflow_warning (("assuming signed overflow does "
13463 "not occur when assuming that "
13464 "(X + c) > X is always true"),
13465 WARN_STRICT_OVERFLOW_ALL);
13466 return constant_boolean_node (1, type);
13467 }
13468
13469 if (code == LT_EXPR
13470 && ((code0 == MINUS_EXPR && is_positive > 0)
13471 || (code0 == PLUS_EXPR && is_positive < 0)))
13472 {
13473 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13474 fold_overflow_warning (("assuming signed overflow does "
13475 "not occur when assuming that "
13476 "(X - c) < X is always true"),
13477 WARN_STRICT_OVERFLOW_ALL);
13478 return constant_boolean_node (1, type);
13479 }
13480
13481 /* Convert X + c <= X and X - c >= X to false for integers. */
13482 if (code == LE_EXPR
13483 && ((code0 == PLUS_EXPR && is_positive > 0)
13484 || (code0 == MINUS_EXPR && is_positive < 0)))
13485 {
13486 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13487 fold_overflow_warning (("assuming signed overflow does "
13488 "not occur when assuming that "
13489 "(X + c) <= X is always false"),
13490 WARN_STRICT_OVERFLOW_ALL);
13491 return constant_boolean_node (0, type);
13492 }
13493
13494 if (code == GE_EXPR
13495 && ((code0 == MINUS_EXPR && is_positive > 0)
13496 || (code0 == PLUS_EXPR && is_positive < 0)))
13497 {
13498 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13499 fold_overflow_warning (("assuming signed overflow does "
13500 "not occur when assuming that "
13501 "(X - c) >= X is always false"),
13502 WARN_STRICT_OVERFLOW_ALL);
13503 return constant_boolean_node (0, type);
13504 }
13505 }
13506 }
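	  /* Worked example (added commentary): with signed int x and
	     -fstrict-overflow semantics,
	       x + 1 > x    folds to   1 (true)
	       x - 1 < x    folds to   1 (true)
	       x + 1 <= x   folds to   0 (false)
	     each emitting a -Wstrict-overflow note, since the folds are
	     only valid when signed overflow is undefined.  */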
13507
13508 /* Comparisons with the highest or lowest possible integer of
13509 the specified precision will have known values. */
13510 {
13511 tree arg1_type = TREE_TYPE (arg1);
13512 unsigned int width = TYPE_PRECISION (arg1_type);
13513
13514 if (TREE_CODE (arg1) == INTEGER_CST
13515 && width <= HOST_BITS_PER_DOUBLE_INT
13516 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13517 {
13518 HOST_WIDE_INT signed_max_hi;
13519 unsigned HOST_WIDE_INT signed_max_lo;
13520 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13521
13522 if (width <= HOST_BITS_PER_WIDE_INT)
13523 {
13524 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13525 - 1;
13526 signed_max_hi = 0;
13527 max_hi = 0;
13528
13529 if (TYPE_UNSIGNED (arg1_type))
13530 {
13531 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13532 min_lo = 0;
13533 min_hi = 0;
13534 }
13535 else
13536 {
13537 max_lo = signed_max_lo;
13538 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13539 min_hi = -1;
13540 }
13541 }
13542 else
13543 {
13544 width -= HOST_BITS_PER_WIDE_INT;
13545 signed_max_lo = -1;
13546 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13547 - 1;
13548 max_lo = -1;
13549 min_lo = 0;
13550
13551 if (TYPE_UNSIGNED (arg1_type))
13552 {
13553 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13554 min_hi = 0;
13555 }
13556 else
13557 {
13558 max_hi = signed_max_hi;
13559 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13560 }
13561 }
13562
13563 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13564 && TREE_INT_CST_LOW (arg1) == max_lo)
13565 switch (code)
13566 {
13567 case GT_EXPR:
13568 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13569
13570 case GE_EXPR:
13571 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13572
13573 case LE_EXPR:
13574 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13575
13576 case LT_EXPR:
13577 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13578
13579 /* The GE_EXPR and LT_EXPR cases above are not normally
13580 reached because of previous transformations. */
13581
13582 default:
13583 break;
13584 }
13585 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13586 == max_hi
13587 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13588 switch (code)
13589 {
13590 case GT_EXPR:
13591 arg1 = const_binop (PLUS_EXPR, arg1,
13592 build_int_cst (TREE_TYPE (arg1), 1));
13593 return fold_build2_loc (loc, EQ_EXPR, type,
13594 fold_convert_loc (loc,
13595 TREE_TYPE (arg1), arg0),
13596 arg1);
13597 case LE_EXPR:
13598 arg1 = const_binop (PLUS_EXPR, arg1,
13599 build_int_cst (TREE_TYPE (arg1), 1));
13600 return fold_build2_loc (loc, NE_EXPR, type,
13601 fold_convert_loc (loc, TREE_TYPE (arg1),
13602 arg0),
13603 arg1);
13604 default:
13605 break;
13606 }
13607 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13608 == min_hi
13609 && TREE_INT_CST_LOW (arg1) == min_lo)
13610 switch (code)
13611 {
13612 case LT_EXPR:
13613 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13614
13615 case LE_EXPR:
13616 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13617
13618 case GE_EXPR:
13619 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13620
13621 case GT_EXPR:
13622 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13623
13624 default:
13625 break;
13626 }
13627 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13628 == min_hi
13629 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13630 switch (code)
13631 {
13632 case GE_EXPR:
13633 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13634 return fold_build2_loc (loc, NE_EXPR, type,
13635 fold_convert_loc (loc,
13636 TREE_TYPE (arg1), arg0),
13637 arg1);
13638 case LT_EXPR:
13639 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13640 return fold_build2_loc (loc, EQ_EXPR, type,
13641 fold_convert_loc (loc, TREE_TYPE (arg1),
13642 arg0),
13643 arg1);
13644 default:
13645 break;
13646 }
13647
13648 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13649 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13650 && TYPE_UNSIGNED (arg1_type)
13651 /* We will flip the signedness of the comparison operator
13652 associated with the mode of arg1, so the sign bit is
13653 specified by this mode. Check that arg1 is the signed
13654 max associated with this sign bit. */
13655 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13656 /* signed_type does not work on pointer types. */
13657 && INTEGRAL_TYPE_P (arg1_type))
13658 {
13659 /* The following case also applies to X < signed_max+1
13660 		 and X >= signed_max+1 because of previous transformations.  */
13661 if (code == LE_EXPR || code == GT_EXPR)
13662 {
13663 tree st;
13664 st = signed_type_for (TREE_TYPE (arg1));
13665 return fold_build2_loc (loc,
13666 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13667 type, fold_convert_loc (loc, st, arg0),
13668 build_int_cst (st, 0));
13669 }
13670 }
13671 }
13672 }
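	/* Worked examples (added commentary): for unsigned char x,
	   whose maximum is 255,
	     x >  255   folds to   0 (false)
	     x <= 255   folds to   1 (true)
	     x >  254   folds to   x == 255
	     x <= 127   folds to   (signed char) x >= 0
	   the last case being the sign-bit test produced by flipping
	   the signedness of the comparison.  */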
13673
13674 /* If we are comparing an ABS_EXPR with a constant, we can
13675 convert all the cases into explicit comparisons, but they may
13676 well not be faster than doing the ABS and one comparison.
13677 But ABS (X) <= C is a range comparison, which becomes a subtraction
13678 and a comparison, and is probably faster. */
13679 if (code == LE_EXPR
13680 && TREE_CODE (arg1) == INTEGER_CST
13681 && TREE_CODE (arg0) == ABS_EXPR
13682 && ! TREE_SIDE_EFFECTS (arg0)
13683 && (0 != (tem = negate_expr (arg1)))
13684 && TREE_CODE (tem) == INTEGER_CST
13685 && !TREE_OVERFLOW (tem))
13686 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13687 build2 (GE_EXPR, type,
13688 TREE_OPERAND (arg0, 0), tem),
13689 build2 (LE_EXPR, type,
13690 TREE_OPERAND (arg0, 0), arg1));
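      /* For instance (added commentary), ABS_EXPR <x> <= 5 becomes the
	 range test x >= -5 && x <= 5, built with TRUTH_ANDIF_EXPR; x is
	 only duplicated when it has no side effects, as checked by
	 TREE_SIDE_EFFECTS above.  */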
13691
13692 /* Convert ABS_EXPR<x> >= 0 to true. */
13693 strict_overflow_p = false;
13694 if (code == GE_EXPR
13695 && (integer_zerop (arg1)
13696 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13697 && real_zerop (arg1)))
13698 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13699 {
13700 if (strict_overflow_p)
13701 fold_overflow_warning (("assuming signed overflow does not occur "
13702 "when simplifying comparison of "
13703 "absolute value and zero"),
13704 WARN_STRICT_OVERFLOW_CONDITIONAL);
13705 return omit_one_operand_loc (loc, type,
13706 constant_boolean_node (true, type),
13707 arg0);
13708 }
13709
13710 /* Convert ABS_EXPR<x> < 0 to false. */
13711 strict_overflow_p = false;
13712 if (code == LT_EXPR
13713 && (integer_zerop (arg1) || real_zerop (arg1))
13714 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13715 {
13716 if (strict_overflow_p)
13717 fold_overflow_warning (("assuming signed overflow does not occur "
13718 "when simplifying comparison of "
13719 "absolute value and zero"),
13720 WARN_STRICT_OVERFLOW_CONDITIONAL);
13721 return omit_one_operand_loc (loc, type,
13722 constant_boolean_node (false, type),
13723 arg0);
13724 }
13725
13726 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13727 and similarly for >= into !=. */
13728 if ((code == LT_EXPR || code == GE_EXPR)
13729 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13730 && TREE_CODE (arg1) == LSHIFT_EXPR
13731 && integer_onep (TREE_OPERAND (arg1, 0)))
13732 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13733 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13734 TREE_OPERAND (arg1, 1)),
13735 build_zero_cst (TREE_TYPE (arg0)));
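      /* For example (added commentary), with unsigned x:
	   x <  (1 << y)   folds to   (x >> y) == 0
	   x >= (1 << y)   folds to   (x >> y) != 0
	 since shifting x right by y discards exactly the bits that are
	 allowed to be set when x < 1 << y.  */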
13736
13737       /* Similarly for X < (cast) (1 << Y).  But the cast can't be narrowing,
13738 	 otherwise Y might be >= # of bits in X's type and thus e.g.
13739 	 (unsigned char) (1 << Y) for Y == 15 might be 0.
13740 	 If the cast is widening, then 1 << Y should have unsigned type,
13741 	 otherwise if Y is the number of bits in the signed shift type minus 1,
13742 	 we can't optimize this.  E.g. (unsigned long long) (1 << Y) for
13743 	 Y == 31 might be 0xffffffff80000000.  */
13744 if ((code == LT_EXPR || code == GE_EXPR)
13745 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13746 && CONVERT_EXPR_P (arg1)
13747 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13748 && (TYPE_PRECISION (TREE_TYPE (arg1))
13749 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13750 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13751 || (TYPE_PRECISION (TREE_TYPE (arg1))
13752 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13753 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13754 {
13755 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13756 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13757 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13758 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13759 build_zero_cst (TREE_TYPE (arg0)));
13760 }
13761
13762 return NULL_TREE;
13763
13764 case UNORDERED_EXPR:
13765 case ORDERED_EXPR:
13766 case UNLT_EXPR:
13767 case UNLE_EXPR:
13768 case UNGT_EXPR:
13769 case UNGE_EXPR:
13770 case UNEQ_EXPR:
13771 case LTGT_EXPR:
13772 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13773 {
13774 t1 = fold_relational_const (code, type, arg0, arg1);
13775 if (t1 != NULL_TREE)
13776 return t1;
13777 }
13778
13779 /* If the first operand is NaN, the result is constant. */
13780 if (TREE_CODE (arg0) == REAL_CST
13781 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13782 && (code != LTGT_EXPR || ! flag_trapping_math))
13783 {
13784 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13785 ? integer_zero_node
13786 : integer_one_node;
13787 return omit_one_operand_loc (loc, type, t1, arg1);
13788 }
13789
13790 /* If the second operand is NaN, the result is constant. */
13791 if (TREE_CODE (arg1) == REAL_CST
13792 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13793 && (code != LTGT_EXPR || ! flag_trapping_math))
13794 {
13795 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13796 ? integer_zero_node
13797 : integer_one_node;
13798 return omit_one_operand_loc (loc, type, t1, arg0);
13799 }
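      /* Illustration (added commentary): with a NaN operand, every
	 unordered comparison (UNLT, UNGE, UNEQ, ...) is true while
	 ORDERED and LTGT are false, so e.g. __builtin_isunordered (x, nan)
	 folds to 1.  The LTGT case is only folded when
	 !flag_trapping_math, since LTGT may trap on the NaN.  */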
13800
13801 /* Simplify unordered comparison of something with itself. */
13802 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13803 && operand_equal_p (arg0, arg1, 0))
13804 return constant_boolean_node (1, type);
13805
13806 if (code == LTGT_EXPR
13807 && !flag_trapping_math
13808 && operand_equal_p (arg0, arg1, 0))
13809 return constant_boolean_node (0, type);
13810
13811 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13812 {
13813 tree targ0 = strip_float_extensions (arg0);
13814 tree targ1 = strip_float_extensions (arg1);
13815 tree newtype = TREE_TYPE (targ0);
13816
13817 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13818 newtype = TREE_TYPE (targ1);
13819
13820 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13821 return fold_build2_loc (loc, code, type,
13822 fold_convert_loc (loc, newtype, targ0),
13823 fold_convert_loc (loc, newtype, targ1));
13824 }
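      /* For example (added commentary), with float f and g,
	   (double) f < (double) g   folds to   f < g
	 because the extension to double is exact and cannot change the
	 comparison result; the conversions are only stripped when the
	 common stripped type is narrower than the original one.  */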
13825
13826 return NULL_TREE;
13827
13828 case COMPOUND_EXPR:
13829 /* When pedantic, a compound expression can be neither an lvalue
13830 nor an integer constant expression. */
13831 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13832 return NULL_TREE;
13833       /* Don't let (0, 0) be a null pointer constant.  */
13834 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13835 : fold_convert_loc (loc, type, arg1);
13836 return pedantic_non_lvalue_loc (loc, tem);
13837
13838 case COMPLEX_EXPR:
13839 if ((TREE_CODE (arg0) == REAL_CST
13840 && TREE_CODE (arg1) == REAL_CST)
13841 || (TREE_CODE (arg0) == INTEGER_CST
13842 && TREE_CODE (arg1) == INTEGER_CST))
13843 return build_complex (type, arg0, arg1);
13844 if (TREE_CODE (arg0) == REALPART_EXPR
13845 && TREE_CODE (arg1) == IMAGPART_EXPR
13846 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13847 && operand_equal_p (TREE_OPERAND (arg0, 0),
13848 TREE_OPERAND (arg1, 0), 0))
13849 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13850 TREE_OPERAND (arg1, 0));
13851 return NULL_TREE;
13852
13853 case ASSERT_EXPR:
13854 /* An ASSERT_EXPR should never be passed to fold_binary. */
13855 gcc_unreachable ();
13856
13857 case VEC_PACK_TRUNC_EXPR:
13858 case VEC_PACK_FIX_TRUNC_EXPR:
13859 {
13860 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13861 tree *elts;
13862
13863 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13864 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13865 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13866 return NULL_TREE;
13867
13868 elts = XALLOCAVEC (tree, nelts);
13869 if (!vec_cst_ctor_to_array (arg0, elts)
13870 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13871 return NULL_TREE;
13872
13873 for (i = 0; i < nelts; i++)
13874 {
13875 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13876 ? NOP_EXPR : FIX_TRUNC_EXPR,
13877 TREE_TYPE (type), elts[i]);
13878 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13879 return NULL_TREE;
13880 }
13881
13882 return build_vector (type, elts);
13883 }
13884
13885 case VEC_WIDEN_MULT_LO_EXPR:
13886 case VEC_WIDEN_MULT_HI_EXPR:
13887 case VEC_WIDEN_MULT_EVEN_EXPR:
13888 case VEC_WIDEN_MULT_ODD_EXPR:
13889 {
13890 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13891 unsigned int out, ofs, scale;
13892 tree *elts;
13893
13894 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13895 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13896 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13897 return NULL_TREE;
13898
13899 elts = XALLOCAVEC (tree, nelts * 4);
13900 if (!vec_cst_ctor_to_array (arg0, elts)
13901 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13902 return NULL_TREE;
13903
13904 if (code == VEC_WIDEN_MULT_LO_EXPR)
13905 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13906 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13907 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13908 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13909 scale = 1, ofs = 0;
13910 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13911 scale = 1, ofs = 1;
13912
13913 for (out = 0; out < nelts; out++)
13914 {
13915 unsigned int in1 = (out << scale) + ofs;
13916 unsigned int in2 = in1 + nelts * 2;
13917 tree t1, t2;
13918
13919 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13920 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13921
13922 if (t1 == NULL_TREE || t2 == NULL_TREE)
13923 return NULL_TREE;
13924 elts[out] = const_binop (MULT_EXPR, t1, t2);
13925 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13926 return NULL_TREE;
13927 }
13928
13929 return build_vector (type, elts);
13930 }
13931
13932 default:
13933 return NULL_TREE;
13934 } /* switch (code) */
13935 }
13936
13937 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13938 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13939 of GOTO_EXPR. */
13940
13941 static tree
13942 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13943 {
13944 switch (TREE_CODE (*tp))
13945 {
13946 case LABEL_EXPR:
13947 return *tp;
13948
13949 case GOTO_EXPR:
13950 *walk_subtrees = 0;
13951
13952 /* ... fall through ... */
13953
13954 default:
13955 return NULL_TREE;
13956 }
13957 }
13958
13959 /* Return whether the sub-tree ST contains a label which is accessible from
13960 outside the sub-tree. */
13961
13962 static bool
13963 contains_label_p (tree st)
13964 {
13965 return
13966    (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13967 }
13968
13969 /* Fold a ternary expression of code CODE and type TYPE with operands
13970 OP0, OP1, and OP2. Return the folded expression if folding is
13971 successful. Otherwise, return NULL_TREE. */
13972
13973 tree
13974 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13975 tree op0, tree op1, tree op2)
13976 {
13977 tree tem;
13978 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13979 enum tree_code_class kind = TREE_CODE_CLASS (code);
13980
13981 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13982 && TREE_CODE_LENGTH (code) == 3);
13983
13984 /* Strip any conversions that don't change the mode. This is safe
13985 for every expression, except for a comparison expression because
13986 its signedness is derived from its operands. So, in the latter
13987 case, only strip conversions that don't change the signedness.
13988
13989 Note that this is done as an internal manipulation within the
13990 constant folder, in order to find the simplest representation of
13991      the arguments so that their form can be studied.  In any case,
13992 the appropriate type conversions should be put back in the tree
13993 that will get out of the constant folder. */
13994 if (op0)
13995 {
13996 arg0 = op0;
13997 STRIP_NOPS (arg0);
13998 }
13999
14000 if (op1)
14001 {
14002 arg1 = op1;
14003 STRIP_NOPS (arg1);
14004 }
14005
14006 if (op2)
14007 {
14008 arg2 = op2;
14009 STRIP_NOPS (arg2);
14010 }
14011
14012 switch (code)
14013 {
14014 case COMPONENT_REF:
14015 if (TREE_CODE (arg0) == CONSTRUCTOR
14016 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14017 {
14018 unsigned HOST_WIDE_INT idx;
14019 tree field, value;
14020 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14021 if (field == arg1)
14022 return value;
14023 }
14024 return NULL_TREE;
14025
14026 case COND_EXPR:
14027 case VEC_COND_EXPR:
14028 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14029 so all simple results must be passed through pedantic_non_lvalue. */
14030 if (TREE_CODE (arg0) == INTEGER_CST)
14031 {
14032 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14033 tem = integer_zerop (arg0) ? op2 : op1;
14034 /* Only optimize constant conditions when the selected branch
14035 has the same type as the COND_EXPR. This avoids optimizing
14036 away "c ? x : throw", where the throw has a void type.
14037 	     Avoid throwing away an operand that contains a label.  */
14038 if ((!TREE_SIDE_EFFECTS (unused_op)
14039 || !contains_label_p (unused_op))
14040 && (! VOID_TYPE_P (TREE_TYPE (tem))
14041 || VOID_TYPE_P (type)))
14042 return pedantic_non_lvalue_loc (loc, tem);
14043 return NULL_TREE;
14044 }
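	  /* For instance (added commentary), 1 ? a : b folds to a and
	     0 ? a : b folds to b, unless the discarded arm contains a
	     label reachable from elsewhere, or the selected arm has
	     void type while the COND_EXPR does not (the
	     "c ? x : throw" case above).  */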
14045 else if (TREE_CODE (arg0) == VECTOR_CST)
14046 {
14047 if (integer_all_onesp (arg0))
14048 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14049 if (integer_zerop (arg0))
14050 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14051
14052 if ((TREE_CODE (arg1) == VECTOR_CST
14053 || TREE_CODE (arg1) == CONSTRUCTOR)
14054 && (TREE_CODE (arg2) == VECTOR_CST
14055 || TREE_CODE (arg2) == CONSTRUCTOR))
14056 {
14057 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14058 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14059 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14060 for (i = 0; i < nelts; i++)
14061 {
14062 tree val = VECTOR_CST_ELT (arg0, i);
14063 if (integer_all_onesp (val))
14064 sel[i] = i;
14065 else if (integer_zerop (val))
14066 sel[i] = nelts + i;
14067 else /* Currently unreachable. */
14068 return NULL_TREE;
14069 }
14070 tree t = fold_vec_perm (type, arg1, arg2, sel);
14071 if (t != NULL_TREE)
14072 return t;
14073 }
14074 }
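	  /* Sketch (added commentary): a VEC_COND_EXPR whose mask is a
	     constant vector becomes a permutation of its two arms; e.g.
	     for a 4-element mask { -1, 0, 0, -1 } the selector is
	     { 0, 5, 6, 3 }, taking elements 0 and 3 from arg1 and
	     elements 1 and 2 from arg2.  */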
14075
14076 if (operand_equal_p (arg1, op2, 0))
14077 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14078
14079 /* If we have A op B ? A : C, we may be able to convert this to a
14080 simpler expression, depending on the operation and the values
14081 of B and C. Signed zeros prevent all of these transformations,
14082 for reasons given above each one.
14083
14084 Also try swapping the arguments and inverting the conditional. */
14085 if (COMPARISON_CLASS_P (arg0)
14086 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14087 arg1, TREE_OPERAND (arg0, 1))
14088 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14089 {
14090 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14091 if (tem)
14092 return tem;
14093 }
14094
14095 if (COMPARISON_CLASS_P (arg0)
14096 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14097 op2,
14098 TREE_OPERAND (arg0, 1))
14099 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14100 {
14101 location_t loc0 = expr_location_or (arg0, loc);
14102 tem = fold_invert_truthvalue (loc0, arg0);
14103 if (tem && COMPARISON_CLASS_P (tem))
14104 {
14105 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14106 if (tem)
14107 return tem;
14108 }
14109 }
14110
14111 /* If the second operand is simpler than the third, swap them
14112 since that produces better jump optimization results. */
14113 if (truth_value_p (TREE_CODE (arg0))
14114 && tree_swap_operands_p (op1, op2, false))
14115 {
14116 location_t loc0 = expr_location_or (arg0, loc);
14117 /* See if this can be inverted. If it can't, possibly because
14118 it was a floating-point inequality comparison, don't do
14119 anything. */
14120 tem = fold_invert_truthvalue (loc0, arg0);
14121 if (tem)
14122 return fold_build3_loc (loc, code, type, tem, op2, op1);
14123 }
14124
14125 /* Convert A ? 1 : 0 to simply A. */
14126 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14127 : (integer_onep (op1)
14128 && !VECTOR_TYPE_P (type)))
14129 && integer_zerop (op2)
14130 /* If we try to convert OP0 to our type, the
14131 call to fold will try to move the conversion inside
14132 a COND, which will recurse. In that case, the COND_EXPR
14133 is probably the best choice, so leave it alone. */
14134 && type == TREE_TYPE (arg0))
14135 return pedantic_non_lvalue_loc (loc, arg0);
14136
14137 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14138 over COND_EXPR in cases such as floating point comparisons. */
14139 if (integer_zerop (op1)
14140 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14141 : (integer_onep (op2)
14142 && !VECTOR_TYPE_P (type)))
14143 && truth_value_p (TREE_CODE (arg0)))
14144 return pedantic_non_lvalue_loc (loc,
14145 fold_convert_loc (loc, type,
14146 invert_truthvalue_loc (loc,
14147 arg0)));
14148
14149 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14150 if (TREE_CODE (arg0) == LT_EXPR
14151 && integer_zerop (TREE_OPERAND (arg0, 1))
14152 && integer_zerop (op2)
14153 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14154 {
14155 /* sign_bit_p only checks ARG1 bits within A's precision.
14156 If <sign bit of A> has wider type than A, bits outside
14157 of A's precision in <sign bit of A> need to be checked.
14158 	     If they are all 0, this optimization needs to be done
14159 	     in unsigned A's type; if they are all 1, in signed A's type;
14160 	     otherwise this can't be done.  */
14161 if (TYPE_PRECISION (TREE_TYPE (tem))
14162 < TYPE_PRECISION (TREE_TYPE (arg1))
14163 && TYPE_PRECISION (TREE_TYPE (tem))
14164 < TYPE_PRECISION (type))
14165 {
14166 unsigned HOST_WIDE_INT mask_lo;
14167 HOST_WIDE_INT mask_hi;
14168 int inner_width, outer_width;
14169 tree tem_type;
14170
14171 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14172 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14173 if (outer_width > TYPE_PRECISION (type))
14174 outer_width = TYPE_PRECISION (type);
14175
14176 if (outer_width > HOST_BITS_PER_WIDE_INT)
14177 {
14178 mask_hi = ((unsigned HOST_WIDE_INT) -1
14179 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14180 mask_lo = -1;
14181 }
14182 else
14183 {
14184 mask_hi = 0;
14185 mask_lo = ((unsigned HOST_WIDE_INT) -1
14186 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14187 }
14188 if (inner_width > HOST_BITS_PER_WIDE_INT)
14189 {
14190 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14191 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14192 mask_lo = 0;
14193 }
14194 else
14195 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14196 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14197
14198 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14199 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14200 {
14201 tem_type = signed_type_for (TREE_TYPE (tem));
14202 tem = fold_convert_loc (loc, tem_type, tem);
14203 }
14204 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14205 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14206 {
14207 tem_type = unsigned_type_for (TREE_TYPE (tem));
14208 tem = fold_convert_loc (loc, tem_type, tem);
14209 }
14210 else
14211 tem = NULL;
14212 }
14213
14214 if (tem)
14215 return
14216 fold_convert_loc (loc, type,
14217 fold_build2_loc (loc, BIT_AND_EXPR,
14218 TREE_TYPE (tem), tem,
14219 fold_convert_loc (loc,
14220 TREE_TYPE (tem),
14221 arg1)));
14222 }
14223
14224 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14225 already handled above. */
14226 if (TREE_CODE (arg0) == BIT_AND_EXPR
14227 && integer_onep (TREE_OPERAND (arg0, 1))
14228 && integer_zerop (op2)
14229 && integer_pow2p (arg1))
14230 {
14231 tree tem = TREE_OPERAND (arg0, 0);
14232 STRIP_NOPS (tem);
14233 if (TREE_CODE (tem) == RSHIFT_EXPR
14234 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14235 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14236 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14237 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14238 TREE_OPERAND (tem, 0), arg1);
14239 }
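	  /* For example (added commentary), ((a >> 3) & 1) ? 8 : 0
	     folds to a & 8, since tree_log2 (8) == 3 matches the shift
	     count and the selected constant is exactly the tested
	     bit.  */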
14240
14241 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14242 is probably obsolete because the first operand should be a
14243 truth value (that's why we have the two cases above), but let's
14244 leave it in until we can confirm this for all front-ends. */
14245 if (integer_zerop (op2)
14246 && TREE_CODE (arg0) == NE_EXPR
14247 && integer_zerop (TREE_OPERAND (arg0, 1))
14248 && integer_pow2p (arg1)
14249 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14250 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14251 arg1, OEP_ONLY_CONST))
14252 return pedantic_non_lvalue_loc (loc,
14253 fold_convert_loc (loc, type,
14254 TREE_OPERAND (arg0, 0)));
14255
14256 /* Disable the transformations below for vectors, since
14257 fold_binary_op_with_conditional_arg may undo them immediately,
14258 yielding an infinite loop. */
14259 if (code == VEC_COND_EXPR)
14260 return NULL_TREE;
14261
14262 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14263 if (integer_zerop (op2)
14264 && truth_value_p (TREE_CODE (arg0))
14265 && truth_value_p (TREE_CODE (arg1))
14266 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14267 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14268 : TRUTH_ANDIF_EXPR,
14269 type, fold_convert_loc (loc, type, arg0), arg1);
14270
14271 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14272       if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14273 && truth_value_p (TREE_CODE (arg0))
14274 && truth_value_p (TREE_CODE (arg1))
14275 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14276 {
14277 location_t loc0 = expr_location_or (arg0, loc);
14278 /* Only perform transformation if ARG0 is easily inverted. */
14279 tem = fold_invert_truthvalue (loc0, arg0);
14280 if (tem)
14281 return fold_build2_loc (loc, code == VEC_COND_EXPR
14282 ? BIT_IOR_EXPR
14283 : TRUTH_ORIF_EXPR,
14284 type, fold_convert_loc (loc, type, tem),
14285 arg1);
14286 }
14287
14288 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14289 if (integer_zerop (arg1)
14290 && truth_value_p (TREE_CODE (arg0))
14291 && truth_value_p (TREE_CODE (op2))
14292 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14293 {
14294 location_t loc0 = expr_location_or (arg0, loc);
14295 /* Only perform transformation if ARG0 is easily inverted. */
14296 tem = fold_invert_truthvalue (loc0, arg0);
14297 if (tem)
14298 return fold_build2_loc (loc, code == VEC_COND_EXPR
14299 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14300 type, fold_convert_loc (loc, type, tem),
14301 op2);
14302 }
14303
14304 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14305       if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14306 && truth_value_p (TREE_CODE (arg0))
14307 && truth_value_p (TREE_CODE (op2))
14308 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14309 return fold_build2_loc (loc, code == VEC_COND_EXPR
14310 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14311 type, fold_convert_loc (loc, type, arg0), op2);
14312
14313 return NULL_TREE;
14314
14315 case CALL_EXPR:
14316 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14317 of fold_ternary on them. */
14318 gcc_unreachable ();
14319
14320 case BIT_FIELD_REF:
14321 if ((TREE_CODE (arg0) == VECTOR_CST
14322 || (TREE_CODE (arg0) == CONSTRUCTOR
14323 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14324 && (type == TREE_TYPE (TREE_TYPE (arg0))
14325 || (TREE_CODE (type) == VECTOR_TYPE
14326 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14327 {
14328 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14329 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14330 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14331 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14332
14333 if (n != 0
14334 && (idx % width) == 0
14335 && (n % width) == 0
14336 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14337 {
14338 idx = idx / width;
14339 n = n / width;
14340
14341 if (TREE_CODE (arg0) == VECTOR_CST)
14342 {
14343 if (n == 1)
14344 return VECTOR_CST_ELT (arg0, idx);
14345
14346 tree *vals = XALLOCAVEC (tree, n);
14347 for (unsigned i = 0; i < n; ++i)
14348 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14349 return build_vector (type, vals);
14350 }
14351
14352 /* Constructor elements can be subvectors. */
14353 unsigned HOST_WIDE_INT k = 1;
14354 if (CONSTRUCTOR_NELTS (arg0) != 0)
14355 {
14356 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14357 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14358 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14359 }
14360
14361 /* We keep an exact subset of the constructor elements. */
14362 if ((idx % k) == 0 && (n % k) == 0)
14363 {
14364 if (CONSTRUCTOR_NELTS (arg0) == 0)
14365 return build_constructor (type, NULL);
14366 idx /= k;
14367 n /= k;
14368 if (n == 1)
14369 {
14370 if (idx < CONSTRUCTOR_NELTS (arg0))
14371 return CONSTRUCTOR_ELT (arg0, idx)->value;
14372 return build_zero_cst (type);
14373 }
14374
14375 vec<constructor_elt, va_gc> *vals;
14376 vec_alloc (vals, n);
14377 for (unsigned i = 0;
14378 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14379 ++i)
14380 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14381 CONSTRUCTOR_ELT
14382 (arg0, idx + i)->value);
14383 return build_constructor (type, vals);
14384 }
14385 /* The bitfield references a single constructor element. */
14386 else if (idx + n <= (idx / k + 1) * k)
14387 {
14388 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14389 return build_zero_cst (type);
14390 else if (n == k)
14391 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14392 else
14393 return fold_build3_loc (loc, code, type,
14394 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14395 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14396 }
14397 }
14398 }
14399
14400 /* A bit-field-ref that referenced the full argument can be stripped. */
14401 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14402 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14403 && integer_zerop (op2))
14404 return fold_convert_loc (loc, type, arg0);
14405
14406 /* On constants we can use native encode/interpret to constant
14407 fold (nearly) all BIT_FIELD_REFs. */
14408 if (CONSTANT_CLASS_P (arg0)
14409 && can_native_interpret_type_p (type)
14410 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14411 	  /* This limitation should not be necessary; we just need to
14412 	     round this up to the mode size.  */
14413 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14414 /* Need bit-shifting of the buffer to relax the following. */
14415 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14416 {
14417 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14418 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14419 unsigned HOST_WIDE_INT clen;
14420 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14421 	  /* ???  We cannot tell native_encode_expr to start at
14422 	     an arbitrary byte, so limit ourselves to a reasonable
14423 	     amount of work.  */
14424 if (clen <= 4096)
14425 {
14426 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14427 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14428 if (len > 0
14429 && len * BITS_PER_UNIT >= bitpos + bitsize)
14430 {
14431 tree v = native_interpret_expr (type,
14432 b + bitpos / BITS_PER_UNIT,
14433 bitsize / BITS_PER_UNIT);
14434 if (v)
14435 return v;
14436 }
14437 }
14438 }
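      /* Sketch of the mechanism (added commentary): the constant is
	 serialized to a byte buffer with native_encode_expr and the
	 addressed bytes are reinterpreted in TYPE with
	 native_interpret_expr.  E.g. a BIT_FIELD_REF of bits 32..63 of
	 a v4si constant yields its second element on a little-endian
	 target.  Only byte-aligned positions and sizes, and buffers of
	 at most 4096 bytes, are handled.  */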
14439
14440 return NULL_TREE;
14441
14442 case FMA_EXPR:
14443 /* For integers we can decompose the FMA if possible. */
14444 if (TREE_CODE (arg0) == INTEGER_CST
14445 && TREE_CODE (arg1) == INTEGER_CST)
14446 return fold_build2_loc (loc, PLUS_EXPR, type,
14447 const_binop (MULT_EXPR, arg0, arg1), arg2);
14448 if (integer_zerop (arg2))
14449 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14450
14451 return fold_fma (loc, type, arg0, arg1, arg2);
14452
14453 case VEC_PERM_EXPR:
14454 if (TREE_CODE (arg2) == VECTOR_CST)
14455 {
14456 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14457 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14458 tree t;
14459 bool need_mask_canon = false;
14460 bool all_in_vec0 = true;
14461 bool all_in_vec1 = true;
14462 bool maybe_identity = true;
14463 bool single_arg = (op0 == op1);
14464 bool changed = false;
14465
14466 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14467 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14468 for (i = 0; i < nelts; i++)
14469 {
14470 tree val = VECTOR_CST_ELT (arg2, i);
14471 if (TREE_CODE (val) != INTEGER_CST)
14472 return NULL_TREE;
14473
14474 sel[i] = TREE_INT_CST_LOW (val) & mask;
14475 if (TREE_INT_CST_HIGH (val)
14476 || ((unsigned HOST_WIDE_INT)
14477 TREE_INT_CST_LOW (val) != sel[i]))
14478 need_mask_canon = true;
14479
14480 if (sel[i] < nelts)
14481 all_in_vec1 = false;
14482 else
14483 all_in_vec0 = false;
14484
14485 if ((sel[i] & (nelts-1)) != i)
14486 maybe_identity = false;
14487 }
14488
14489 if (maybe_identity)
14490 {
14491 if (all_in_vec0)
14492 return op0;
14493 if (all_in_vec1)
14494 return op1;
14495 }
14496
14497 if (all_in_vec0)
14498 op1 = op0;
14499 else if (all_in_vec1)
14500 {
14501 op0 = op1;
14502 for (i = 0; i < nelts; i++)
14503 sel[i] -= nelts;
14504 need_mask_canon = true;
14505 }
14506
14507 if ((TREE_CODE (op0) == VECTOR_CST
14508 || TREE_CODE (op0) == CONSTRUCTOR)
14509 && (TREE_CODE (op1) == VECTOR_CST
14510 || TREE_CODE (op1) == CONSTRUCTOR))
14511 {
14512 t = fold_vec_perm (type, op0, op1, sel);
14513 if (t != NULL_TREE)
14514 return t;
14515 }
14516
14517 if (op0 == op1 && !single_arg)
14518 changed = true;
14519
14520 if (need_mask_canon && arg2 == op2)
14521 {
14522 tree *tsel = XALLOCAVEC (tree, nelts);
14523 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14524 for (i = 0; i < nelts; i++)
14525 tsel[i] = build_int_cst (eltype, sel[i]);
14526 op2 = build_vector (TREE_TYPE (arg2), tsel);
14527 changed = true;
14528 }
14529
14530 if (changed)
14531 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14532 }
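	  /* Worked example (added commentary): for a 4-element permute
	     whose mask is { 5, 4, 7, 6 }, every index selects from the
	     second vector, so op0 is replaced by op1 and the mask is
	     canonicalized to { 1, 0, 3, 2 }; a mask of { 4, 5, 6, 7 }
	     would instead hit the identity check and simply return
	     op1.  */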
14533 return NULL_TREE;
14534
14535 default:
14536 return NULL_TREE;
14537 } /* switch (code) */
14538 }
14539
14540 /* Perform constant folding and related simplification of EXPR.
14541 The related simplifications include x*1 => x, x*0 => 0, etc.,
14542 and application of the associative law.
14543 NOP_EXPR conversions may be removed freely (as long as we
14544 are careful not to change the type of the overall expression).
14545 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14546 but we can constant-fold them if they have constant operands. */
14547
14548 #ifdef ENABLE_FOLD_CHECKING
14549 # define fold(x) fold_1 (x)
14550 static tree fold_1 (tree);
14551 static
14552 #endif
14553 tree
14554 fold (tree expr)
14555 {
14556 const tree t = expr;
14557 enum tree_code code = TREE_CODE (t);
14558 enum tree_code_class kind = TREE_CODE_CLASS (code);
14559 tree tem;
14560 location_t loc = EXPR_LOCATION (expr);
14561
14562 /* Return right away if a constant. */
14563 if (kind == tcc_constant)
14564 return t;
14565
14566 /* CALL_EXPR-like objects with variable numbers of operands are
14567 treated specially. */
14568 if (kind == tcc_vl_exp)
14569 {
14570 if (code == CALL_EXPR)
14571 {
14572 tem = fold_call_expr (loc, expr, false);
14573 return tem ? tem : expr;
14574 }
14575 return expr;
14576 }
14577
14578 if (IS_EXPR_CODE_CLASS (kind))
14579 {
14580 tree type = TREE_TYPE (t);
14581 tree op0, op1, op2;
14582
14583 switch (TREE_CODE_LENGTH (code))
14584 {
14585 case 1:
14586 op0 = TREE_OPERAND (t, 0);
14587 tem = fold_unary_loc (loc, code, type, op0);
14588 return tem ? tem : expr;
14589 case 2:
14590 op0 = TREE_OPERAND (t, 0);
14591 op1 = TREE_OPERAND (t, 1);
14592 tem = fold_binary_loc (loc, code, type, op0, op1);
14593 return tem ? tem : expr;
14594 case 3:
14595 op0 = TREE_OPERAND (t, 0);
14596 op1 = TREE_OPERAND (t, 1);
14597 op2 = TREE_OPERAND (t, 2);
14598 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14599 return tem ? tem : expr;
14600 default:
14601 break;
14602 }
14603 }
14604
14605 switch (code)
14606 {
14607 case ARRAY_REF:
14608 {
14609 tree op0 = TREE_OPERAND (t, 0);
14610 tree op1 = TREE_OPERAND (t, 1);
14611
14612 if (TREE_CODE (op1) == INTEGER_CST
14613 && TREE_CODE (op0) == CONSTRUCTOR
14614 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14615 {
14616 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14617 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14618 unsigned HOST_WIDE_INT begin = 0;
14619
14620 /* Find a matching index by means of a binary search. */
14621 while (begin != end)
14622 {
14623 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14624 tree index = (*elts)[middle].index;
14625
14626 if (TREE_CODE (index) == INTEGER_CST
14627 && tree_int_cst_lt (index, op1))
14628 begin = middle + 1;
14629 else if (TREE_CODE (index) == INTEGER_CST
14630 && tree_int_cst_lt (op1, index))
14631 end = middle;
14632 else if (TREE_CODE (index) == RANGE_EXPR
14633 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14634 begin = middle + 1;
14635 else if (TREE_CODE (index) == RANGE_EXPR
14636 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14637 end = middle;
14638 else
14639 return (*elts)[middle].value;
14640 }
14641 }
14642
14643 return t;
14644 }
14645
14646 /* Return a VECTOR_CST if possible. */
14647 case CONSTRUCTOR:
14648 {
14649 tree type = TREE_TYPE (t);
14650 if (TREE_CODE (type) != VECTOR_TYPE)
14651 return t;
14652
14653 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14654 unsigned HOST_WIDE_INT idx, pos = 0;
14655 tree value;
14656
14657 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14658 {
14659 if (!CONSTANT_CLASS_P (value))
14660 return t;
14661 if (TREE_CODE (value) == VECTOR_CST)
14662 {
14663 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14664 vec[pos++] = VECTOR_CST_ELT (value, i);
14665 }
14666 else
14667 vec[pos++] = value;
14668 }
14669 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14670 vec[pos] = build_zero_cst (TREE_TYPE (type));
14671
14672 return build_vector (type, vec);
14673 }
14674
14675 case CONST_DECL:
14676 return fold (DECL_INITIAL (t));
14677
14678 default:
14679 return t;
14680 } /* switch (code) */
14681 }
14682
14683 #ifdef ENABLE_FOLD_CHECKING
14684 #undef fold
14685
14686 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14687 hash_table <pointer_hash <tree_node> >);
14688 static void fold_check_failed (const_tree, const_tree);
14689 void print_fold_checksum (const_tree);
14690
14691 /* When --enable-checking=fold, compute a digest of EXPR before
14692    and after the actual fold call to verify that fold did not
14693    accidentally change the original EXPR.  */
14694
14695 tree
14696 fold (tree expr)
14697 {
14698 tree ret;
14699 struct md5_ctx ctx;
14700 unsigned char checksum_before[16], checksum_after[16];
14701 hash_table <pointer_hash <tree_node> > ht;
14702
14703 ht.create (32);
14704 md5_init_ctx (&ctx);
14705 fold_checksum_tree (expr, &ctx, ht);
14706 md5_finish_ctx (&ctx, checksum_before);
14707 ht.empty ();
14708
14709 ret = fold_1 (expr);
14710
14711 md5_init_ctx (&ctx);
14712 fold_checksum_tree (expr, &ctx, ht);
14713 md5_finish_ctx (&ctx, checksum_after);
14714 ht.dispose ();
14715
14716 if (memcmp (checksum_before, checksum_after, 16))
14717 fold_check_failed (expr, ret);
14718
14719 return ret;
14720 }
14721
14722 void
14723 print_fold_checksum (const_tree expr)
14724 {
14725 struct md5_ctx ctx;
14726 unsigned char checksum[16], cnt;
14727 hash_table <pointer_hash <tree_node> > ht;
14728
14729 ht.create (32);
14730 md5_init_ctx (&ctx);
14731 fold_checksum_tree (expr, &ctx, ht);
14732 md5_finish_ctx (&ctx, checksum);
14733 ht.dispose ();
14734 for (cnt = 0; cnt < 16; ++cnt)
14735 fprintf (stderr, "%02x", checksum[cnt]);
14736 putc ('\n', stderr);
14737 }
14738
14739 static void
14740 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14741 {
14742 internal_error ("fold check: original tree changed by fold");
14743 }
14744
14745 static void
14746 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14747 hash_table <pointer_hash <tree_node> > ht)
14748 {
14749 tree_node **slot;
14750 enum tree_code code;
14751 union tree_node buf;
14752 int i, len;
14753
14754 recursive_label:
14755 if (expr == NULL)
14756 return;
14757 slot = ht.find_slot (expr, INSERT);
14758 if (*slot != NULL)
14759 return;
14760 *slot = CONST_CAST_TREE (expr);
14761 code = TREE_CODE (expr);
14762 if (TREE_CODE_CLASS (code) == tcc_declaration
14763 && DECL_ASSEMBLER_NAME_SET_P (expr))
14764 {
14765 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14766 memcpy ((char *) &buf, expr, tree_size (expr));
14767 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14768 expr = (tree) &buf;
14769 }
14770 else if (TREE_CODE_CLASS (code) == tcc_type
14771 && (TYPE_POINTER_TO (expr)
14772 || TYPE_REFERENCE_TO (expr)
14773 || TYPE_CACHED_VALUES_P (expr)
14774 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14775 || TYPE_NEXT_VARIANT (expr)))
14776 {
14777 /* Allow these fields to be modified. */
14778 tree tmp;
14779 memcpy ((char *) &buf, expr, tree_size (expr));
14780 expr = tmp = (tree) &buf;
14781 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14782 TYPE_POINTER_TO (tmp) = NULL;
14783 TYPE_REFERENCE_TO (tmp) = NULL;
14784 TYPE_NEXT_VARIANT (tmp) = NULL;
14785 if (TYPE_CACHED_VALUES_P (tmp))
14786 {
14787 TYPE_CACHED_VALUES_P (tmp) = 0;
14788 TYPE_CACHED_VALUES (tmp) = NULL;
14789 }
14790 }
14791 md5_process_bytes (expr, tree_size (expr), ctx);
14792 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14793 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14794 if (TREE_CODE_CLASS (code) != tcc_type
14795 && TREE_CODE_CLASS (code) != tcc_declaration
14796 && code != TREE_LIST
14797 && code != SSA_NAME
14798 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14799 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14800 switch (TREE_CODE_CLASS (code))
14801 {
14802 case tcc_constant:
14803 switch (code)
14804 {
14805 case STRING_CST:
14806 md5_process_bytes (TREE_STRING_POINTER (expr),
14807 TREE_STRING_LENGTH (expr), ctx);
14808 break;
14809 case COMPLEX_CST:
14810 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14811 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14812 break;
14813 case VECTOR_CST:
14814 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14815 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14816 break;
14817 default:
14818 break;
14819 }
14820 break;
14821 case tcc_exceptional:
14822 switch (code)
14823 {
14824 case TREE_LIST:
14825 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14826 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14827 expr = TREE_CHAIN (expr);
14828 goto recursive_label;
14829 break;
14830 case TREE_VEC:
14831 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14832 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14833 break;
14834 default:
14835 break;
14836 }
14837 break;
14838 case tcc_expression:
14839 case tcc_reference:
14840 case tcc_comparison:
14841 case tcc_unary:
14842 case tcc_binary:
14843 case tcc_statement:
14844 case tcc_vl_exp:
14845 len = TREE_OPERAND_LENGTH (expr);
14846 for (i = 0; i < len; ++i)
14847 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14848 break;
14849 case tcc_declaration:
14850 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14851 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14852 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14853 {
14854 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14855 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14856 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14857 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14858 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14859 }
14860 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14861 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14862
14863 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14864 {
14865 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14866 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14867 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14868 }
14869 break;
14870 case tcc_type:
14871 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14872 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14873 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14874 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14875 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14876 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14877 if (INTEGRAL_TYPE_P (expr)
14878 || SCALAR_FLOAT_TYPE_P (expr))
14879 {
14880 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14881 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14882 }
14883 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14884 if (TREE_CODE (expr) == RECORD_TYPE
14885 || TREE_CODE (expr) == UNION_TYPE
14886 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14887 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14888 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14889 break;
14890 default:
14891 break;
14892 }
14893 }
14894
14895 /* Helper function for outputting the checksum of a tree T. When
14896 debugging with gdb, you can "define mynext" to be "next" followed
14897 by "call debug_fold_checksum (op0)", then just trace down till the
14898 outputs differ. */
14899
14900 DEBUG_FUNCTION void
14901 debug_fold_checksum (const_tree t)
14902 {
14903 int i;
14904 unsigned char checksum[16];
14905 struct md5_ctx ctx;
14906 hash_table <pointer_hash <tree_node> > ht;
14907 ht.create (32);
14908
14909 md5_init_ctx (&ctx);
14910 fold_checksum_tree (t, &ctx, ht);
14911 md5_finish_ctx (&ctx, checksum);
14912 ht.empty ();
14913
14914 for (i = 0; i < 16; i++)
14915 fprintf (stderr, "%d ", checksum[i]);
14916
14917 fprintf (stderr, "\n");
14918 }
14919
14920 #endif
14921
14922 /* Fold a unary tree expression with code CODE of type TYPE with an
14923 operand OP0. LOC is the location of the resulting expression.
14924 Return a folded expression if successful. Otherwise, return a tree
14925 expression with code CODE of type TYPE with an operand OP0. */
14926
14927 tree
14928 fold_build1_stat_loc (location_t loc,
14929 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14930 {
14931 tree tem;
14932 #ifdef ENABLE_FOLD_CHECKING
14933 unsigned char checksum_before[16], checksum_after[16];
14934 struct md5_ctx ctx;
14935 hash_table <pointer_hash <tree_node> > ht;
14936
14937 ht.create (32);
14938 md5_init_ctx (&ctx);
14939 fold_checksum_tree (op0, &ctx, ht);
14940 md5_finish_ctx (&ctx, checksum_before);
14941 ht.empty ();
14942 #endif
14943
14944 tem = fold_unary_loc (loc, code, type, op0);
14945 if (!tem)
14946 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14947
14948 #ifdef ENABLE_FOLD_CHECKING
14949 md5_init_ctx (&ctx);
14950 fold_checksum_tree (op0, &ctx, ht);
14951 md5_finish_ctx (&ctx, checksum_after);
14952 ht.dispose ();
14953
14954 if (memcmp (checksum_before, checksum_after, 16))
14955 fold_check_failed (op0, tem);
14956 #endif
14957 return tem;
14958 }
14959
14960 /* Fold a binary tree expression with code CODE of type TYPE with
14961 operands OP0 and OP1. LOC is the location of the resulting
14962 expression. Return a folded expression if successful. Otherwise,
14963 return a tree expression with code CODE of type TYPE with operands
14964 OP0 and OP1. */
14965
14966 tree
14967 fold_build2_stat_loc (location_t loc,
14968 enum tree_code code, tree type, tree op0, tree op1
14969 MEM_STAT_DECL)
14970 {
14971 tree tem;
14972 #ifdef ENABLE_FOLD_CHECKING
14973 unsigned char checksum_before_op0[16],
14974 checksum_before_op1[16],
14975 checksum_after_op0[16],
14976 checksum_after_op1[16];
14977 struct md5_ctx ctx;
14978 hash_table <pointer_hash <tree_node> > ht;
14979
14980 ht.create (32);
14981 md5_init_ctx (&ctx);
14982 fold_checksum_tree (op0, &ctx, ht);
14983 md5_finish_ctx (&ctx, checksum_before_op0);
14984 ht.empty ();
14985
14986 md5_init_ctx (&ctx);
14987 fold_checksum_tree (op1, &ctx, ht);
14988 md5_finish_ctx (&ctx, checksum_before_op1);
14989 ht.empty ();
14990 #endif
14991
14992 tem = fold_binary_loc (loc, code, type, op0, op1);
14993 if (!tem)
14994 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14995
14996 #ifdef ENABLE_FOLD_CHECKING
14997 md5_init_ctx (&ctx);
14998 fold_checksum_tree (op0, &ctx, ht);
14999 md5_finish_ctx (&ctx, checksum_after_op0);
15000 ht.empty ();
15001
15002 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15003 fold_check_failed (op0, tem);
15004
15005 md5_init_ctx (&ctx);
15006 fold_checksum_tree (op1, &ctx, ht);
15007 md5_finish_ctx (&ctx, checksum_after_op1);
15008 ht.dispose ();
15009
15010 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15011 fold_check_failed (op1, tem);
15012 #endif
15013 return tem;
15014 }
15015
15016 /* Fold a ternary tree expression with code CODE of type TYPE with
15017 operands OP0, OP1, and OP2. Return a folded expression if
15018 successful. Otherwise, return a tree expression with code CODE of
15019 type TYPE with operands OP0, OP1, and OP2. */
15020
15021 tree
15022 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15023 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15024 {
15025 tree tem;
15026 #ifdef ENABLE_FOLD_CHECKING
15027 unsigned char checksum_before_op0[16],
15028 checksum_before_op1[16],
15029 checksum_before_op2[16],
15030 checksum_after_op0[16],
15031 checksum_after_op1[16],
15032 checksum_after_op2[16];
15033 struct md5_ctx ctx;
15034 hash_table <pointer_hash <tree_node> > ht;
15035
15036 ht.create (32);
15037 md5_init_ctx (&ctx);
15038 fold_checksum_tree (op0, &ctx, ht);
15039 md5_finish_ctx (&ctx, checksum_before_op0);
15040 ht.empty ();
15041
15042 md5_init_ctx (&ctx);
15043 fold_checksum_tree (op1, &ctx, ht);
15044 md5_finish_ctx (&ctx, checksum_before_op1);
15045 ht.empty ();
15046
15047 md5_init_ctx (&ctx);
15048 fold_checksum_tree (op2, &ctx, ht);
15049 md5_finish_ctx (&ctx, checksum_before_op2);
15050 ht.empty ();
15051 #endif
15052
15053 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15054 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15055 if (!tem)
15056 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15057
15058 #ifdef ENABLE_FOLD_CHECKING
15059 md5_init_ctx (&ctx);
15060 fold_checksum_tree (op0, &ctx, ht);
15061 md5_finish_ctx (&ctx, checksum_after_op0);
15062 ht.empty ();
15063
15064 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15065 fold_check_failed (op0, tem);
15066
15067 md5_init_ctx (&ctx);
15068 fold_checksum_tree (op1, &ctx, ht);
15069 md5_finish_ctx (&ctx, checksum_after_op1);
15070 ht.empty ();
15071
15072 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15073 fold_check_failed (op1, tem);
15074
15075 md5_init_ctx (&ctx);
15076 fold_checksum_tree (op2, &ctx, ht);
15077 md5_finish_ctx (&ctx, checksum_after_op2);
15078 ht.dispose ();
15079
15080 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15081 fold_check_failed (op2, tem);
15082 #endif
15083 return tem;
15084 }
15085
15086 /* Fold a CALL_EXPR expression of type TYPE with function FN, NARGS
15087    arguments in ARGARRAY, and a null static chain.
15088 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15089 of type TYPE from the given operands as constructed by build_call_array. */
15090
15091 tree
15092 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15093 int nargs, tree *argarray)
15094 {
15095 tree tem;
15096 #ifdef ENABLE_FOLD_CHECKING
15097 unsigned char checksum_before_fn[16],
15098 checksum_before_arglist[16],
15099 checksum_after_fn[16],
15100 checksum_after_arglist[16];
15101 struct md5_ctx ctx;
15102 hash_table <pointer_hash <tree_node> > ht;
15103 int i;
15104
15105 ht.create (32);
15106 md5_init_ctx (&ctx);
15107 fold_checksum_tree (fn, &ctx, ht);
15108 md5_finish_ctx (&ctx, checksum_before_fn);
15109 ht.empty ();
15110
15111 md5_init_ctx (&ctx);
15112 for (i = 0; i < nargs; i++)
15113 fold_checksum_tree (argarray[i], &ctx, ht);
15114 md5_finish_ctx (&ctx, checksum_before_arglist);
15115 ht.empty ();
15116 #endif
15117
15118 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15119
15120 #ifdef ENABLE_FOLD_CHECKING
15121 md5_init_ctx (&ctx);
15122 fold_checksum_tree (fn, &ctx, ht);
15123 md5_finish_ctx (&ctx, checksum_after_fn);
15124 ht.empty ();
15125
15126 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15127 fold_check_failed (fn, tem);
15128
15129 md5_init_ctx (&ctx);
15130 for (i = 0; i < nargs; i++)
15131 fold_checksum_tree (argarray[i], &ctx, ht);
15132 md5_finish_ctx (&ctx, checksum_after_arglist);
15133 ht.dispose ();
15134
15135 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15136 fold_check_failed (NULL_TREE, tem);
15137 #endif
15138 return tem;
15139 }
15140
15141 /* Perform constant folding and related simplification of initializer
15142 expression EXPR. These behave identically to "fold_buildN" but ignore
15143 potential run-time traps and exceptions that fold must preserve. */
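/* For example (a hypothetical initializer): with -ftrapping-math in
   effect, fold refuses to fold 1.0 / 0.0 because the division could trap
   at run time, yet the static initializer

     double d = 1.0 / 0.0;

   must be folded at compile time; the macros below temporarily clear
   flag_trapping_math and friends so the division folds to +Inf.  */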
15144
15145 #define START_FOLD_INIT \
15146 int saved_signaling_nans = flag_signaling_nans;\
15147 int saved_trapping_math = flag_trapping_math;\
15148 int saved_rounding_math = flag_rounding_math;\
15149 int saved_trapv = flag_trapv;\
15150 int saved_folding_initializer = folding_initializer;\
15151 flag_signaling_nans = 0;\
15152 flag_trapping_math = 0;\
15153 flag_rounding_math = 0;\
15154 flag_trapv = 0;\
15155 folding_initializer = 1;
15156
15157 #define END_FOLD_INIT \
15158 flag_signaling_nans = saved_signaling_nans;\
15159 flag_trapping_math = saved_trapping_math;\
15160 flag_rounding_math = saved_rounding_math;\
15161 flag_trapv = saved_trapv;\
15162 folding_initializer = saved_folding_initializer;
15163
15164 tree
15165 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15166 tree type, tree op)
15167 {
15168 tree result;
15169 START_FOLD_INIT;
15170
15171 result = fold_build1_loc (loc, code, type, op);
15172
15173 END_FOLD_INIT;
15174 return result;
15175 }
15176
15177 tree
15178 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15179 tree type, tree op0, tree op1)
15180 {
15181 tree result;
15182 START_FOLD_INIT;
15183
15184 result = fold_build2_loc (loc, code, type, op0, op1);
15185
15186 END_FOLD_INIT;
15187 return result;
15188 }
15189
15190 tree
15191 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15192 tree type, tree op0, tree op1, tree op2)
15193 {
15194 tree result;
15195 START_FOLD_INIT;
15196
15197 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15198
15199 END_FOLD_INIT;
15200 return result;
15201 }
15202
15203 tree
15204 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15205 int nargs, tree *argarray)
15206 {
15207 tree result;
15208 START_FOLD_INIT;
15209
15210 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15211
15212 END_FOLD_INIT;
15213 return result;
15214 }
15215
15216 #undef START_FOLD_INIT
15217 #undef END_FOLD_INIT
15218
15219 /* Determine if first argument is a multiple of second argument. Return 0 if
15220    it is not, or if we cannot easily determine it to be.
15221
15222 An example of the sort of thing we care about (at this point; this routine
15223 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15224 fold cases do now) is discovering that
15225
15226 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15227
15228 is a multiple of
15229
15230 SAVE_EXPR (J * 8)
15231
15232 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15233
15234 This code also handles discovering that
15235
15236 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15237
15238 is a multiple of 8 so we don't have to worry about dealing with a
15239 possible remainder.
15240
15241 Note that we *look* inside a SAVE_EXPR only to determine how it was
15242 calculated; it is not safe for fold to do much of anything else with the
15243 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15244 at run time. For example, the latter example above *cannot* be implemented
15245 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15246 evaluation time of the original SAVE_EXPR is not necessarily the same at
15247 the time the new expression is evaluated. The only optimization of this
15248 sort that would be valid is changing
15249
15250 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15251
15252 divided by 8 to
15253
15254 SAVE_EXPR (I) * SAVE_EXPR (J)
15255
15256 (where the same SAVE_EXPR (J) is used in the original and the
15257 transformed version). */
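/* A minimal usage sketch (SIZE is a hypothetical sizetype expression):

     tree eight = build_int_cst (sizetype, 8);
     if (multiple_of_p (sizetype, size, eight))
       ... SIZE is provably a multiple of 8; no remainder handling ...  */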
15258
15259 int
15260 multiple_of_p (tree type, const_tree top, const_tree bottom)
15261 {
15262 if (operand_equal_p (top, bottom, 0))
15263 return 1;
15264
15265 if (TREE_CODE (type) != INTEGER_TYPE)
15266 return 0;
15267
15268 switch (TREE_CODE (top))
15269 {
15270 case BIT_AND_EXPR:
15271       /* A bitwise AND can only clear bits, so if BOTTOM is a power of two
15272 	 and either operand of TOP is a multiple of BOTTOM, so is TOP.  */
15273 if (!integer_pow2p (bottom))
15274 return 0;
15275 /* FALLTHRU */
15276
15277 case MULT_EXPR:
15278 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15279 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15280
15281 case PLUS_EXPR:
15282 case MINUS_EXPR:
15283 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15284 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15285
15286 case LSHIFT_EXPR:
15287 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15288 {
15289 tree op1, t1;
15290
15291 op1 = TREE_OPERAND (top, 1);
15292 /* const_binop may not detect overflow correctly,
15293 so check for it explicitly here. */
15294 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15295 > TREE_INT_CST_LOW (op1)
15296 && TREE_INT_CST_HIGH (op1) == 0
15297 && 0 != (t1 = fold_convert (type,
15298 const_binop (LSHIFT_EXPR,
15299 size_one_node,
15300 op1)))
15301 && !TREE_OVERFLOW (t1))
15302 return multiple_of_p (type, t1, bottom);
15303 }
15304 return 0;
15305
15306 case NOP_EXPR:
15307 /* Can't handle conversions from non-integral or wider integral type. */
15308 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15309 || (TYPE_PRECISION (type)
15310 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15311 return 0;
15312
15313       /* ... fall through ...  */
15314
15315 case SAVE_EXPR:
15316 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15317
15318 case COND_EXPR:
15319 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15320 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15321
15322 case INTEGER_CST:
15323 if (TREE_CODE (bottom) != INTEGER_CST
15324 || integer_zerop (bottom)
15325 || (TYPE_UNSIGNED (type)
15326 && (tree_int_cst_sgn (top) < 0
15327 || tree_int_cst_sgn (bottom) < 0)))
15328 return 0;
15329 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15330 top, bottom));
15331
15332 default:
15333 return 0;
15334 }
15335 }
15336
15337 /* Return true if an expression with code CODE and type TYPE is known to be non-negative.  */
15338
15339 static bool
15340 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15341 {
15342 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15343 && truth_value_p (code))
15344 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15345        have a signed:1 type (where the values are -1 and 0).  */
15346 return true;
15347 return false;
15348 }
15349
15350 /* Return true if (CODE OP0) is known to be non-negative. If the return
15351 value is based on the assumption that signed overflow is undefined,
15352 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15353 *STRICT_OVERFLOW_P. */
15354
15355 bool
15356 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15357 bool *strict_overflow_p)
15358 {
15359 if (TYPE_UNSIGNED (type))
15360 return true;
15361
15362 switch (code)
15363 {
15364 case ABS_EXPR:
15365 /* We can't return 1 if flag_wrapv is set because
15366 ABS_EXPR<INT_MIN> = INT_MIN. */
15367 if (!INTEGRAL_TYPE_P (type))
15368 return true;
15369 if (TYPE_OVERFLOW_UNDEFINED (type))
15370 {
15371 *strict_overflow_p = true;
15372 return true;
15373 }
15374 break;
15375
15376 case NON_LVALUE_EXPR:
15377 case FLOAT_EXPR:
15378 case FIX_TRUNC_EXPR:
15379 return tree_expr_nonnegative_warnv_p (op0,
15380 strict_overflow_p);
15381
15382 case NOP_EXPR:
15383 {
15384 tree inner_type = TREE_TYPE (op0);
15385 tree outer_type = type;
15386
15387 if (TREE_CODE (outer_type) == REAL_TYPE)
15388 {
15389 if (TREE_CODE (inner_type) == REAL_TYPE)
15390 return tree_expr_nonnegative_warnv_p (op0,
15391 strict_overflow_p);
15392 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15393 {
15394 if (TYPE_UNSIGNED (inner_type))
15395 return true;
15396 return tree_expr_nonnegative_warnv_p (op0,
15397 strict_overflow_p);
15398 }
15399 }
15400 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15401 {
15402 if (TREE_CODE (inner_type) == REAL_TYPE)
15403 return tree_expr_nonnegative_warnv_p (op0,
15404 strict_overflow_p);
15405 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15406 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15407 && TYPE_UNSIGNED (inner_type);
15408 }
15409 }
15410 break;
15411
15412 default:
15413 return tree_simple_nonnegative_warnv_p (code, type);
15414 }
15415
15416   /* We don't know the sign of the expression, so be conservative and return false.  */
15417 return false;
15418 }
15419
15420 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15421 value is based on the assumption that signed overflow is undefined,
15422 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15423 *STRICT_OVERFLOW_P. */
15424
15425 bool
15426 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15427 tree op1, bool *strict_overflow_p)
15428 {
15429 if (TYPE_UNSIGNED (type))
15430 return true;
15431
15432 switch (code)
15433 {
15434 case POINTER_PLUS_EXPR:
15435 case PLUS_EXPR:
15436 if (FLOAT_TYPE_P (type))
15437 return (tree_expr_nonnegative_warnv_p (op0,
15438 strict_overflow_p)
15439 && tree_expr_nonnegative_warnv_p (op1,
15440 strict_overflow_p));
15441
15442 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15443 both unsigned and at least 2 bits shorter than the result. */
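      /* A worked example: for unsigned chars A and B widened to int,
	 (int) A + (int) B lies in [0, 510] and so needs only 9 bits;
	 since 9 < 32, the sum can never reach the sign bit.  */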
15444 if (TREE_CODE (type) == INTEGER_TYPE
15445 && TREE_CODE (op0) == NOP_EXPR
15446 && TREE_CODE (op1) == NOP_EXPR)
15447 {
15448 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15449 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15450 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15451 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15452 {
15453 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15454 TYPE_PRECISION (inner2)) + 1;
15455 return prec < TYPE_PRECISION (type);
15456 }
15457 }
15458 break;
15459
15460 case MULT_EXPR:
15461 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15462 {
15463 	  /* x * x is always non-negative for floating point x,
15464 	     and likewise for integers when overflow is undefined.  */
15465 if (operand_equal_p (op0, op1, 0)
15466 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15467 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15468 {
15469 if (TYPE_OVERFLOW_UNDEFINED (type))
15470 *strict_overflow_p = true;
15471 return true;
15472 }
15473 }
15474
15475 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15476    both unsigned and the sum of their precisions is less than that of the result.  */
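      /* A worked example: for unsigned chars A and B widened to int,
	 (int) A * (int) B is at most 255 * 255 = 65025 and so needs only
	 8 + 8 = 16 bits; since 16 < 32, the product cannot wrap into the
	 sign bit.  */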
15477 if (TREE_CODE (type) == INTEGER_TYPE
15478 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15479 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15480 {
15481 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15482 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15483 : TREE_TYPE (op0);
15484 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15485 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15486 : TREE_TYPE (op1);
15487
15488 bool unsigned0 = TYPE_UNSIGNED (inner0);
15489 bool unsigned1 = TYPE_UNSIGNED (inner1);
15490
15491 if (TREE_CODE (op0) == INTEGER_CST)
15492 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15493
15494 if (TREE_CODE (op1) == INTEGER_CST)
15495 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15496
15497 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15498 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15499 {
15500 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15501 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15502 : TYPE_PRECISION (inner0);
15503
15504 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15505 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15506 : TYPE_PRECISION (inner1);
15507
15508 return precision0 + precision1 < TYPE_PRECISION (type);
15509 }
15510 }
15511 return false;
15512
15513 case BIT_AND_EXPR:
15514 case MAX_EXPR:
15515 return (tree_expr_nonnegative_warnv_p (op0,
15516 strict_overflow_p)
15517 || tree_expr_nonnegative_warnv_p (op1,
15518 strict_overflow_p));
15519
15520 case BIT_IOR_EXPR:
15521 case BIT_XOR_EXPR:
15522 case MIN_EXPR:
15523 case RDIV_EXPR:
15524 case TRUNC_DIV_EXPR:
15525 case CEIL_DIV_EXPR:
15526 case FLOOR_DIV_EXPR:
15527 case ROUND_DIV_EXPR:
15528 return (tree_expr_nonnegative_warnv_p (op0,
15529 strict_overflow_p)
15530 && tree_expr_nonnegative_warnv_p (op1,
15531 strict_overflow_p));
15532
15533 case TRUNC_MOD_EXPR:
15534 case CEIL_MOD_EXPR:
15535 case FLOOR_MOD_EXPR:
15536 case ROUND_MOD_EXPR:
15537 return tree_expr_nonnegative_warnv_p (op0,
15538 strict_overflow_p);
15539 default:
15540 return tree_simple_nonnegative_warnv_p (code, type);
15541 }
15542
15543   /* We don't know the sign of the expression, so be conservative and return false.  */
15544 return false;
15545 }
15546
15547 /* Return true if T is known to be non-negative. If the return
15548 value is based on the assumption that signed overflow is undefined,
15549 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15550 *STRICT_OVERFLOW_P. */
15551
15552 bool
15553 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15554 {
15555 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15556 return true;
15557
15558 switch (TREE_CODE (t))
15559 {
15560 case INTEGER_CST:
15561 return tree_int_cst_sgn (t) >= 0;
15562
15563 case REAL_CST:
15564 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15565
15566 case FIXED_CST:
15567 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15568
15569 case COND_EXPR:
15570 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15571 strict_overflow_p)
15572 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15573 strict_overflow_p));
15574 default:
15575 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15576 TREE_TYPE (t));
15577 }
15578   /* We don't know the sign of `t', so be conservative and return false.  */
15579 return false;
15580 }
15581
15582 /* Return true if a call to FNDECL with arguments ARG0 and ARG1 is known to be non-negative.  If the return
15583 value is based on the assumption that signed overflow is undefined,
15584 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15585 *STRICT_OVERFLOW_P. */
15586
15587 bool
15588 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15589 tree arg0, tree arg1, bool *strict_overflow_p)
15590 {
15591 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15592 switch (DECL_FUNCTION_CODE (fndecl))
15593 {
15594 CASE_FLT_FN (BUILT_IN_ACOS):
15595 CASE_FLT_FN (BUILT_IN_ACOSH):
15596 CASE_FLT_FN (BUILT_IN_CABS):
15597 CASE_FLT_FN (BUILT_IN_COSH):
15598 CASE_FLT_FN (BUILT_IN_ERFC):
15599 CASE_FLT_FN (BUILT_IN_EXP):
15600 CASE_FLT_FN (BUILT_IN_EXP10):
15601 CASE_FLT_FN (BUILT_IN_EXP2):
15602 CASE_FLT_FN (BUILT_IN_FABS):
15603 CASE_FLT_FN (BUILT_IN_FDIM):
15604 CASE_FLT_FN (BUILT_IN_HYPOT):
15605 CASE_FLT_FN (BUILT_IN_POW10):
15606 CASE_INT_FN (BUILT_IN_FFS):
15607 CASE_INT_FN (BUILT_IN_PARITY):
15608 CASE_INT_FN (BUILT_IN_POPCOUNT):
15609 CASE_INT_FN (BUILT_IN_CLZ):
15610 CASE_INT_FN (BUILT_IN_CLRSB):
15611 case BUILT_IN_BSWAP32:
15612 case BUILT_IN_BSWAP64:
15613 /* Always true. */
15614 return true;
15615
15616 CASE_FLT_FN (BUILT_IN_SQRT):
15617 /* sqrt(-0.0) is -0.0. */
15618 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15619 return true;
15620 return tree_expr_nonnegative_warnv_p (arg0,
15621 strict_overflow_p);
15622
15623 CASE_FLT_FN (BUILT_IN_ASINH):
15624 CASE_FLT_FN (BUILT_IN_ATAN):
15625 CASE_FLT_FN (BUILT_IN_ATANH):
15626 CASE_FLT_FN (BUILT_IN_CBRT):
15627 CASE_FLT_FN (BUILT_IN_CEIL):
15628 CASE_FLT_FN (BUILT_IN_ERF):
15629 CASE_FLT_FN (BUILT_IN_EXPM1):
15630 CASE_FLT_FN (BUILT_IN_FLOOR):
15631 CASE_FLT_FN (BUILT_IN_FMOD):
15632 CASE_FLT_FN (BUILT_IN_FREXP):
15633 CASE_FLT_FN (BUILT_IN_ICEIL):
15634 CASE_FLT_FN (BUILT_IN_IFLOOR):
15635 CASE_FLT_FN (BUILT_IN_IRINT):
15636 CASE_FLT_FN (BUILT_IN_IROUND):
15637 CASE_FLT_FN (BUILT_IN_LCEIL):
15638 CASE_FLT_FN (BUILT_IN_LDEXP):
15639 CASE_FLT_FN (BUILT_IN_LFLOOR):
15640 CASE_FLT_FN (BUILT_IN_LLCEIL):
15641 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15642 CASE_FLT_FN (BUILT_IN_LLRINT):
15643 CASE_FLT_FN (BUILT_IN_LLROUND):
15644 CASE_FLT_FN (BUILT_IN_LRINT):
15645 CASE_FLT_FN (BUILT_IN_LROUND):
15646 CASE_FLT_FN (BUILT_IN_MODF):
15647 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15648 CASE_FLT_FN (BUILT_IN_RINT):
15649 CASE_FLT_FN (BUILT_IN_ROUND):
15650 CASE_FLT_FN (BUILT_IN_SCALB):
15651 CASE_FLT_FN (BUILT_IN_SCALBLN):
15652 CASE_FLT_FN (BUILT_IN_SCALBN):
15653 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15654 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15655 CASE_FLT_FN (BUILT_IN_SINH):
15656 CASE_FLT_FN (BUILT_IN_TANH):
15657 CASE_FLT_FN (BUILT_IN_TRUNC):
15658 /* True if the 1st argument is nonnegative. */
15659 return tree_expr_nonnegative_warnv_p (arg0,
15660 strict_overflow_p);
15661
15662 CASE_FLT_FN (BUILT_IN_FMAX):
15663 /* True if the 1st OR 2nd arguments are nonnegative. */
15664 return (tree_expr_nonnegative_warnv_p (arg0,
15665 strict_overflow_p)
15666 || (tree_expr_nonnegative_warnv_p (arg1,
15667 strict_overflow_p)));
15668
15669 CASE_FLT_FN (BUILT_IN_FMIN):
15670 /* True if the 1st AND 2nd arguments are nonnegative. */
15671 return (tree_expr_nonnegative_warnv_p (arg0,
15672 strict_overflow_p)
15673 && (tree_expr_nonnegative_warnv_p (arg1,
15674 strict_overflow_p)));
15675
15676 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15677 /* True if the 2nd argument is nonnegative. */
15678 return tree_expr_nonnegative_warnv_p (arg1,
15679 strict_overflow_p);
15680
15681 CASE_FLT_FN (BUILT_IN_POWI):
15682 /* True if the 1st argument is nonnegative or the second
15683 argument is an even integer. */
15684 if (TREE_CODE (arg1) == INTEGER_CST
15685 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15686 return true;
15687 return tree_expr_nonnegative_warnv_p (arg0,
15688 strict_overflow_p);
15689
15690 CASE_FLT_FN (BUILT_IN_POW):
15691 /* True if the 1st argument is nonnegative or the second
15692 argument is an even integer valued real. */
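      /* E.g. pow (x, 2.0) is non-negative for any x, whereas for
	 pow (x, 3.0) we must fall back to checking x itself.  */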
15693 if (TREE_CODE (arg1) == REAL_CST)
15694 {
15695 REAL_VALUE_TYPE c;
15696 HOST_WIDE_INT n;
15697
15698 c = TREE_REAL_CST (arg1);
15699 n = real_to_integer (&c);
15700 if ((n & 1) == 0)
15701 {
15702 REAL_VALUE_TYPE cint;
15703 real_from_integer (&cint, VOIDmode, n,
15704 n < 0 ? -1 : 0, 0);
15705 if (real_identical (&c, &cint))
15706 return true;
15707 }
15708 }
15709 return tree_expr_nonnegative_warnv_p (arg0,
15710 strict_overflow_p);
15711
15712 default:
15713 break;
15714 }
15715 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15716 type);
15717 }
15718
15719 /* Return true if T is known to be non-negative. If the return
15720 value is based on the assumption that signed overflow is undefined,
15721 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15722 *STRICT_OVERFLOW_P. */
15723
15724 bool
15725 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15726 {
15727 enum tree_code code = TREE_CODE (t);
15728 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15729 return true;
15730
15731 switch (code)
15732 {
15733 case TARGET_EXPR:
15734 {
15735 tree temp = TARGET_EXPR_SLOT (t);
15736 t = TARGET_EXPR_INITIAL (t);
15737
15738 /* If the initializer is non-void, then it's a normal expression
15739 that will be assigned to the slot. */
15740 if (!VOID_TYPE_P (t))
15741 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15742
15743 /* Otherwise, the initializer sets the slot in some way. One common
15744 way is an assignment statement at the end of the initializer. */
15745 while (1)
15746 {
15747 if (TREE_CODE (t) == BIND_EXPR)
15748 t = expr_last (BIND_EXPR_BODY (t));
15749 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15750 || TREE_CODE (t) == TRY_CATCH_EXPR)
15751 t = expr_last (TREE_OPERAND (t, 0));
15752 else if (TREE_CODE (t) == STATEMENT_LIST)
15753 t = expr_last (t);
15754 else
15755 break;
15756 }
15757 if (TREE_CODE (t) == MODIFY_EXPR
15758 && TREE_OPERAND (t, 0) == temp)
15759 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15760 strict_overflow_p);
15761
15762 return false;
15763 }
15764
15765 case CALL_EXPR:
15766 {
15767 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15768 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15769
15770 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15771 get_callee_fndecl (t),
15772 arg0,
15773 arg1,
15774 strict_overflow_p);
15775 }
15776 case COMPOUND_EXPR:
15777 case MODIFY_EXPR:
15778 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15779 strict_overflow_p);
15780 case BIND_EXPR:
15781 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15782 strict_overflow_p);
15783 case SAVE_EXPR:
15784 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15785 strict_overflow_p);
15786
15787 default:
15788 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15789 TREE_TYPE (t));
15790 }
15791
15792   /* We don't know the sign of `t', so be conservative and return false.  */
15793 return false;
15794 }
15795
15796 /* Return true if T is known to be non-negative. If the return
15797 value is based on the assumption that signed overflow is undefined,
15798 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15799 *STRICT_OVERFLOW_P. */
15800
15801 bool
15802 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15803 {
15804 enum tree_code code;
15805 if (t == error_mark_node)
15806 return false;
15807
15808 code = TREE_CODE (t);
15809 switch (TREE_CODE_CLASS (code))
15810 {
15811 case tcc_binary:
15812 case tcc_comparison:
15813 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15814 TREE_TYPE (t),
15815 TREE_OPERAND (t, 0),
15816 TREE_OPERAND (t, 1),
15817 strict_overflow_p);
15818
15819 case tcc_unary:
15820 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15821 TREE_TYPE (t),
15822 TREE_OPERAND (t, 0),
15823 strict_overflow_p);
15824
15825 case tcc_constant:
15826 case tcc_declaration:
15827 case tcc_reference:
15828 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15829
15830 default:
15831 break;
15832 }
15833
15834 switch (code)
15835 {
15836 case TRUTH_AND_EXPR:
15837 case TRUTH_OR_EXPR:
15838 case TRUTH_XOR_EXPR:
15839 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15840 TREE_TYPE (t),
15841 TREE_OPERAND (t, 0),
15842 TREE_OPERAND (t, 1),
15843 strict_overflow_p);
15844 case TRUTH_NOT_EXPR:
15845 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15846 TREE_TYPE (t),
15847 TREE_OPERAND (t, 0),
15848 strict_overflow_p);
15849
15850 case COND_EXPR:
15851 case CONSTRUCTOR:
15852 case OBJ_TYPE_REF:
15853 case ASSERT_EXPR:
15854 case ADDR_EXPR:
15855 case WITH_SIZE_EXPR:
15856 case SSA_NAME:
15857 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15858
15859 default:
15860 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15861 }
15862 }
15863
15864 /* Return true if `t' is known to be non-negative. Handle warnings
15865 about undefined signed overflow. */
15866
15867 bool
15868 tree_expr_nonnegative_p (tree t)
15869 {
15870 bool ret, strict_overflow_p;
15871
15872 strict_overflow_p = false;
15873 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15874 if (strict_overflow_p)
15875 fold_overflow_warning (("assuming signed overflow does not occur when "
15876 "determining that expression is always "
15877 "non-negative"),
15878 WARN_STRICT_OVERFLOW_MISC);
15879 return ret;
15880 }
15881
15882
15883 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15884 For floating point we further ensure that T is not denormal.
15885    Similar logic is present in nonzero_address_p in rtlanal.c.
15886
15887 If the return value is based on the assumption that signed overflow
15888 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15889 change *STRICT_OVERFLOW_P. */
15890
15891 bool
15892 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15893 bool *strict_overflow_p)
15894 {
15895 switch (code)
15896 {
15897 case ABS_EXPR:
15898 return tree_expr_nonzero_warnv_p (op0,
15899 strict_overflow_p);
15900
15901 case NOP_EXPR:
15902 {
15903 tree inner_type = TREE_TYPE (op0);
15904 tree outer_type = type;
15905
15906 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15907 && tree_expr_nonzero_warnv_p (op0,
15908 strict_overflow_p));
15909 }
15910 break;
15911
15912 case NON_LVALUE_EXPR:
15913 return tree_expr_nonzero_warnv_p (op0,
15914 strict_overflow_p);
15915
15916 default:
15917 break;
15918 }
15919
15920 return false;
15921 }
15922
15923 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15924 For floating point we further ensure that T is not denormal.
15925    Similar logic is present in nonzero_address_p in rtlanal.c.
15926
15927 If the return value is based on the assumption that signed overflow
15928 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15929 change *STRICT_OVERFLOW_P. */
15930
15931 bool
15932 tree_binary_nonzero_warnv_p (enum tree_code code,
15933 tree type,
15934 tree op0,
15935 tree op1, bool *strict_overflow_p)
15936 {
15937 bool sub_strict_overflow_p;
15938 switch (code)
15939 {
15940 case POINTER_PLUS_EXPR:
15941 case PLUS_EXPR:
15942 if (TYPE_OVERFLOW_UNDEFINED (type))
15943 {
15944 /* With the presence of negative values it is hard
15945 to say something. */
15946 sub_strict_overflow_p = false;
15947 if (!tree_expr_nonnegative_warnv_p (op0,
15948 &sub_strict_overflow_p)
15949 || !tree_expr_nonnegative_warnv_p (op1,
15950 &sub_strict_overflow_p))
15951 return false;
15952 	  /* One of the operands must be positive and the other non-negative.  */
15953 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15954 overflows, on a twos-complement machine the sum of two
15955 nonnegative numbers can never be zero. */
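	  /* E.g. in 32-bit int, INT_MAX + INT_MAX wraps to -2, not 0;
	     the sum of two nonnegative values is zero only when both
	     are zero.  */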
15956 return (tree_expr_nonzero_warnv_p (op0,
15957 strict_overflow_p)
15958 || tree_expr_nonzero_warnv_p (op1,
15959 strict_overflow_p));
15960 }
15961 break;
15962
15963 case MULT_EXPR:
15964 if (TYPE_OVERFLOW_UNDEFINED (type))
15965 {
15966 if (tree_expr_nonzero_warnv_p (op0,
15967 strict_overflow_p)
15968 && tree_expr_nonzero_warnv_p (op1,
15969 strict_overflow_p))
15970 {
15971 *strict_overflow_p = true;
15972 return true;
15973 }
15974 }
15975 break;
15976
15977 case MIN_EXPR:
15978 sub_strict_overflow_p = false;
15979 if (tree_expr_nonzero_warnv_p (op0,
15980 &sub_strict_overflow_p)
15981 && tree_expr_nonzero_warnv_p (op1,
15982 &sub_strict_overflow_p))
15983 {
15984 	  if (sub_strict_overflow_p)
15985 	    *strict_overflow_p = true;
	  /* The minimum of two nonzero values is itself one of them,
	     hence nonzero.  */
	  return true;
15986 	}
15987 break;
15988
15989 case MAX_EXPR:
15990 sub_strict_overflow_p = false;
15991 if (tree_expr_nonzero_warnv_p (op0,
15992 &sub_strict_overflow_p))
15993 {
15994 if (sub_strict_overflow_p)
15995 *strict_overflow_p = true;
15996
15997 /* When both operands are nonzero, then MAX must be too. */
15998 if (tree_expr_nonzero_warnv_p (op1,
15999 strict_overflow_p))
16000 return true;
16001
16002 /* MAX where operand 0 is positive is positive. */
16003 return tree_expr_nonnegative_warnv_p (op0,
16004 strict_overflow_p);
16005 }
16006 /* MAX where operand 1 is positive is positive. */
16007 else if (tree_expr_nonzero_warnv_p (op1,
16008 &sub_strict_overflow_p)
16009 && tree_expr_nonnegative_warnv_p (op1,
16010 &sub_strict_overflow_p))
16011 {
16012 if (sub_strict_overflow_p)
16013 *strict_overflow_p = true;
16014 return true;
16015 }
16016 break;
16017
16018 case BIT_IOR_EXPR:
16019 return (tree_expr_nonzero_warnv_p (op1,
16020 strict_overflow_p)
16021 || tree_expr_nonzero_warnv_p (op0,
16022 strict_overflow_p));
16023
16024 default:
16025 break;
16026 }
16027
16028 return false;
16029 }
16030
16031 /* Return true when T is an address and is known to be nonzero.
16032 For floating point we further ensure that T is not denormal.
16033    Similar logic is present in nonzero_address_p in rtlanal.c.
16034
16035 If the return value is based on the assumption that signed overflow
16036 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16037 change *STRICT_OVERFLOW_P. */
16038
16039 bool
16040 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16041 {
16042 bool sub_strict_overflow_p;
16043 switch (TREE_CODE (t))
16044 {
16045 case INTEGER_CST:
16046 return !integer_zerop (t);
16047
16048 case ADDR_EXPR:
16049 {
16050 tree base = TREE_OPERAND (t, 0);
16051 if (!DECL_P (base))
16052 base = get_base_address (base);
16053
16054 if (!base)
16055 return false;
16056
16057 /* Weak declarations may link to NULL. Other things may also be NULL
16058 so protect with -fdelete-null-pointer-checks; but not variables
16059 allocated on the stack. */
16060 if (DECL_P (base)
16061 && (flag_delete_null_pointer_checks
16062 || (DECL_CONTEXT (base)
16063 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16064 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16065 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16066
16067 /* Constants are never weak. */
16068 if (CONSTANT_CLASS_P (base))
16069 return true;
16070
16071 return false;
16072 }
16073
16074 case COND_EXPR:
16075 sub_strict_overflow_p = false;
16076 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16077 &sub_strict_overflow_p)
16078 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16079 &sub_strict_overflow_p))
16080 {
16081 if (sub_strict_overflow_p)
16082 *strict_overflow_p = true;
16083 return true;
16084 }
16085 break;
16086
16087 default:
16088 break;
16089 }
16090 return false;
16091 }
16092
16093 /* Return true when T is an address and is known to be nonzero.
16094 For floating point we further ensure that T is not denormal.
16095    Similar logic is present in nonzero_address_p in rtlanal.c.
16096
16097 If the return value is based on the assumption that signed overflow
16098 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16099 change *STRICT_OVERFLOW_P. */
16100
16101 bool
16102 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16103 {
16104 tree type = TREE_TYPE (t);
16105 enum tree_code code;
16106
16107 /* Doing something useful for floating point would need more work. */
16108 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16109 return false;
16110
16111 code = TREE_CODE (t);
16112 switch (TREE_CODE_CLASS (code))
16113 {
16114 case tcc_unary:
16115 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16116 strict_overflow_p);
16117 case tcc_binary:
16118 case tcc_comparison:
16119 return tree_binary_nonzero_warnv_p (code, type,
16120 TREE_OPERAND (t, 0),
16121 TREE_OPERAND (t, 1),
16122 strict_overflow_p);
16123 case tcc_constant:
16124 case tcc_declaration:
16125 case tcc_reference:
16126 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16127
16128 default:
16129 break;
16130 }
16131
16132 switch (code)
16133 {
16134 case TRUTH_NOT_EXPR:
16135 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16136 strict_overflow_p);
16137
16138 case TRUTH_AND_EXPR:
16139 case TRUTH_OR_EXPR:
16140 case TRUTH_XOR_EXPR:
16141 return tree_binary_nonzero_warnv_p (code, type,
16142 TREE_OPERAND (t, 0),
16143 TREE_OPERAND (t, 1),
16144 strict_overflow_p);
16145
16146 case COND_EXPR:
16147 case CONSTRUCTOR:
16148 case OBJ_TYPE_REF:
16149 case ASSERT_EXPR:
16150 case ADDR_EXPR:
16151 case WITH_SIZE_EXPR:
16152 case SSA_NAME:
16153 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16154
16155 case COMPOUND_EXPR:
16156 case MODIFY_EXPR:
16157 case BIND_EXPR:
16158 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16159 strict_overflow_p);
16160
16161 case SAVE_EXPR:
16162 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16163 strict_overflow_p);
16164
16165 case CALL_EXPR:
16166 return alloca_call_p (t);
16167
16168 default:
16169 break;
16170 }
16171 return false;
16172 }
16173
16174 /* Return true when T is an address and is known to be nonzero.
16175 Handle warnings about undefined signed overflow. */
16176
16177 bool
16178 tree_expr_nonzero_p (tree t)
16179 {
16180 bool ret, strict_overflow_p;
16181
16182 strict_overflow_p = false;
16183 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16184 if (strict_overflow_p)
16185 fold_overflow_warning (("assuming signed overflow does not occur when "
16186 "determining that expression is always "
16187 "non-zero"),
16188 WARN_STRICT_OVERFLOW_MISC);
16189 return ret;
16190 }
16191
16192 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16193 attempt to fold the expression to a constant without modifying TYPE,
16194 OP0 or OP1.
16195
16196 If the expression could be simplified to a constant, then return
16197    the constant.  If the expression cannot be simplified to a
16198    constant, return NULL_TREE.  */
16199
16200 tree
16201 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16202 {
16203 tree tem = fold_binary (code, type, op0, op1);
16204 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16205 }
16206
16207 /* Given the components of a unary expression CODE, TYPE and OP0,
16208 attempt to fold the expression to a constant without modifying
16209 TYPE or OP0.
16210
16211 If the expression could be simplified to a constant, then return
16212    the constant.  If the expression cannot be simplified to a
16213    constant, return NULL_TREE.  */
16214
16215 tree
16216 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16217 {
16218 tree tem = fold_unary (code, type, op0);
16219 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16220 }
16221
16222 /* If EXP represents referencing an element in a constant string
16223 (either via pointer arithmetic or array indexing), return the
16224 tree representing the value accessed, otherwise return NULL. */
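/* E.g. both "abc"[1] and *("abc" + 1) fold to the INTEGER_CST 'b' of the
   element type.  */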
16225
16226 tree
16227 fold_read_from_constant_string (tree exp)
16228 {
16229 if ((TREE_CODE (exp) == INDIRECT_REF
16230 || TREE_CODE (exp) == ARRAY_REF)
16231 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16232 {
16233 tree exp1 = TREE_OPERAND (exp, 0);
16234 tree index;
16235 tree string;
16236 location_t loc = EXPR_LOCATION (exp);
16237
16238 if (TREE_CODE (exp) == INDIRECT_REF)
16239 string = string_constant (exp1, &index);
16240 else
16241 {
16242 tree low_bound = array_ref_low_bound (exp);
16243 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16244
16245 /* Optimize the special-case of a zero lower bound.
16246
16247 We convert the low_bound to sizetype to avoid some problems
16248 with constant folding. (E.g. suppose the lower bound is 1,
16249 	     and its mode is QI.  Without the conversion, (ARRAY
16250 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16251 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16252 if (! integer_zerop (low_bound))
16253 index = size_diffop_loc (loc, index,
16254 fold_convert_loc (loc, sizetype, low_bound));
16255
16256 string = exp1;
16257 }
16258
16259 if (string
16260 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16261 && TREE_CODE (string) == STRING_CST
16262 && TREE_CODE (index) == INTEGER_CST
16263 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16264 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16265 == MODE_INT)
16266 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16267 return build_int_cst_type (TREE_TYPE (exp),
16268 (TREE_STRING_POINTER (string)
16269 [TREE_INT_CST_LOW (index)]));
16270 }
16271 return NULL;
16272 }
16273
16274 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16275 an integer constant, real, or fixed-point constant.
16276
16277 TYPE is the type of the result. */
16278
16279 static tree
16280 fold_negate_const (tree arg0, tree type)
16281 {
16282 tree t = NULL_TREE;
16283
16284 switch (TREE_CODE (arg0))
16285 {
16286 case INTEGER_CST:
16287 {
16288 double_int val = tree_to_double_int (arg0);
16289 bool overflow;
16290 val = val.neg_with_overflow (&overflow);
16291 t = force_fit_type_double (type, val, 1,
16292 (overflow | TREE_OVERFLOW (arg0))
16293 && !TYPE_UNSIGNED (type));
16294 break;
16295 }
16296
16297 case REAL_CST:
16298 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16299 break;
16300
16301 case FIXED_CST:
16302 {
16303 FIXED_VALUE_TYPE f;
16304 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16305 &(TREE_FIXED_CST (arg0)), NULL,
16306 TYPE_SATURATING (type));
16307 t = build_fixed (type, f);
16308 /* Propagate overflow flags. */
16309 if (overflow_p | TREE_OVERFLOW (arg0))
16310 TREE_OVERFLOW (t) = 1;
16311 break;
16312 }
16313
16314 default:
16315 gcc_unreachable ();
16316 }
16317
16318 return t;
16319 }
16320
16321 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16322 an integer constant or real constant.
16323
16324 TYPE is the type of the result. */
16325
16326 tree
16327 fold_abs_const (tree arg0, tree type)
16328 {
16329 tree t = NULL_TREE;
16330
16331 switch (TREE_CODE (arg0))
16332 {
16333 case INTEGER_CST:
16334 {
16335 double_int val = tree_to_double_int (arg0);
16336
16337 /* If the value is unsigned or non-negative, then the absolute value
16338 is the same as the ordinary value. */
16339 if (TYPE_UNSIGNED (type)
16340 || !val.is_negative ())
16341 t = arg0;
16342
16343 /* If the value is negative, then the absolute value is
16344 its negation. */
16345 else
16346 {
16347 bool overflow;
16348 val = val.neg_with_overflow (&overflow);
16349 t = force_fit_type_double (type, val, -1,
16350 overflow | TREE_OVERFLOW (arg0));
16351 }
16352 }
16353 break;
16354
16355 case REAL_CST:
16356 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16357 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16358 else
16359 t = arg0;
16360 break;
16361
16362 default:
16363 gcc_unreachable ();
16364 }
16365
16366 return t;
16367 }
16368
16369 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16370 constant. TYPE is the type of the result. */
16371
16372 static tree
16373 fold_not_const (const_tree arg0, tree type)
16374 {
16375 double_int val;
16376
16377 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16378
16379 val = ~tree_to_double_int (arg0);
16380 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16381 }
16382
16383 /* Given CODE, a relational operator, the target type, TYPE and two
16384 constant operands OP0 and OP1, return the result of the
16385 relational operation. If the result is not a compile time
16386 constant, then return NULL_TREE. */
16387
16388 static tree
16389 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16390 {
16391 int result, invert;
16392
16393 /* From here on, the only cases we handle are when the result is
16394 known to be a constant. */
16395
16396 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16397 {
16398 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16399 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16400
16401 /* Handle the cases where either operand is a NaN. */
16402 if (real_isnan (c0) || real_isnan (c1))
16403 {
16404 switch (code)
16405 {
16406 case EQ_EXPR:
16407 case ORDERED_EXPR:
16408 result = 0;
16409 break;
16410
16411 case NE_EXPR:
16412 case UNORDERED_EXPR:
16413 case UNLT_EXPR:
16414 case UNLE_EXPR:
16415 case UNGT_EXPR:
16416 case UNGE_EXPR:
16417 case UNEQ_EXPR:
16418 result = 1;
16419 break;
16420
16421 case LT_EXPR:
16422 case LE_EXPR:
16423 case GT_EXPR:
16424 case GE_EXPR:
16425 case LTGT_EXPR:
16426 if (flag_trapping_math)
16427 return NULL_TREE;
16428 result = 0;
16429 break;
16430
16431 default:
16432 gcc_unreachable ();
16433 }
16434
16435 return constant_boolean_node (result, type);
16436 }
16437
16438 return constant_boolean_node (real_compare (code, c0, c1), type);
16439 }
16440
16441 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16442 {
16443 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16444 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16445 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16446 }
16447
16448 /* Handle equality/inequality of complex constants. */
16449 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16450 {
16451 tree rcond = fold_relational_const (code, type,
16452 TREE_REALPART (op0),
16453 TREE_REALPART (op1));
16454 tree icond = fold_relational_const (code, type,
16455 TREE_IMAGPART (op0),
16456 TREE_IMAGPART (op1));
16457 if (code == EQ_EXPR)
16458 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16459 else if (code == NE_EXPR)
16460 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16461 else
16462 return NULL_TREE;
16463 }
16464
16465 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16466 {
16467 unsigned count = VECTOR_CST_NELTS (op0);
16468 tree *elts = XALLOCAVEC (tree, count);
16469 gcc_assert (VECTOR_CST_NELTS (op1) == count
16470 && TYPE_VECTOR_SUBPARTS (type) == count);
16471
16472 for (unsigned i = 0; i < count; i++)
16473 {
16474 tree elem_type = TREE_TYPE (type);
16475 tree elem0 = VECTOR_CST_ELT (op0, i);
16476 tree elem1 = VECTOR_CST_ELT (op1, i);
16477
16478 tree tem = fold_relational_const (code, elem_type,
16479 elem0, elem1);
16480
16481 if (tem == NULL_TREE)
16482 return NULL_TREE;
16483
16484 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16485 }
16486
16487 return build_vector (type, elts);
16488 }
16489
16490 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16491
16492 To compute GT, swap the arguments and do LT.
16493 To compute GE, do LT and invert the result.
16494 To compute LE, swap the arguments, do LT and invert the result.
16495 To compute NE, do EQ and invert the result.
16496
16497 Therefore, the code below must handle only EQ and LT. */
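  /* E.g. 3 > 2 is computed as 2 < 3, and 3 >= 2 as the inversion of
     3 < 2.  */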
16498
16499 if (code == LE_EXPR || code == GT_EXPR)
16500 {
16501 tree tem = op0;
16502 op0 = op1;
16503 op1 = tem;
16504 code = swap_tree_comparison (code);
16505 }
16506
16507 /* Note that it is safe to invert for real values here because we
16508    have already handled the one case where it matters.  */
16509
16510 invert = 0;
16511 if (code == NE_EXPR || code == GE_EXPR)
16512 {
16513 invert = 1;
16514 code = invert_tree_comparison (code, false);
16515 }
16516
16517 /* Compute a result for LT or EQ if args permit;
16518      otherwise return NULL_TREE.  */
16519 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16520 {
16521 if (code == EQ_EXPR)
16522 result = tree_int_cst_equal (op0, op1);
16523 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16524 result = INT_CST_LT_UNSIGNED (op0, op1);
16525 else
16526 result = INT_CST_LT (op0, op1);
16527 }
16528 else
16529 return NULL_TREE;
16530
16531 if (invert)
16532 result ^= 1;
16533 return constant_boolean_node (result, type);
16534 }
16535
16536 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16537 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16538 itself. */
16539
16540 tree
16541 fold_build_cleanup_point_expr (tree type, tree expr)
16542 {
16543 /* If the expression does not have side effects then we don't have to wrap
16544 it with a cleanup point expression. */
16545 if (!TREE_SIDE_EFFECTS (expr))
16546 return expr;
16547
16548   /* If the expression is a return, check whether the expression inside
16549      the return, or the right-hand side of a MODIFY_EXPR inside it, has
16550      side effects.  If it does not, we don't need to wrap the expression
16551      in a cleanup point expression.  Note we don't check the left-hand
16552      side of the modify because it should always be a return decl.  */
16553 if (TREE_CODE (expr) == RETURN_EXPR)
16554 {
16555 tree op = TREE_OPERAND (expr, 0);
16556 if (!op || !TREE_SIDE_EFFECTS (op))
16557 return expr;
16558 op = TREE_OPERAND (op, 1);
16559 if (!TREE_SIDE_EFFECTS (op))
16560 return expr;
16561 }
16562
16563 return build1 (CLEANUP_POINT_EXPR, type, expr);
16564 }
16565
16566 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16567 of an indirection through OP0, or NULL_TREE if no simplification is
16568 possible. */
16569
16570 tree
16571 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16572 {
16573 tree sub = op0;
16574 tree subtype;
16575
16576 STRIP_NOPS (sub);
16577 subtype = TREE_TYPE (sub);
16578 if (!POINTER_TYPE_P (subtype))
16579 return NULL_TREE;
16580
16581 if (TREE_CODE (sub) == ADDR_EXPR)
16582 {
16583 tree op = TREE_OPERAND (sub, 0);
16584 tree optype = TREE_TYPE (op);
16585 /* *&CONST_DECL -> to the value of the const decl. */
16586 if (TREE_CODE (op) == CONST_DECL)
16587 return DECL_INITIAL (op);
16588 /* *&p => p; make sure to handle *&"str"[cst] here. */
16589 if (type == optype)
16590 {
16591 tree fop = fold_read_from_constant_string (op);
16592 if (fop)
16593 return fop;
16594 else
16595 return op;
16596 }
16597 /* *(foo *)&fooarray => fooarray[0] */
16598 else if (TREE_CODE (optype) == ARRAY_TYPE
16599 && type == TREE_TYPE (optype)
16600 && (!in_gimple_form
16601 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16602 {
16603 tree type_domain = TYPE_DOMAIN (optype);
16604 tree min_val = size_zero_node;
16605 if (type_domain && TYPE_MIN_VALUE (type_domain))
16606 min_val = TYPE_MIN_VALUE (type_domain);
16607 if (in_gimple_form
16608 && TREE_CODE (min_val) != INTEGER_CST)
16609 return NULL_TREE;
16610 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16611 NULL_TREE, NULL_TREE);
16612 }
16613 /* *(foo *)&complexfoo => __real__ complexfoo */
16614 else if (TREE_CODE (optype) == COMPLEX_TYPE
16615 && type == TREE_TYPE (optype))
16616 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16617 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16618 else if (TREE_CODE (optype) == VECTOR_TYPE
16619 && type == TREE_TYPE (optype))
16620 {
16621 tree part_width = TYPE_SIZE (type);
16622 tree index = bitsize_int (0);
16623 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16624 }
16625 }
16626
16627 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16628 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16629 {
16630 tree op00 = TREE_OPERAND (sub, 0);
16631 tree op01 = TREE_OPERAND (sub, 1);
16632
16633 STRIP_NOPS (op00);
16634 if (TREE_CODE (op00) == ADDR_EXPR)
16635 {
16636 tree op00type;
16637 op00 = TREE_OPERAND (op00, 0);
16638 op00type = TREE_TYPE (op00);
16639
16640 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16641 if (TREE_CODE (op00type) == VECTOR_TYPE
16642 && type == TREE_TYPE (op00type))
16643 {
16644 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16645 tree part_width = TYPE_SIZE (type);
16646 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16647 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16648 tree index = bitsize_int (indexi);
16649
16650 	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16651 return fold_build3_loc (loc,
16652 BIT_FIELD_REF, type, op00,
16653 part_width, index);
16654
16655 }
16656 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16657 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16658 && type == TREE_TYPE (op00type))
16659 {
16660 tree size = TYPE_SIZE_UNIT (type);
16661 if (tree_int_cst_equal (size, op01))
16662 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16663 }
16664 /* ((foo *)&fooarray)[1] => fooarray[1] */
16665 else if (TREE_CODE (op00type) == ARRAY_TYPE
16666 && type == TREE_TYPE (op00type))
16667 {
16668 tree type_domain = TYPE_DOMAIN (op00type);
16669 tree min_val = size_zero_node;
16670 if (type_domain && TYPE_MIN_VALUE (type_domain))
16671 min_val = TYPE_MIN_VALUE (type_domain);
16672 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16673 TYPE_SIZE_UNIT (type));
16674 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16675 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16676 NULL_TREE, NULL_TREE);
16677 }
16678 }
16679 }
16680
16681 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16682 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16683 && type == TREE_TYPE (TREE_TYPE (subtype))
16684 && (!in_gimple_form
16685 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16686 {
16687 tree type_domain;
16688 tree min_val = size_zero_node;
16689 sub = build_fold_indirect_ref_loc (loc, sub);
16690 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16691 if (type_domain && TYPE_MIN_VALUE (type_domain))
16692 min_val = TYPE_MIN_VALUE (type_domain);
16693 if (in_gimple_form
16694 && TREE_CODE (min_val) != INTEGER_CST)
16695 return NULL_TREE;
16696 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16697 NULL_TREE);
16698 }
16699
16700 return NULL_TREE;
16701 }
16702
16703 /* Builds an expression for an indirection through T, simplifying some
16704 cases. */
16705
16706 tree
16707 build_fold_indirect_ref_loc (location_t loc, tree t)
16708 {
16709 tree type = TREE_TYPE (TREE_TYPE (t));
16710 tree sub = fold_indirect_ref_1 (loc, type, t);
16711
16712 if (sub)
16713 return sub;
16714
16715 return build1_loc (loc, INDIRECT_REF, type, t);
16716 }
16717
16718 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16719
16720 tree
16721 fold_indirect_ref_loc (location_t loc, tree t)
16722 {
16723 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16724
16725 if (sub)
16726 return sub;
16727 else
16728 return t;
16729 }
16730
16731 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16732 whose result is ignored. The type of the returned tree need not be
16733 the same as the original expression. */
16734
16735 tree
16736 fold_ignored_result (tree t)
16737 {
16738 if (!TREE_SIDE_EFFECTS (t))
16739 return integer_zero_node;
16740
16741 for (;;)
16742 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16743 {
16744 case tcc_unary:
16745 t = TREE_OPERAND (t, 0);
16746 break;
16747
16748 case tcc_binary:
16749 case tcc_comparison:
16750 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16751 t = TREE_OPERAND (t, 0);
16752 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16753 t = TREE_OPERAND (t, 1);
16754 else
16755 return t;
16756 break;
16757
16758 case tcc_expression:
16759 switch (TREE_CODE (t))
16760 {
16761 case COMPOUND_EXPR:
16762 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16763 return t;
16764 t = TREE_OPERAND (t, 0);
16765 break;
16766
16767 case COND_EXPR:
16768 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16769 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16770 return t;
16771 t = TREE_OPERAND (t, 0);
16772 break;
16773
16774 default:
16775 return t;
16776 }
16777 break;
16778
16779 default:
16780 return t;
16781 }
16782 }
16783
16784 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16785 This can only be applied to objects of a sizetype. */
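/* E.g. rounding a size up to an 8-byte boundary:

     size = round_up_loc (loc, size, 8);

   returns SIZE unchanged when it is already a multiple of 8; since 8 is
   a power of two, the general case folds to (SIZE + 7) & -8 instead of
   using a division.  */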
16786
16787 tree
16788 round_up_loc (location_t loc, tree value, int divisor)
16789 {
16790 tree div = NULL_TREE;
16791
16792 gcc_assert (divisor > 0);
16793 if (divisor == 1)
16794 return value;
16795
16796   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16797      have to do anything.  Only do this check when VALUE is not a
16798      constant, because for a constant the multiple_of_p check is more
16799      expensive than simply performing the rounding.  */
16800 if (TREE_CODE (value) != INTEGER_CST)
16801 {
16802 div = build_int_cst (TREE_TYPE (value), divisor);
16803
16804 if (multiple_of_p (TREE_TYPE (value), value, div))
16805 return value;
16806 }
16807
16808 /* If divisor is a power of two, simplify this to bit manipulation. */
16809 if (divisor == (divisor & -divisor))
16810 {
16811 if (TREE_CODE (value) == INTEGER_CST)
16812 {
16813 double_int val = tree_to_double_int (value);
16814 bool overflow_p;
16815
16816 if ((val.low & (divisor - 1)) == 0)
16817 return value;
16818
16819 overflow_p = TREE_OVERFLOW (value);
16820 val.low &= ~(divisor - 1);
16821 val.low += divisor;
16822 if (val.low == 0)
16823 {
16824 val.high++;
16825 if (val.high == 0)
16826 overflow_p = true;
16827 }
16828
16829 return force_fit_type_double (TREE_TYPE (value), val,
16830 -1, overflow_p);
16831 }
16832 else
16833 {
16834 tree t;
16835
16836 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16837 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16838 t = build_int_cst (TREE_TYPE (value), -divisor);
16839 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16840 }
16841 }
16842 else
16843 {
16844 if (!div)
16845 div = build_int_cst (TREE_TYPE (value), divisor);
16846 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16847 value = size_binop_loc (loc, MULT_EXPR, value, div);
16848 }
16849
16850 return value;
16851 }
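
/* Hedged examples of the three paths above, for a non-constant
   sizetype tree SZ (a hypothetical name):

     round_up_loc (loc, size_int (37), 8)  ==> 40
     round_up_loc (loc, sz, 8)             ==> (sz + 7) & -8
     round_up_loc (loc, sz, 12)            ==> CEIL_DIV (sz, 12) * 12

   The power-of-two case trades the division for bit manipulation.  */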

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only perform this check when VALUE is not a
     constant, because for a constant the check is more expensive than
     simply doing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
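
/* Likewise, a sketch for rounding down with the same hypothetical SZ:

     round_down_loc (loc, sz, 8)   ==> sz & -8
     round_down_loc (loc, sz, 12)  ==> FLOOR_DIV (sz, 12) * 12

   Unlike round_up_loc, the power-of-two case needs no addition first,
   since masking the low bits already rounds downward.  */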

/* Return a pointer to the base of the object addressed by EXP and
   extract information about the offset of the access, storing the
   constant bit offset in *PBITPOS and any variable byte offset in
   *POFFSET (NULL_TREE if there is none).  */

static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
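
/* Illustrative decomposition (a sketch; A is a hypothetical struct
   with an array member F): for the address &a.f[i], the returned core
   is &a, *PBITPOS receives the constant bit offset of F within A, and
   *POFFSET the variable part i * sizeof (*a.f); a pointer that is not
   an ADDR_EXPR comes back unchanged with zero offsets.  */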

/* Return true if the addresses E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
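
/* Usage sketch (assuming a hypothetical declaration int a[10], with
   E1 and E2 the address trees for &a[3] and &a[1]):

     HOST_WIDE_INT d;
     ptr_difference_const (e1, e2, &d);   ==> true, d == 2 * sizeof (int)

   When only one side has a variable offset, e.g. &a[i] vs. &a[1], the
   difference cannot be constant and false is returned.  */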

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip the copysign call and return its first argument; the
               second is retained only for its side effects.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
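
/* Hedged examples of the strips performed above (X and Y hypothetical
   floating-point operands whose result sign is known not to matter):

     -x  (NEGATE_EXPR)  ==> x
     ABS_EXPR <x>       ==> x
     copysign (x, y)    ==> x          (y kept only for its side effects)
     sin (-x)           ==> sin (x)    (sin satisfies negate_mathfn_p)
     -x * y             ==> x * y      (unless sign-dependent rounding)  */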