/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2013 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "tree.h"
#include "realmpfr.h"
#include "rtl.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "ggc.h"
#include "hash-table.h"
#include "langhooks.h"
#include "md5.h"
#include "gimple.h"
#include "tree-flow.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};

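/* A worked example of the encoding (added illustration, not in the
   original sources): bit 0 stands for "less", bit 1 for "equal",
   bit 2 for "greater" and bit 3 for "unordered".  Hence
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT),
   so ORing two codes models "||" of the comparisons and ANDing
   models "&&".  */
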
static bool negate_mathfn_p (enum built_in_function);
static bool negate_expr_p (tree);
static tree negate_expr (tree);
static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
static tree const_binop (enum tree_code, tree, tree);
static enum comparison_code comparison_to_compcode (enum tree_code);
static enum tree_code compcode_to_comparison (enum comparison_code);
static int operand_equal_for_comparison_p (tree, tree, tree);
static int twoval_comparison_p (tree, tree *, tree *, int *);
static tree eval_subst (location_t, tree, tree, tree, tree, tree);
static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
static tree make_bit_field_ref (location_t, tree, tree,
                                HOST_WIDE_INT, HOST_WIDE_INT, int);
static tree optimize_bit_field_compare (location_t, enum tree_code,
                                        tree, tree, tree);
static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
                                    HOST_WIDE_INT *,
                                    enum machine_mode *, int *, int *,
                                    tree *, tree *);
static int all_ones_mask_p (const_tree, int);
static tree sign_bit_p (tree, const_tree);
static int simple_operand_p (const_tree);
static bool simple_operand_p_2 (tree);
static tree range_binop (enum tree_code, tree, tree, int, tree, int);
static tree range_predecessor (tree);
static tree range_successor (tree);
static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
static tree unextend (tree, int, int, tree);
static tree optimize_minmax_comparison (location_t, enum tree_code,
                                        tree, tree, tree);
static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
static tree fold_binary_op_with_conditional_arg (location_t,
                                                 enum tree_code, tree,
                                                 tree, tree,
                                                 tree, tree, int);
static tree fold_mathfn_compare (location_t,
                                 enum built_in_function, enum tree_code,
                                 tree, tree, tree);
static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
static bool reorder_operands_p (const_tree, const_tree);
static tree fold_negate_const (tree, tree);
static tree fold_not_const (const_tree, tree);
static tree fold_relational_const (enum tree_code, tree, tree, tree);
static tree fold_convert_const (enum tree_code, tree, tree);

/* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
   Otherwise, return LOC.  */

static location_t
expr_location_or (tree t, location_t loc)
{
  location_t tloc = EXPR_LOCATION (t);
  return tloc == UNKNOWN_LOCATION ? loc : tloc;
}

/* Similar to protected_set_expr_location, but never modify X in place;
   if the location can and needs to be set, unshare X first.  */

static inline tree
protected_set_expr_location_unshare (tree x, location_t loc)
{
  if (CAN_HAVE_LOCATION_P (x)
      && EXPR_LOCATION (x) != loc
      && !(TREE_CODE (x) == SAVE_EXPR
           || TREE_CODE (x) == TARGET_EXPR
           || TREE_CODE (x) == BIND_EXPR))
    {
      x = copy_node (x);
      SET_EXPR_LOCATION (x, loc);
    }
  return x;
}
\f
/* If ARG2 divides ARG1 with zero remainder, carries out the division
   of type CODE and returns the quotient.
   Otherwise returns NULL_TREE.  */

tree
div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
{
  double_int quo, rem;
  int uns;

  /* The sign of the division is taken from operand two; that does
     the correct thing for POINTER_PLUS_EXPR, where we want a signed
     division.  */
  uns = TYPE_UNSIGNED (TREE_TYPE (arg2));

  quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
                                          uns, code, &rem);

  if (rem.is_zero ())
    return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);

  return NULL_TREE;
}
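
/* For example (added illustration): with CODE == TRUNC_DIV_EXPR,
   ARG1 == 12 and ARG2 == 4 this returns the INTEGER_CST 3, while
   ARG1 == 13 and ARG2 == 4 leaves a nonzero remainder and so
   yields NULL_TREE.  */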
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
          && code != 0
          && code < (int) fold_deferred_overflow_code)
        fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

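/* A typical use of this machinery, sketched here as an added
   illustration (hypothetical caller code, not in the original
   sources):

     fold_defer_overflow_warnings ();
     t = fold (expr);
     ... decide whether T is actually used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so that a warning is only issued when the folded result is kept.  */
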
/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
          || wc < fold_deferred_overflow_code)
        {
          fold_deferred_overflow_warning = gmsgid;
          fold_deferred_overflow_code = wc;
        }
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  unsigned HOST_WIDE_INT val;
  unsigned int prec;
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  prec = TYPE_PRECISION (type);
  if (prec > HOST_BITS_PER_WIDE_INT)
    {
      if (TREE_INT_CST_LOW (t) != 0)
        return true;
      prec -= HOST_BITS_PER_WIDE_INT;
      val = TREE_INT_CST_HIGH (t);
    }
  else
    val = TREE_INT_CST_LOW (t);
  if (prec < HOST_BITS_PER_WIDE_INT)
    val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
  return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
}

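/* As an added illustration (not in the original sources): for a
   32-bit signed type the only value rejected here is INT_MIN
   (-2147483648), whose negation 2147483648 is not representable;
   every other value can be negated without overflow.  */
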
/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (TYPE_OVERFLOW_WRAPS (type))
        return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
              && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
    case NEGATE_EXPR:
      return true;

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
         that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
             && negate_expr_p (TREE_IMAGPART (t));

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
             && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
          && reorder_operands_p (TREE_OPERAND (t, 0),
                                 TREE_OPERAND (t, 1)))
        return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
             && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
             && reorder_operands_p (TREE_OPERAND (t, 0),
                                    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
        return negate_expr_p (TREE_OPERAND (t, 1))
               || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t))
          && !TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
        break;
      return negate_expr_p (TREE_OPERAND (t, 1))
             || negate_expr_p (TREE_OPERAND (t, 0));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tree tem = strip_float_extensions (t);
          if (tem != t)
            return negate_expr_p (tem);
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
        return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            return true;
        }
      break;

    default:
      break;
    }
  return false;
}

/* Given T, an expression, return a folded tree for -T, or NULL_TREE
   if no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
        return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
                                build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
          || !TYPE_OVERFLOW_TRAPS (type))
        return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      /* Two's complement FP formats, such as c4x, may overflow.  */
      if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
        return tem;
      break;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
        tree rpart = negate_expr (TREE_REALPART (t));
        tree ipart = negate_expr (TREE_IMAGPART (t));

        if ((TREE_CODE (rpart) == REAL_CST
             && TREE_CODE (ipart) == REAL_CST)
            || (TREE_CODE (rpart) == INTEGER_CST
                && TREE_CODE (ipart) == INTEGER_CST))
          return build_complex (type, rpart, ipart);
      }
      break;

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
        return fold_build2_loc (loc, COMPLEX_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)),
                                fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
        return fold_build1_loc (loc, CONJ_EXPR, type,
                                fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      return TREE_OPERAND (t, 0);

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
        {
          /* -(A + B) -> (-B) - A.  */
          if (negate_expr_p (TREE_OPERAND (t, 1))
              && reorder_operands_p (TREE_OPERAND (t, 0),
                                     TREE_OPERAND (t, 1)))
            {
              tem = negate_expr (TREE_OPERAND (t, 1));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 0));
            }

          /* -(A + B) -> (-A) - B.  */
          if (negate_expr_p (TREE_OPERAND (t, 0)))
            {
              tem = negate_expr (TREE_OPERAND (t, 0));
              return fold_build2_loc (loc, MINUS_EXPR, type,
                                      tem, TREE_OPERAND (t, 1));
            }
        }
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
          && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
          && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
        return fold_build2_loc (loc, MINUS_EXPR, type,
                                TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
        break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
        {
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    TREE_OPERAND (t, 0), negate_expr (tem));
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            return fold_build2_loc (loc, TREE_CODE (t), type,
                                    negate_expr (tem), TREE_OPERAND (t, 1));
        }
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case FLOOR_DIV_EXPR:
    case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
         B is 1, we may turn this into INT_MIN / -1 which is undefined
         and actually traps on some architectures.  But if overflow is
         undefined, we can negate, because - (INT_MIN / 1) is an
         overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
        {
          const char * const warnmsg = G_("assuming signed overflow does not "
                                          "occur when negating a division");
          tem = TREE_OPERAND (t, 1);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || integer_onep (tem)))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      TREE_OPERAND (t, 0), negate_expr (tem));
            }
          tem = TREE_OPERAND (t, 0);
          if (negate_expr_p (tem))
            {
              if (INTEGRAL_TYPE_P (type)
                  && (TREE_CODE (tem) != INTEGER_CST
                      || tree_int_cst_equal (tem, TYPE_MIN_VALUE (type))))
                fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
              return fold_build2_loc (loc, TREE_CODE (t), type,
                                      negate_expr (tem), TREE_OPERAND (t, 1));
            }
        }
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
        {
          tem = strip_float_extensions (t);
          if (tem != t && negate_expr_p (tem))
            return fold_convert_loc (loc, type, negate_expr (tem));
        }
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
          && negate_expr_p (CALL_EXPR_ARG (t, 0)))
        {
          tree fndecl, arg;

          fndecl = get_callee_fndecl (t);
          arg = negate_expr (CALL_EXPR_ARG (t, 0));
          return build_call_expr_loc (loc, fndecl, 1, arg);
        }
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
        {
          tree op1 = TREE_OPERAND (t, 1);
          if (TREE_INT_CST_HIGH (op1) == 0
              && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
                 == TREE_INT_CST_LOW (op1))
            {
              tree ntype = TYPE_UNSIGNED (type)
                           ? signed_type_for (type)
                           : unsigned_type_for (type);
              tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
              temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
              return fold_convert_loc (loc, type, temp);
            }
        }
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except if it is a
   literal, for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
            tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
           || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
               && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
               /* We can associate addition and subtraction together (even
                  though the C standard doesn't say so) for integers because
                  the value is not affected.  For reals, the value might be
                  affected, so we can't.  */
               && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
                   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
          || TREE_CODE (op0) == FIXED_CST)
        *litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
               || TREE_CODE (op1) == FIXED_CST)
        *litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
        *conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
        *conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
         decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
        var = in;
      else if (op0 != 0)
        var = op0;
      else
        var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
        *minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
        *conp = negate_expr (*conp);
      if (neg_var_p)
        var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
           && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
        *minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
        *litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}

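/* An added illustration (not in the original sources): splitting
   IN == "x + 5" with CODE == PLUS_EXPR stores 5 in *LITP, leaves
   *CONP and *MINUS_LITP null, and returns the variable part "x";
   splitting "x - 5" instead stores 5 in *MINUS_LITP.  */
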
/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
        {
          if (TREE_CODE (t1) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t2),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t1, 0)));
          else if (TREE_CODE (t2) == NEGATE_EXPR)
            return build2_loc (loc, MINUS_EXPR, type,
                               fold_convert_loc (loc, type, t1),
                               fold_convert_loc (loc, type,
                                                 TREE_OPERAND (t2, 0)));
          else if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (t2))
            return fold_convert_loc (loc, type, t1);
        }

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                         fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
                          fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
         && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
         && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
                   int overflowable)
{
  double_int op1, op2, res, tmp;
  tree t;
  tree type = TREE_TYPE (arg1);
  bool uns = TYPE_UNSIGNED (type);
  bool overflow = false;

  op1 = tree_to_double_int (arg1);
  op2 = tree_to_double_int (arg2);

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = op1 | op2;
      break;

    case BIT_XOR_EXPR:
      res = op1 ^ op2;
      break;

    case BIT_AND_EXPR:
      res = op1 & op2;
      break;

    case RSHIFT_EXPR:
      res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case LSHIFT_EXPR:
      /* It's unclear from the C standard whether shifts can overflow.
         The following code ignores overflow; perhaps a C standard
         interpretation ruling is needed.  */
      res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
      break;

    case RROTATE_EXPR:
      res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case LROTATE_EXPR:
      res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
      break;

    case PLUS_EXPR:
      res = op1.add_with_sign (op2, false, &overflow);
      break;

    case MINUS_EXPR:
      res = op1.sub_with_overflow (op2, &overflow);
      break;

    case MULT_EXPR:
      res = op1.mul_with_sign (op2, false, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
        {
          bool dummy_overflow;
          if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
            return NULL_TREE;
          op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
        }
      else
        {
          bool dummy_overflow;
          /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
             is performed in twice the precision of arguments.  */
          tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
          res = tmp.rshift (TYPE_PRECISION (type),
                            2 * TYPE_PRECISION (type), !uns);
        }
      break;

    case TRUNC_DIV_EXPR:
    case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_DIV_EXPR)
            op1.low += op2.low - 1;

          res.low = op1.low / op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_DIV_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      if (op2.is_one ())
        {
          res = op1;
          break;
        }
      if (op1 == op2 && !op1.is_zero ())
        {
          res = double_int_one;
          break;
        }
      res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
      break;

    case TRUNC_MOD_EXPR:
    case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
      /* This is a shortcut for a common special case.  */
      if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
          && !TREE_OVERFLOW (arg1)
          && !TREE_OVERFLOW (arg2)
          && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
        {
          if (code == CEIL_MOD_EXPR)
            op1.low += op2.low - 1;
          res.low = op1.low % op2.low, res.high = 0;
          break;
        }

      /* ... fall through ...  */

    case ROUND_MOD_EXPR:
      if (op2.is_zero ())
        return NULL_TREE;
      tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
      break;

    case MIN_EXPR:
      res = op1.min (op2, uns);
      break;

    case MAX_EXPR:
      res = op1.max (op2, uns);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
                             (!uns && overflow)
                             | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));

  return t;
}

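/* Combine two integer constants ARG1 and ARG2 under operation CODE.
   A thin wrapper around int_const_binop_1 with OVERFLOWABLE == 1.
   (Descriptive comment added; not in the original sources.)  */
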
tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST)
    return int_const_binop (code, arg1, arg2);

  if (TREE_CODE (arg1) == REAL_CST)
    {
      enum machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case RDIV_EXPR:
        case MIN_EXPR:
        case MAX_EXPR:
          break;

        default:
          return NULL_TREE;
        }

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
         either operand is a NaN.  */
      if (HONOR_SNANS (mode)
          && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
        return NULL_TREE;

      /* Don't perform operation if it would raise a division
         by zero exception.  */
      if (code == RDIV_EXPR
          && REAL_VALUES_EQUAL (d2, dconst0)
          && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
        return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
         for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
        return arg1;
      else if (REAL_VALUE_ISNAN (d2))
        return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
         the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
          && MODE_HAS_INFINITIES (mode)
          && REAL_VALUE_ISINF (result)
          && !REAL_VALUE_ISINF (d1)
          && !REAL_VALUE_ISINF (d2))
        return NULL_TREE;

      /* Don't constant fold this floating point operation if the
         result may depend upon the run-time rounding mode and
         flag_rounding_math is set, or if GCC's software emulation
         is unable to accurately represent the result.  */
      if ((flag_rounding_math
           || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
          && (inexact || !real_identical (&result, &value)))
        return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
        case MULT_EXPR:
        case TRUNC_DIV_EXPR:
          f2 = TREE_FIXED_CST (arg2);
          break;

        case LSHIFT_EXPR:
        case RSHIFT_EXPR:
          f2.data.high = TREE_INT_CST_HIGH (arg2);
          f2.data.low = TREE_INT_CST_LOW (arg2);
          f2.mode = SImode;
          break;

        default:
          return NULL_TREE;
        }

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
        TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
        {
        case PLUS_EXPR:
        case MINUS_EXPR:
          real = const_binop (code, r1, r2);
          imag = const_binop (code, i1, i2);
          break;

        case MULT_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_mul);

          real = const_binop (MINUS_EXPR,
                              const_binop (MULT_EXPR, r1, r2),
                              const_binop (MULT_EXPR, i1, i2));
          imag = const_binop (PLUS_EXPR,
                              const_binop (MULT_EXPR, r1, i2),
                              const_binop (MULT_EXPR, i1, r2));
          break;

        case RDIV_EXPR:
          if (COMPLEX_FLOAT_TYPE_P (type))
            return do_mpc_arg2 (arg1, arg2, type,
                                /* do_nonfinite= */ folding_initializer,
                                mpc_div);
          /* Fallthru ...  */
        case TRUNC_DIV_EXPR:
        case CEIL_DIV_EXPR:
        case FLOOR_DIV_EXPR:
        case ROUND_DIV_EXPR:
          if (flag_complex_method == 0)
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_straight().

                 Expand complex division to scalars, straightforward algorithm.
                 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
                 t = br*br + bi*bi
              */
              tree magsquared
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r2, r2),
                               const_binop (MULT_EXPR, i2, i2));
              tree t1
                = const_binop (PLUS_EXPR,
                               const_binop (MULT_EXPR, r1, r2),
                               const_binop (MULT_EXPR, i1, i2));
              tree t2
                = const_binop (MINUS_EXPR,
                               const_binop (MULT_EXPR, i1, r2),
                               const_binop (MULT_EXPR, r1, i2));

              real = const_binop (code, t1, magsquared);
              imag = const_binop (code, t2, magsquared);
            }
          else
            {
              /* Keep this algorithm in sync with
                 tree-complex.c:expand_complex_div_wide().

                 Expand complex division to scalars, modified algorithm to minimize
                 overflow with wide input ranges.  */
              tree compare = fold_build2 (LT_EXPR, boolean_type_node,
                                          fold_abs_const (r2, TREE_TYPE (type)),
                                          fold_abs_const (i2, TREE_TYPE (type)));

              if (integer_nonzerop (compare))
                {
                  /* In the TRUE branch, we compute
                     ratio = br/bi;
                     div = (br * ratio) + bi;
                     tr = (ar * ratio) + ai;
                     ti = (ai * ratio) - ar;
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, r2, i2);
                  tree div = const_binop (PLUS_EXPR, i2,
                                          const_binop (MULT_EXPR, r2, ratio));
                  real = const_binop (MULT_EXPR, r1, ratio);
                  real = const_binop (PLUS_EXPR, real, i1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, i1, ratio);
                  imag = const_binop (MINUS_EXPR, imag, r1);
                  imag = const_binop (code, imag, div);
                }
              else
                {
                  /* In the FALSE branch, we compute
                     ratio = d/c;
                     divisor = (d * ratio) + c;
                     tr = (b * ratio) + a;
                     ti = b - (a * ratio);
                     tr = tr / div;
                     ti = ti / div;  */
                  tree ratio = const_binop (code, i2, r2);
                  tree div = const_binop (PLUS_EXPR, r2,
                                          const_binop (MULT_EXPR, i2, ratio));

                  real = const_binop (MULT_EXPR, i1, ratio);
                  real = const_binop (PLUS_EXPR, real, r1);
                  real = const_binop (code, real, div);

                  imag = const_binop (MULT_EXPR, r1, ratio);
                  imag = const_binop (MINUS_EXPR, i1, imag);
                  imag = const_binop (code, imag, div);
                }
            }
          break;

        default:
          return NULL_TREE;
        }

      if (real && imag)
        return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
        {
          tree elem1 = VECTOR_CST_ELT (arg1, i);
          tree elem2 = VECTOR_CST_ELT (arg2, i);

          elts[i] = const_binop (code, elem1, elem2);

          /* It is possible that const_binop cannot handle the given
             code and returns NULL_TREE.  */
          if (elts[i] == NULL_TREE)
            return NULL_TREE;
        }

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      if (code == VEC_LSHIFT_EXPR
          || code == VEC_RSHIFT_EXPR)
        {
          if (!host_integerp (arg2, 1))
            return NULL_TREE;

          unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
          unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
          unsigned HOST_WIDE_INT innerc
            = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
          if (shiftc >= outerc || (shiftc % innerc) != 0)
            return NULL_TREE;
          int offset = shiftc / innerc;
          /* The direction of VEC_[LR]SHIFT_EXPR is endian dependent.
             For reductions, compiler emits VEC_RSHIFT_EXPR always,
             for !BYTES_BIG_ENDIAN picks first vector element, but
             for BYTES_BIG_ENDIAN last element from the vector.  */
          if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
            offset = -offset;
          tree zero = build_zero_cst (TREE_TYPE (type));
          for (i = 0; i < count; i++)
            {
              if (i + offset < 0 || i + offset >= count)
                elts[i] = zero;
              else
                elts[i] = VECTOR_CST_ELT (arg1, i + offset);
            }
        }
      else
        for (i = 0; i < count; i++)
          {
            tree elem1 = VECTOR_CST_ELT (arg1, i);

            elts[i] = const_binop (code, elem1, arg2);

            /* It is possible that const_binop cannot handle the given
               code and returns NULL_TREE.  */
            if (elts[i] == NULL_TREE)
              return NULL_TREE;
          }

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands OP1 and OP2 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
        {
          if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MINUS_EXPR)
        {
          if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
            return arg0;
        }
      else if (code == MULT_EXPR)
        {
          if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
            return arg1;
        }

      /* Handle general case of two integer constants.  For sizetype
         constant calculations we always want to know about overflow,
         even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
                                       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
                           fold_convert_loc (loc, ctype, arg0),
                           fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
                             size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
                           fold_convert_loc (loc, ctype,
                                             size_binop_loc (loc,
                                                             MINUS_EXPR,
                                                             arg1, arg0)));
}
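
/* For instance (added illustration): with sizetype operands 4 and 7,
   size_diffop_loc returns the ssizetype constant -3, computed as
   -(7 - 4) so that neither subtraction wraps in the unsigned type.  */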
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  tree t;

  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, tree_to_double_int (arg1),
                             !POINTER_TYPE_P (TREE_TYPE (arg1)),
                             (TREE_INT_CST_HIGH (arg1) < 0
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to an integer type.  */

static tree
fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
{
  int overflow = 0;
  tree t;

  /* The following code implements the floating point to integer
     conversion rules required by the Java Language Specification,
     that IEEE NaNs are mapped to zero and values that overflow
     the target precision saturate, i.e. values greater than
     INT_MAX are mapped to INT_MAX, and values less than INT_MIN
     are mapped to INT_MIN.  These semantics are allowed by the
     C and C++ standards that simply state that the behavior of
     FP-to-integer conversion is unspecified upon overflow.  */

  double_int val;
  REAL_VALUE_TYPE r;
  REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);

  switch (code)
    {
    case FIX_TRUNC_EXPR:
      real_trunc (&r, VOIDmode, &x);
      break;

    default:
      gcc_unreachable ();
    }

  /* If R is NaN, return zero and show we have an overflow.  */
  if (REAL_VALUE_ISNAN (r))
    {
      overflow = 1;
      val = double_int_zero;
    }

  /* See if R is less than the lower bound or greater than the
     upper bound.  */

  if (! overflow)
    {
      tree lt = TYPE_MIN_VALUE (type);
      REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
      if (REAL_VALUES_LESS (r, l))
        {
          overflow = 1;
          val = tree_to_double_int (lt);
        }
    }

  if (! overflow)
    {
      tree ut = TYPE_MAX_VALUE (type);
      if (ut)
        {
          REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
          if (REAL_VALUES_LESS (u, r))
            {
              overflow = 1;
              val = tree_to_double_int (ut);
            }
        }
    }

  if (! overflow)
    real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);

  t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a
   FIXED_CST to an integer type.  */

static tree
fold_convert_const_int_from_fixed (tree type, const_tree arg1)
{
  tree t;
  double_int temp, temp_trunc;
  unsigned int mode;

  /* Right shift FIXED_CST to temp by fbit.  */
  temp = TREE_FIXED_CST (arg1).data;
  mode = TREE_FIXED_CST (arg1).mode;
  if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
    {
      temp = temp.rshift (GET_MODE_FBIT (mode),
                          HOST_BITS_PER_DOUBLE_INT,
                          SIGNED_FIXED_POINT_MODE_P (mode));

      /* Left shift temp to temp_trunc by fbit.  */
      temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
                                HOST_BITS_PER_DOUBLE_INT,
                                SIGNED_FIXED_POINT_MODE_P (mode));
    }
  else
    {
      temp = double_int_zero;
      temp_trunc = double_int_zero;
    }

  /* If FIXED_CST is negative, we need to round the value toward 0;
     we do this by adding 1 to TEMP when the fractional bits are not
     all zero.  */
  if (SIGNED_FIXED_POINT_MODE_P (mode)
      && temp_trunc.is_negative ()
      && TREE_FIXED_CST (arg1).data != temp_trunc)
    temp += double_int_one;

  /* Given a fixed-point constant, make new constant with new type,
     appropriately sign-extended or truncated.  */
  t = force_fit_type_double (type, temp, -1,
                             (temp.is_negative ()
                              && (TYPE_UNSIGNED (type)
                                  < TYPE_UNSIGNED (TREE_TYPE (arg1))))
                             | TREE_OVERFLOW (arg1));

  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to another floating point type.  */

static tree
fold_convert_const_real_from_real (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
  t = build_real (type, value);

  /* If converting an infinity or NAN to a representation that doesn't
     have one, set the overflow bit so that we can produce some kind of
     error message at the appropriate point if necessary.  It's not the
     most user-friendly message, but it's better than nothing.  */
  if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
      && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
           && !MODE_HAS_NANS (TYPE_MODE (type)))
    TREE_OVERFLOW (t) = 1;
  /* Regular overflow, conversion produced an infinity in a mode that
     can't represent them.  */
  else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
           && REAL_VALUE_ISINF (value)
           && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
    TREE_OVERFLOW (t) = 1;
  else
    TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to a floating point type.  */

static tree
fold_convert_const_real_from_fixed (tree type, const_tree arg1)
{
  REAL_VALUE_TYPE value;
  tree t;

  real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
  t = build_real (type, value);

  TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a FIXED_CST
   to another fixed-point type.  */

static tree
fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
                              TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_int (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
                                       TREE_INT_CST (arg1),
                                       TYPE_UNSIGNED (TREE_TYPE (arg1)),
                                       TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* A subroutine of fold_convert_const handling conversions of a REAL_CST
   to a fixed-point type.  */

static tree
fold_convert_const_fixed_from_real (tree type, const_tree arg1)
{
  FIXED_VALUE_TYPE value;
  tree t;
  bool overflow_p;

  overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
                                        &TREE_REAL_CST (arg1),
                                        TYPE_SATURATING (type));
  t = build_fixed (type, value);

  /* Propagate overflow flags.  */
  if (overflow_p | TREE_OVERFLOW (arg1))
    TREE_OVERFLOW (t) = 1;
  return t;
}

/* Attempt to fold type conversion operation CODE of expression ARG1 to
   type TYPE.  If no simplification can be done return NULL_TREE.  */

static tree
fold_convert_const (enum tree_code code, tree type, tree arg1)
{
  if (TREE_TYPE (arg1) == type)
    return arg1;

  if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
      || TREE_CODE (type) == OFFSET_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_int_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_int_from_real (code, type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_int_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == REAL_TYPE)
    {
      if (TREE_CODE (arg1) == INTEGER_CST)
        return build_real_from_int_cst (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_real_from_real (type, arg1);
      else if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_real_from_fixed (type, arg1);
    }
  else if (TREE_CODE (type) == FIXED_POINT_TYPE)
    {
      if (TREE_CODE (arg1) == FIXED_CST)
        return fold_convert_const_fixed_from_fixed (type, arg1);
      else if (TREE_CODE (arg1) == INTEGER_CST)
        return fold_convert_const_fixed_from_int (type, arg1);
      else if (TREE_CODE (arg1) == REAL_CST)
        return fold_convert_const_fixed_from_real (type, arg1);
    }
  return NULL_TREE;
}

/* Construct a vector of zero elements of vector type TYPE.  */

static tree
build_zero_vector (tree type)
{
  tree t;

  t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
  return build_vector_from_val (type, t);
}

/* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
1840
1841 bool
1842 fold_convertible_p (const_tree type, const_tree arg)
1843 {
1844 tree orig = TREE_TYPE (arg);
1845
1846 if (type == orig)
1847 return true;
1848
1849 if (TREE_CODE (arg) == ERROR_MARK
1850 || TREE_CODE (type) == ERROR_MARK
1851 || TREE_CODE (orig) == ERROR_MARK)
1852 return false;
1853
1854 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1855 return true;
1856
1857 switch (TREE_CODE (type))
1858 {
1859 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1860 case POINTER_TYPE: case REFERENCE_TYPE:
1861 case OFFSET_TYPE:
1862 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1863 || TREE_CODE (orig) == OFFSET_TYPE)
1864 return true;
1865 return (TREE_CODE (orig) == VECTOR_TYPE
1866 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1867
1868 case REAL_TYPE:
1869 case FIXED_POINT_TYPE:
1870 case COMPLEX_TYPE:
1871 case VECTOR_TYPE:
1872 case VOID_TYPE:
1873 return TREE_CODE (type) == TREE_CODE (orig);
1874
1875 default:
1876 return false;
1877 }
1878 }
1879
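/* Editorial sketch (hypothetical, guarded out): fold_convertible_p
   answers "would a bare NOP_EXPR be enough?".  Widening int to long
   qualifies; int to double does not, since that needs a FLOAT_EXPR.  */
#if 0
static void
example_fold_convertible_p (tree some_int_expr)
{
  gcc_assert (fold_convertible_p (long_integer_type_node, some_int_expr));
  gcc_assert (!fold_convertible_p (double_type_node, some_int_expr));
}
#endif
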
1880 /* Convert expression ARG to type TYPE. Used by the middle-end for
1881 simple conversions in preference to calling the front-end's convert. */
1882
1883 tree
1884 fold_convert_loc (location_t loc, tree type, tree arg)
1885 {
1886 tree orig = TREE_TYPE (arg);
1887 tree tem;
1888
1889 if (type == orig)
1890 return arg;
1891
1892 if (TREE_CODE (arg) == ERROR_MARK
1893 || TREE_CODE (type) == ERROR_MARK
1894 || TREE_CODE (orig) == ERROR_MARK)
1895 return error_mark_node;
1896
1897 switch (TREE_CODE (type))
1898 {
1899 case POINTER_TYPE:
1900 case REFERENCE_TYPE:
1901 /* Handle conversions between pointers to different address spaces. */
1902 if (POINTER_TYPE_P (orig)
1903 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1904 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1905 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1906 /* fall through */
1907
1908 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1909 case OFFSET_TYPE:
1910 if (TREE_CODE (arg) == INTEGER_CST)
1911 {
1912 tem = fold_convert_const (NOP_EXPR, type, arg);
1913 if (tem != NULL_TREE)
1914 return tem;
1915 }
1916 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1917 || TREE_CODE (orig) == OFFSET_TYPE)
1918 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1919 if (TREE_CODE (orig) == COMPLEX_TYPE)
1920 return fold_convert_loc (loc, type,
1921 fold_build1_loc (loc, REALPART_EXPR,
1922 TREE_TYPE (orig), arg));
1923 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1924 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1925 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1926
1927 case REAL_TYPE:
1928 if (TREE_CODE (arg) == INTEGER_CST)
1929 {
1930 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1931 if (tem != NULL_TREE)
1932 return tem;
1933 }
1934 else if (TREE_CODE (arg) == REAL_CST)
1935 {
1936 tem = fold_convert_const (NOP_EXPR, type, arg);
1937 if (tem != NULL_TREE)
1938 return tem;
1939 }
1940 else if (TREE_CODE (arg) == FIXED_CST)
1941 {
1942 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1943 if (tem != NULL_TREE)
1944 return tem;
1945 }
1946
1947 switch (TREE_CODE (orig))
1948 {
1949 case INTEGER_TYPE:
1950 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1951 case POINTER_TYPE: case REFERENCE_TYPE:
1952 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
1953
1954 case REAL_TYPE:
1955 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1956
1957 case FIXED_POINT_TYPE:
1958 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1959
1960 case COMPLEX_TYPE:
1961 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1962 return fold_convert_loc (loc, type, tem);
1963
1964 default:
1965 gcc_unreachable ();
1966 }
1967
1968 case FIXED_POINT_TYPE:
1969 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
1970 || TREE_CODE (arg) == REAL_CST)
1971 {
1972 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1973 if (tem != NULL_TREE)
1974 goto fold_convert_exit;
1975 }
1976
1977 switch (TREE_CODE (orig))
1978 {
1979 case FIXED_POINT_TYPE:
1980 case INTEGER_TYPE:
1981 case ENUMERAL_TYPE:
1982 case BOOLEAN_TYPE:
1983 case REAL_TYPE:
1984 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
1985
1986 case COMPLEX_TYPE:
1987 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
1988 return fold_convert_loc (loc, type, tem);
1989
1990 default:
1991 gcc_unreachable ();
1992 }
1993
1994 case COMPLEX_TYPE:
1995 switch (TREE_CODE (orig))
1996 {
1997 case INTEGER_TYPE:
1998 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1999 case POINTER_TYPE: case REFERENCE_TYPE:
2000 case REAL_TYPE:
2001 case FIXED_POINT_TYPE:
2002 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2003 fold_convert_loc (loc, TREE_TYPE (type), arg),
2004 fold_convert_loc (loc, TREE_TYPE (type),
2005 integer_zero_node));
2006 case COMPLEX_TYPE:
2007 {
2008 tree rpart, ipart;
2009
2010 if (TREE_CODE (arg) == COMPLEX_EXPR)
2011 {
2012 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2013 TREE_OPERAND (arg, 0));
2014 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2015 TREE_OPERAND (arg, 1));
2016 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2017 }
2018
2019 arg = save_expr (arg);
2020 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2021 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2022 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2023 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2024 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2025 }
2026
2027 default:
2028 gcc_unreachable ();
2029 }
2030
2031 case VECTOR_TYPE:
2032 if (integer_zerop (arg))
2033 return build_zero_vector (type);
2034 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2035 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2036 || TREE_CODE (orig) == VECTOR_TYPE);
2037 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2038
2039 case VOID_TYPE:
2040 tem = fold_ignored_result (arg);
2041 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2042
2043 default:
2044 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2045 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2046 gcc_unreachable ();
2047 }
2048 fold_convert_exit:
2049 protected_set_expr_location_unshare (tem, loc);
2050 return tem;
2051 }
2052 \f
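/* Editorial sketch (hypothetical, guarded out): the COMPLEX_TYPE arm
   of fold_convert_loc above turns a scalar-to-complex conversion into
   a COMPLEX_EXPR with a zero imaginary part, so converting an int X
   to complex double yields COMPLEX_EXPR <(double) X, 0.0>.  */
#if 0
static tree
example_fold_convert_to_complex (location_t loc, tree x)
{
  return fold_convert_loc (loc, complex_double_type_node, x);
}
#endif
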
2053 /* Return false if expr can be assumed not to be an lvalue, true
2054 otherwise. */
2055
2056 static bool
2057 maybe_lvalue_p (const_tree x)
2058 {
2059 /* We only need to wrap lvalue tree codes. */
2060 switch (TREE_CODE (x))
2061 {
2062 case VAR_DECL:
2063 case PARM_DECL:
2064 case RESULT_DECL:
2065 case LABEL_DECL:
2066 case FUNCTION_DECL:
2067 case SSA_NAME:
2068
2069 case COMPONENT_REF:
2070 case MEM_REF:
2071 case INDIRECT_REF:
2072 case ARRAY_REF:
2073 case ARRAY_RANGE_REF:
2074 case BIT_FIELD_REF:
2075 case OBJ_TYPE_REF:
2076
2077 case REALPART_EXPR:
2078 case IMAGPART_EXPR:
2079 case PREINCREMENT_EXPR:
2080 case PREDECREMENT_EXPR:
2081 case SAVE_EXPR:
2082 case TRY_CATCH_EXPR:
2083 case WITH_CLEANUP_EXPR:
2084 case COMPOUND_EXPR:
2085 case MODIFY_EXPR:
2086 case TARGET_EXPR:
2087 case COND_EXPR:
2088 case BIND_EXPR:
2089 break;
2090
2091 default:
2092 /* Assume the worst for front-end tree codes. */
2093 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2094 break;
2095 return false;
2096 }
2097
2098 return true;
2099 }
2100
2101 /* Return an expr equal to X but certainly not valid as an lvalue. */
2102
2103 tree
2104 non_lvalue_loc (location_t loc, tree x)
2105 {
2106 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2107 us. */
2108 if (in_gimple_form)
2109 return x;
2110
2111 if (! maybe_lvalue_p (x))
2112 return x;
2113 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2114 }
2115
2116 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2117 Zero means allow extended lvalues. */
2118
2119 int pedantic_lvalues;
2120
2121 /* When pedantic, return an expr equal to X but certainly not valid as a
2122 pedantic lvalue. Otherwise, return X. */
2123
2124 static tree
2125 pedantic_non_lvalue_loc (location_t loc, tree x)
2126 {
2127 if (pedantic_lvalues)
2128 return non_lvalue_loc (loc, x);
2129
2130 return protected_set_expr_location_unshare (x, loc);
2131 }
2132 \f
2133 /* Given a tree comparison code, return the code that is the logical inverse.
2134 It is generally not safe to do this for floating-point comparisons, except
2135 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2136 ERROR_MARK in this case. */
2137
2138 enum tree_code
2139 invert_tree_comparison (enum tree_code code, bool honor_nans)
2140 {
2141 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2142 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2143 return ERROR_MARK;
2144
2145 switch (code)
2146 {
2147 case EQ_EXPR:
2148 return NE_EXPR;
2149 case NE_EXPR:
2150 return EQ_EXPR;
2151 case GT_EXPR:
2152 return honor_nans ? UNLE_EXPR : LE_EXPR;
2153 case GE_EXPR:
2154 return honor_nans ? UNLT_EXPR : LT_EXPR;
2155 case LT_EXPR:
2156 return honor_nans ? UNGE_EXPR : GE_EXPR;
2157 case LE_EXPR:
2158 return honor_nans ? UNGT_EXPR : GT_EXPR;
2159 case LTGT_EXPR:
2160 return UNEQ_EXPR;
2161 case UNEQ_EXPR:
2162 return LTGT_EXPR;
2163 case UNGT_EXPR:
2164 return LE_EXPR;
2165 case UNGE_EXPR:
2166 return LT_EXPR;
2167 case UNLT_EXPR:
2168 return GE_EXPR;
2169 case UNLE_EXPR:
2170 return GT_EXPR;
2171 case ORDERED_EXPR:
2172 return UNORDERED_EXPR;
2173 case UNORDERED_EXPR:
2174 return ORDERED_EXPR;
2175 default:
2176 gcc_unreachable ();
2177 }
2178 }
2179
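/* Editorial sketch (hypothetical, guarded out): for operands that
   cannot be NaN, !(a < b) inverts directly to a >= b.  When NaNs are
   honored the inverse is the unordered a UNGE b, and under
   -ftrapping-math there is no safe inverse at all (ERROR_MARK).  */
#if 0
static void
example_invert_tree_comparison (void)
{
  gcc_assert (invert_tree_comparison (LT_EXPR, false) == GE_EXPR);
}
#endif
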
2180 /* Similar, but return the comparison that results if the operands are
2181 swapped. This is safe for floating-point. */
2182
2183 enum tree_code
2184 swap_tree_comparison (enum tree_code code)
2185 {
2186 switch (code)
2187 {
2188 case EQ_EXPR:
2189 case NE_EXPR:
2190 case ORDERED_EXPR:
2191 case UNORDERED_EXPR:
2192 case LTGT_EXPR:
2193 case UNEQ_EXPR:
2194 return code;
2195 case GT_EXPR:
2196 return LT_EXPR;
2197 case GE_EXPR:
2198 return LE_EXPR;
2199 case LT_EXPR:
2200 return GT_EXPR;
2201 case LE_EXPR:
2202 return GE_EXPR;
2203 case UNGT_EXPR:
2204 return UNLT_EXPR;
2205 case UNGE_EXPR:
2206 return UNLE_EXPR;
2207 case UNLT_EXPR:
2208 return UNGT_EXPR;
2209 case UNLE_EXPR:
2210 return UNGE_EXPR;
2211 default:
2212 gcc_unreachable ();
2213 }
2214 }
2215
2216
2217 /* Convert a comparison tree code from an enum tree_code representation
2218 into a compcode bit-based encoding. This function is the inverse of
2219 compcode_to_comparison. */
2220
2221 static enum comparison_code
2222 comparison_to_compcode (enum tree_code code)
2223 {
2224 switch (code)
2225 {
2226 case LT_EXPR:
2227 return COMPCODE_LT;
2228 case EQ_EXPR:
2229 return COMPCODE_EQ;
2230 case LE_EXPR:
2231 return COMPCODE_LE;
2232 case GT_EXPR:
2233 return COMPCODE_GT;
2234 case NE_EXPR:
2235 return COMPCODE_NE;
2236 case GE_EXPR:
2237 return COMPCODE_GE;
2238 case ORDERED_EXPR:
2239 return COMPCODE_ORD;
2240 case UNORDERED_EXPR:
2241 return COMPCODE_UNORD;
2242 case UNLT_EXPR:
2243 return COMPCODE_UNLT;
2244 case UNEQ_EXPR:
2245 return COMPCODE_UNEQ;
2246 case UNLE_EXPR:
2247 return COMPCODE_UNLE;
2248 case UNGT_EXPR:
2249 return COMPCODE_UNGT;
2250 case LTGT_EXPR:
2251 return COMPCODE_LTGT;
2252 case UNGE_EXPR:
2253 return COMPCODE_UNGE;
2254 default:
2255 gcc_unreachable ();
2256 }
2257 }
2258
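/* Editorial sketch (hypothetical, guarded out): the payoff of the bit
   encoding is that conjunction and disjunction of predicates become
   plain bit operations on the codes, e.g. LT (1) | EQ (2) == LE (3)
   and LE (3) & GE (6) == EQ (2).  */
#if 0
static void
example_compcode_arithmetic (void)
{
  gcc_assert ((COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE);
  gcc_assert ((COMPCODE_LE & COMPCODE_GE) == COMPCODE_EQ);
  gcc_assert (compcode_to_comparison ((enum comparison_code)
				      (COMPCODE_LT | COMPCODE_EQ))
	      == LE_EXPR);
}
#endif
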
2259 /* Convert a compcode bit-based encoding of a comparison operator back
2260 to GCC's enum tree_code representation. This function is the
2261 inverse of comparison_to_compcode. */
2262
2263 static enum tree_code
2264 compcode_to_comparison (enum comparison_code code)
2265 {
2266 switch (code)
2267 {
2268 case COMPCODE_LT:
2269 return LT_EXPR;
2270 case COMPCODE_EQ:
2271 return EQ_EXPR;
2272 case COMPCODE_LE:
2273 return LE_EXPR;
2274 case COMPCODE_GT:
2275 return GT_EXPR;
2276 case COMPCODE_NE:
2277 return NE_EXPR;
2278 case COMPCODE_GE:
2279 return GE_EXPR;
2280 case COMPCODE_ORD:
2281 return ORDERED_EXPR;
2282 case COMPCODE_UNORD:
2283 return UNORDERED_EXPR;
2284 case COMPCODE_UNLT:
2285 return UNLT_EXPR;
2286 case COMPCODE_UNEQ:
2287 return UNEQ_EXPR;
2288 case COMPCODE_UNLE:
2289 return UNLE_EXPR;
2290 case COMPCODE_UNGT:
2291 return UNGT_EXPR;
2292 case COMPCODE_LTGT:
2293 return LTGT_EXPR;
2294 case COMPCODE_UNGE:
2295 return UNGE_EXPR;
2296 default:
2297 gcc_unreachable ();
2298 }
2299 }
2300
2301 /* Return a tree for the comparison which is the combination of
2302 doing the AND or OR (depending on CODE) of the two operations LCODE
2303 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2304 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2305 if this makes the transformation invalid. */
2306
2307 tree
2308 combine_comparisons (location_t loc,
2309 enum tree_code code, enum tree_code lcode,
2310 enum tree_code rcode, tree truth_type,
2311 tree ll_arg, tree lr_arg)
2312 {
2313 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2314 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2315 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2316 int compcode;
2317
2318 switch (code)
2319 {
2320 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2321 compcode = lcompcode & rcompcode;
2322 break;
2323
2324 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2325 compcode = lcompcode | rcompcode;
2326 break;
2327
2328 default:
2329 return NULL_TREE;
2330 }
2331
2332 if (!honor_nans)
2333 {
2334 /* Eliminate unordered comparisons, as well as LTGT and ORD
2335 which are not used unless the mode has NaNs. */
2336 compcode &= ~COMPCODE_UNORD;
2337 if (compcode == COMPCODE_LTGT)
2338 compcode = COMPCODE_NE;
2339 else if (compcode == COMPCODE_ORD)
2340 compcode = COMPCODE_TRUE;
2341 }
2342 else if (flag_trapping_math)
2343 {
2344 /* Check that the original operation and the optimized ones will trap
2345 under the same condition. */
2346 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2347 && (lcompcode != COMPCODE_EQ)
2348 && (lcompcode != COMPCODE_ORD);
2349 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2350 && (rcompcode != COMPCODE_EQ)
2351 && (rcompcode != COMPCODE_ORD);
2352 bool trap = (compcode & COMPCODE_UNORD) == 0
2353 && (compcode != COMPCODE_EQ)
2354 && (compcode != COMPCODE_ORD);
2355
2356 /* In a short-circuited boolean expression the LHS might be
2357 such that the RHS, if evaluated, will never trap. For
2358 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2359 if neither x nor y is NaN. (This is a mixed blessing: for
2360 example, the expression above will never trap, hence
2361 optimizing it to x < y would be invalid). */
2362 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2363 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2364 rtrap = false;
2365
2366 /* If the comparison was short-circuited, and only the RHS
2367 trapped, we may now generate a spurious trap. */
2368 if (rtrap && !ltrap
2369 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2370 return NULL_TREE;
2371
2372 /* If we changed the conditions that cause a trap, we lose. */
2373 if ((ltrap || rtrap) != trap)
2374 return NULL_TREE;
2375 }
2376
2377 if (compcode == COMPCODE_TRUE)
2378 return constant_boolean_node (true, truth_type);
2379 else if (compcode == COMPCODE_FALSE)
2380 return constant_boolean_node (false, truth_type);
2381 else
2382 {
2383 enum tree_code tcode;
2384
2385 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2386 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2387 }
2388 }
2389 \f
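/* Editorial sketch (hypothetical, guarded out): for integer operands,
   OR-combining x < y with x == y over the same arguments collapses to
   the single comparison x <= y, because COMPCODE_LT | COMPCODE_EQ is
   COMPCODE_LE; AND-combining them would yield constant false.  */
#if 0
static tree
example_combine_comparisons (location_t loc, tree x, tree y)
{
  return combine_comparisons (loc, TRUTH_ORIF_EXPR, LT_EXPR, EQ_EXPR,
			      boolean_type_node, x, y);
}
#endif
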
2390 /* Return nonzero if two operands (typically of the same tree node)
2391 are necessarily equal. If either argument has side-effects this
2392 function returns zero. FLAGS modifies behavior as follows:
2393
2394 If OEP_ONLY_CONST is set, only return nonzero for constants.
2395 This function tests whether the operands are indistinguishable;
2396 it does not test whether they are equal using C's == operation.
2397 The distinction is important for IEEE floating point, because
2398 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2399 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2400
2401 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2402 even though it may hold multiple values during a function.
2403 This is because a GCC tree node guarantees that nothing else is
2404 executed between the evaluation of its "operands" (which may often
2405 be evaluated in arbitrary order). Hence if the operands themselves
2406 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2407 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2408 unset means assuming isochronic (or instantaneous) tree equivalence.
2409 Unless comparing arbitrary expression trees, such as from different
2410 statements, this flag can usually be left unset.
2411
2412 If OEP_PURE_SAME is set, then pure functions with identical arguments
2413 are considered the same. It is used when the caller has other ways
2414 to ensure that global memory is unchanged in between. */
2415
2416 int
2417 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2418 {
2419 /* If either is ERROR_MARK, they aren't equal. */
2420 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2421 || TREE_TYPE (arg0) == error_mark_node
2422 || TREE_TYPE (arg1) == error_mark_node)
2423 return 0;
2424
2425 /* Similar, if either does not have a type (like a released SSA name),
2426 they aren't equal. */
2427 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2428 return 0;
2429
2430 /* Check equality of integer constants before bailing out due to
2431 precision differences. */
2432 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2433 return tree_int_cst_equal (arg0, arg1);
2434
2435 /* If both types don't have the same signedness, then we can't consider
2436 them equal. We must check this before the STRIP_NOPS calls
2437 because they may change the signedness of the arguments. As pointers
2438 strictly don't have a signedness, require either two pointers or
2439 two non-pointers as well. */
2440 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2441 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2442 return 0;
2443
2444 /* We cannot consider pointers to different address space equal. */
2445 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2446 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2447 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2448 return 0;
2449
2450 /* If both types don't have the same precision, then it is not safe
2451 to strip NOPs. */
2452 if (element_precision (TREE_TYPE (arg0))
2453 != element_precision (TREE_TYPE (arg1)))
2454 return 0;
2455
2456 STRIP_NOPS (arg0);
2457 STRIP_NOPS (arg1);
2458
2459 /* In case both args are comparisons but with different comparison
2460 code, try to swap the comparison operands of one arg to produce
2461 a match and compare that variant. */
2462 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2463 && COMPARISON_CLASS_P (arg0)
2464 && COMPARISON_CLASS_P (arg1))
2465 {
2466 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2467
2468 if (TREE_CODE (arg0) == swap_code)
2469 return operand_equal_p (TREE_OPERAND (arg0, 0),
2470 TREE_OPERAND (arg1, 1), flags)
2471 && operand_equal_p (TREE_OPERAND (arg0, 1),
2472 TREE_OPERAND (arg1, 0), flags);
2473 }
2474
2475 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2476 /* This is needed for conversions and for COMPONENT_REF.
2477 Might as well play it safe and always test this. */
2478 || TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2479 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2480 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2481 return 0;
2482
2483 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2484 We don't care about side effects in that case because the SAVE_EXPR
2485 takes care of that for us. In all other cases, two expressions are
2486 equal if they have no side effects. If we have two identical
2487 expressions with side effects that should be treated the same due
2488 to the only side effects being identical SAVE_EXPR's, that will
2489 be detected in the recursive calls below.
2490 If we are taking an invariant address of two identical objects
2491 they are necessarily equal as well. */
2492 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2493 && (TREE_CODE (arg0) == SAVE_EXPR
2494 || (flags & OEP_CONSTANT_ADDRESS_OF)
2495 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2496 return 1;
2497
2498 /* Next handle constant cases, those for which we can return 1 even
2499 if ONLY_CONST is set. */
2500 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2501 switch (TREE_CODE (arg0))
2502 {
2503 case INTEGER_CST:
2504 return tree_int_cst_equal (arg0, arg1);
2505
2506 case FIXED_CST:
2507 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2508 TREE_FIXED_CST (arg1));
2509
2510 case REAL_CST:
2511 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2512 TREE_REAL_CST (arg1)))
2513 return 1;
2514
2515
2516 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2517 {
2518 /* If we do not distinguish between signed and unsigned zero,
2519 consider them equal. */
2520 if (real_zerop (arg0) && real_zerop (arg1))
2521 return 1;
2522 }
2523 return 0;
2524
2525 case VECTOR_CST:
2526 {
2527 unsigned i;
2528
2529 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2530 return 0;
2531
2532 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2533 {
2534 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2535 VECTOR_CST_ELT (arg1, i), flags))
2536 return 0;
2537 }
2538 return 1;
2539 }
2540
2541 case COMPLEX_CST:
2542 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2543 flags)
2544 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2545 flags));
2546
2547 case STRING_CST:
2548 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2549 && ! memcmp (TREE_STRING_POINTER (arg0),
2550 TREE_STRING_POINTER (arg1),
2551 TREE_STRING_LENGTH (arg0)));
2552
2553 case ADDR_EXPR:
2554 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2555 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2556 ? OEP_CONSTANT_ADDRESS_OF : 0);
2557 default:
2558 break;
2559 }
2560
2561 if (flags & OEP_ONLY_CONST)
2562 return 0;
2563
2564 /* Define macros to test an operand from arg0 and arg1 for equality and a
2565 variant that allows null and views null as being different from any
2566 non-null value. In the latter case, if either is null, then both
2567 must be; otherwise, do the normal comparison. */
2568 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2569 TREE_OPERAND (arg1, N), flags)
2570
2571 #define OP_SAME_WITH_NULL(N) \
2572 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2573 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2574
2575 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2576 {
2577 case tcc_unary:
2578 /* Two conversions are equal only if signedness and modes match. */
2579 switch (TREE_CODE (arg0))
2580 {
2581 CASE_CONVERT:
2582 case FIX_TRUNC_EXPR:
2583 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2584 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2585 return 0;
2586 break;
2587 default:
2588 break;
2589 }
2590
2591 return OP_SAME (0);
2592
2593
2594 case tcc_comparison:
2595 case tcc_binary:
2596 if (OP_SAME (0) && OP_SAME (1))
2597 return 1;
2598
2599 /* For commutative ops, allow the other order. */
2600 return (commutative_tree_code (TREE_CODE (arg0))
2601 && operand_equal_p (TREE_OPERAND (arg0, 0),
2602 TREE_OPERAND (arg1, 1), flags)
2603 && operand_equal_p (TREE_OPERAND (arg0, 1),
2604 TREE_OPERAND (arg1, 0), flags));
2605
2606 case tcc_reference:
2607 /* If either of the pointer (or reference) expressions we are
2608 dereferencing contain a side effect, these cannot be equal,
2609 but their addresses can be. */
2610 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2611 && (TREE_SIDE_EFFECTS (arg0)
2612 || TREE_SIDE_EFFECTS (arg1)))
2613 return 0;
2614
2615 switch (TREE_CODE (arg0))
2616 {
2617 case INDIRECT_REF:
2618 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2619 return OP_SAME (0);
2620
2621 case REALPART_EXPR:
2622 case IMAGPART_EXPR:
2623 return OP_SAME (0);
2624
2625 case TARGET_MEM_REF:
2626 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2627 /* Require equal extra operands and then fall through to MEM_REF
2628 handling of the two common operands. */
2629 if (!OP_SAME_WITH_NULL (2)
2630 || !OP_SAME_WITH_NULL (3)
2631 || !OP_SAME_WITH_NULL (4))
2632 return 0;
2633 /* Fallthru. */
2634 case MEM_REF:
2635 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2636 /* Require equal access sizes, and similar pointer types.
2637 We can have incomplete types for array references of
2638 variable-sized arrays from the Fortran frontend
2639 though. Also verify the types are compatible. */
2640 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2641 || (TYPE_SIZE (TREE_TYPE (arg0))
2642 && TYPE_SIZE (TREE_TYPE (arg1))
2643 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2644 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2645 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2646 && (TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg0, 1)))
2647 == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_OPERAND (arg1, 1))))
2648 && OP_SAME (0) && OP_SAME (1));
2649
2650 case ARRAY_REF:
2651 case ARRAY_RANGE_REF:
2652 /* Operands 2 and 3 may be null.
2653 Compare the array index by value first if it is constant, as we
2654 may have different types but the same value here. */
2655 if (!OP_SAME (0))
2656 return 0;
2657 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2658 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2659 TREE_OPERAND (arg1, 1))
2660 || OP_SAME (1))
2661 && OP_SAME_WITH_NULL (2)
2662 && OP_SAME_WITH_NULL (3));
2663
2664 case COMPONENT_REF:
2665 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2666 may be NULL when we're called to compare MEM_EXPRs. */
2667 if (!OP_SAME_WITH_NULL (0) || !OP_SAME (1))
2668 return 0;
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 return OP_SAME_WITH_NULL (2);
2671
2672 case BIT_FIELD_REF:
2673 if (!OP_SAME (0))
2674 return 0;
2675 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2676 return OP_SAME (1) && OP_SAME (2);
2677
2678 default:
2679 return 0;
2680 }
2681
2682 case tcc_expression:
2683 switch (TREE_CODE (arg0))
2684 {
2685 case ADDR_EXPR:
2686 case TRUTH_NOT_EXPR:
2687 return OP_SAME (0);
2688
2689 case TRUTH_ANDIF_EXPR:
2690 case TRUTH_ORIF_EXPR:
2691 return OP_SAME (0) && OP_SAME (1);
2692
2693 case FMA_EXPR:
2694 case WIDEN_MULT_PLUS_EXPR:
2695 case WIDEN_MULT_MINUS_EXPR:
2696 if (!OP_SAME (2))
2697 return 0;
2698 /* The multiplication operands are commutative. */
2699 /* FALLTHRU */
2700
2701 case TRUTH_AND_EXPR:
2702 case TRUTH_OR_EXPR:
2703 case TRUTH_XOR_EXPR:
2704 if (OP_SAME (0) && OP_SAME (1))
2705 return 1;
2706
2707 /* Otherwise take into account this is a commutative operation. */
2708 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2709 TREE_OPERAND (arg1, 1), flags)
2710 && operand_equal_p (TREE_OPERAND (arg0, 1),
2711 TREE_OPERAND (arg1, 0), flags));
2712
2713 case COND_EXPR:
2714 case VEC_COND_EXPR:
2715 case DOT_PROD_EXPR:
2716 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2717
2718 default:
2719 return 0;
2720 }
2721
2722 case tcc_vl_exp:
2723 switch (TREE_CODE (arg0))
2724 {
2725 case CALL_EXPR:
2726 /* If the CALL_EXPRs call different functions, then they
2727 clearly cannot be equal. */
2728 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2729 flags))
2730 return 0;
2731
2732 {
2733 unsigned int cef = call_expr_flags (arg0);
2734 if (flags & OEP_PURE_SAME)
2735 cef &= ECF_CONST | ECF_PURE;
2736 else
2737 cef &= ECF_CONST;
2738 if (!cef)
2739 return 0;
2740 }
2741
2742 /* Now see if all the arguments are the same. */
2743 {
2744 const_call_expr_arg_iterator iter0, iter1;
2745 const_tree a0, a1;
2746 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2747 a1 = first_const_call_expr_arg (arg1, &iter1);
2748 a0 && a1;
2749 a0 = next_const_call_expr_arg (&iter0),
2750 a1 = next_const_call_expr_arg (&iter1))
2751 if (! operand_equal_p (a0, a1, flags))
2752 return 0;
2753
2754 /* If we get here and both argument lists are exhausted
2755 then the CALL_EXPRs are equal. */
2756 return ! (a0 || a1);
2757 }
2758 default:
2759 return 0;
2760 }
2761
2762 case tcc_declaration:
2763 /* Consider __builtin_sqrt equal to sqrt. */
2764 return (TREE_CODE (arg0) == FUNCTION_DECL
2765 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2766 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2767 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2768
2769 default:
2770 return 0;
2771 }
2772
2773 #undef OP_SAME
2774 #undef OP_SAME_WITH_NULL
2775 }
2776 \f
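/* Editorial sketch (hypothetical, guarded out): commutativity is part
   of the equality test above, so a + b and b + a compare equal under
   the default flags, provided neither operand has side effects.  */
#if 0
static void
example_operand_equal_p (tree a, tree b)
{
  tree t1 = build2 (PLUS_EXPR, TREE_TYPE (a), a, b);
  tree t2 = build2 (PLUS_EXPR, TREE_TYPE (a), b, a);
  gcc_assert (operand_equal_p (t1, t2, 0));
}
#endif
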
2777 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2778 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2779
2780 When in doubt, return 0. */
2781
2782 static int
2783 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2784 {
2785 int unsignedp1, unsignedpo;
2786 tree primarg0, primarg1, primother;
2787 unsigned int correct_width;
2788
2789 if (operand_equal_p (arg0, arg1, 0))
2790 return 1;
2791
2792 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2793 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2794 return 0;
2795
2796 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2797 and see if the inner values are the same. This removes any
2798 signedness comparison, which doesn't matter here. */
2799 primarg0 = arg0, primarg1 = arg1;
2800 STRIP_NOPS (primarg0);
2801 STRIP_NOPS (primarg1);
2802 if (operand_equal_p (primarg0, primarg1, 0))
2803 return 1;
2804
2805 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2806 actual comparison operand, ARG0.
2807
2808 First throw away any conversions to wider types
2809 already present in the operands. */
2810
2811 primarg1 = get_narrower (arg1, &unsignedp1);
2812 primother = get_narrower (other, &unsignedpo);
2813
2814 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2815 if (unsignedp1 == unsignedpo
2816 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2817 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2818 {
2819 tree type = TREE_TYPE (arg0);
2820
2821 /* Make sure shorter operand is extended the right way
2822 to match the longer operand. */
2823 primarg1 = fold_convert (signed_or_unsigned_type_for
2824 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2825
2826 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2827 return 1;
2828 }
2829
2830 return 0;
2831 }
2832 \f
2833 /* See if ARG is an expression that is either a comparison or is performing
2834 arithmetic on comparisons. The comparisons must only be comparing
2835 two different values, which will be stored in *CVAL1 and *CVAL2; if
2836 they are nonzero it means that some operands have already been found.
2837 No variables may be used anywhere else in the expression except in the
2838 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2839 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2840
2841 If this is true, return 1. Otherwise, return zero. */
2842
2843 static int
2844 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2845 {
2846 enum tree_code code = TREE_CODE (arg);
2847 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2848
2849 /* We can handle some of the tcc_expression cases here. */
2850 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2851 tclass = tcc_unary;
2852 else if (tclass == tcc_expression
2853 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2854 || code == COMPOUND_EXPR))
2855 tclass = tcc_binary;
2856
2857 else if (tclass == tcc_expression && code == SAVE_EXPR
2858 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2859 {
2860 /* If we've already found a CVAL1 or CVAL2, this expression is
2861 too complex to handle. */
2862 if (*cval1 || *cval2)
2863 return 0;
2864
2865 tclass = tcc_unary;
2866 *save_p = 1;
2867 }
2868
2869 switch (tclass)
2870 {
2871 case tcc_unary:
2872 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2873
2874 case tcc_binary:
2875 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2876 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2877 cval1, cval2, save_p));
2878
2879 case tcc_constant:
2880 return 1;
2881
2882 case tcc_expression:
2883 if (code == COND_EXPR)
2884 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2885 cval1, cval2, save_p)
2886 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2887 cval1, cval2, save_p)
2888 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2889 cval1, cval2, save_p));
2890 return 0;
2891
2892 case tcc_comparison:
2893 /* First see if we can handle the first operand, then the second. For
2894 the second operand, we know *CVAL1 can't be zero. It must be that
2895 one side of the comparison is each of the values; test for the
2896 case where this isn't true by failing if the two operands
2897 are the same. */
2898
2899 if (operand_equal_p (TREE_OPERAND (arg, 0),
2900 TREE_OPERAND (arg, 1), 0))
2901 return 0;
2902
2903 if (*cval1 == 0)
2904 *cval1 = TREE_OPERAND (arg, 0);
2905 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2906 ;
2907 else if (*cval2 == 0)
2908 *cval2 = TREE_OPERAND (arg, 0);
2909 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2910 ;
2911 else
2912 return 0;
2913
2914 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2915 ;
2916 else if (*cval2 == 0)
2917 *cval2 = TREE_OPERAND (arg, 1);
2918 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2919 ;
2920 else
2921 return 0;
2922
2923 return 1;
2924
2925 default:
2926 return 0;
2927 }
2928 }
2929 \f
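/* Editorial sketch (hypothetical, guarded out): for a tree such as
   (a < b) && (b == a), the walk above succeeds and records the two
   compared values, *cval1 = a and *cval2 = b; a comparison against a
   third variable would make it return 0 instead.  */
#if 0
static void
example_twoval_comparison_p (tree cond_over_a_and_b)
{
  tree cval1 = NULL_TREE, cval2 = NULL_TREE;
  int save_p = 0;
  if (twoval_comparison_p (cond_over_a_and_b, &cval1, &cval2, &save_p))
    {
      /* cval1 and cval2 now name the only two values compared.  */
    }
}
#endif
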
2930 /* ARG is a tree that is known to contain just arithmetic operations and
2931 comparisons. Evaluate the operations in the tree substituting NEW0 for
2932 any occurrence of OLD0 as an operand of a comparison and likewise for
2933 NEW1 and OLD1. */
2934
2935 static tree
2936 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2937 tree old1, tree new1)
2938 {
2939 tree type = TREE_TYPE (arg);
2940 enum tree_code code = TREE_CODE (arg);
2941 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2942
2943 /* We can handle some of the tcc_expression cases here. */
2944 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2945 tclass = tcc_unary;
2946 else if (tclass == tcc_expression
2947 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2948 tclass = tcc_binary;
2949
2950 switch (tclass)
2951 {
2952 case tcc_unary:
2953 return fold_build1_loc (loc, code, type,
2954 eval_subst (loc, TREE_OPERAND (arg, 0),
2955 old0, new0, old1, new1));
2956
2957 case tcc_binary:
2958 return fold_build2_loc (loc, code, type,
2959 eval_subst (loc, TREE_OPERAND (arg, 0),
2960 old0, new0, old1, new1),
2961 eval_subst (loc, TREE_OPERAND (arg, 1),
2962 old0, new0, old1, new1));
2963
2964 case tcc_expression:
2965 switch (code)
2966 {
2967 case SAVE_EXPR:
2968 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
2969 old1, new1);
2970
2971 case COMPOUND_EXPR:
2972 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
2973 old1, new1);
2974
2975 case COND_EXPR:
2976 return fold_build3_loc (loc, code, type,
2977 eval_subst (loc, TREE_OPERAND (arg, 0),
2978 old0, new0, old1, new1),
2979 eval_subst (loc, TREE_OPERAND (arg, 1),
2980 old0, new0, old1, new1),
2981 eval_subst (loc, TREE_OPERAND (arg, 2),
2982 old0, new0, old1, new1));
2983 default:
2984 break;
2985 }
2986 /* Fall through - ??? */
2987
2988 case tcc_comparison:
2989 {
2990 tree arg0 = TREE_OPERAND (arg, 0);
2991 tree arg1 = TREE_OPERAND (arg, 1);
2992
2993 /* We need to check both for exact equality and tree equality. The
2994 former will be true if the operand has a side-effect. In that
2995 case, we know the operand occurred exactly once. */
2996
2997 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
2998 arg0 = new0;
2999 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3000 arg0 = new1;
3001
3002 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3003 arg1 = new0;
3004 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3005 arg1 = new1;
3006
3007 return fold_build2_loc (loc, code, type, arg0, arg1);
3008 }
3009
3010 default:
3011 return arg;
3012 }
3013 }
3014 \f
3015 /* Return a tree for the case when the result of an expression is RESULT
3016 converted to TYPE and OMITTED was previously an operand of the expression
3017 but is now not needed (e.g., we folded OMITTED * 0).
3018
3019 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3020 the conversion of RESULT to TYPE. */
3021
3022 tree
3023 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3024 {
3025 tree t = fold_convert_loc (loc, type, result);
3026
3027 /* If the resulting operand is an empty statement, just return the omitted
3028 statement cast to void. */
3029 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3030 return build1_loc (loc, NOP_EXPR, void_type_node,
3031 fold_ignored_result (omitted));
3032
3033 if (TREE_SIDE_EFFECTS (omitted))
3034 return build2_loc (loc, COMPOUND_EXPR, type,
3035 fold_ignored_result (omitted), t);
3036
3037 return non_lvalue_loc (loc, t);
3038 }
3039
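/* Editorial sketch (hypothetical, guarded out): when x * 0 is folded
   to 0 but x has side effects (say, it contains a call), the side
   effects must survive, so the helper above produces
   COMPOUND_EXPR <x, 0> rather than a bare zero.  */
#if 0
static tree
example_omit_one_operand (location_t loc, tree x)
{
  tree zero = build_int_cst (TREE_TYPE (x), 0);
  return omit_one_operand_loc (loc, TREE_TYPE (x), zero, x);
}
#endif
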
3040 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3041
3042 static tree
3043 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3044 tree omitted)
3045 {
3046 tree t = fold_convert_loc (loc, type, result);
3047
3048 /* If the resulting operand is an empty statement, just return the omitted
3049 statement casted to void. */
3050 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3051 return build1_loc (loc, NOP_EXPR, void_type_node,
3052 fold_ignored_result (omitted));
3053
3054 if (TREE_SIDE_EFFECTS (omitted))
3055 return build2_loc (loc, COMPOUND_EXPR, type,
3056 fold_ignored_result (omitted), t);
3057
3058 return pedantic_non_lvalue_loc (loc, t);
3059 }
3060
3061 /* Return a tree for the case when the result of an expression is RESULT
3062 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3063 of the expression but are now not needed.
3064
3065 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3066 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3067 evaluated before OMITTED2. Otherwise, if neither has side effects,
3068 just do the conversion of RESULT to TYPE. */
3069
3070 tree
3071 omit_two_operands_loc (location_t loc, tree type, tree result,
3072 tree omitted1, tree omitted2)
3073 {
3074 tree t = fold_convert_loc (loc, type, result);
3075
3076 if (TREE_SIDE_EFFECTS (omitted2))
3077 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3078 if (TREE_SIDE_EFFECTS (omitted1))
3079 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3080
3081 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3082 }
3083
3084 \f
3085 /* Return a simplified tree node for the truth-negation of ARG. This
3086 never alters ARG itself. We assume that ARG is an operation that
3087 returns a truth value (0 or 1).
3088
3089 FIXME: one would think we would fold the result, but it causes
3090 problems with the dominator optimizer. */
3091
3092 static tree
3093 fold_truth_not_expr (location_t loc, tree arg)
3094 {
3095 tree type = TREE_TYPE (arg);
3096 enum tree_code code = TREE_CODE (arg);
3097 location_t loc1, loc2;
3098
3099 /* If this is a comparison, we can simply invert it, except for
3100 floating-point non-equality comparisons, in which case we just
3101 enclose a TRUTH_NOT_EXPR around what we have. */
3102
3103 if (TREE_CODE_CLASS (code) == tcc_comparison)
3104 {
3105 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3106 if (FLOAT_TYPE_P (op_type)
3107 && flag_trapping_math
3108 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3109 && code != NE_EXPR && code != EQ_EXPR)
3110 return NULL_TREE;
3111
3112 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3113 if (code == ERROR_MARK)
3114 return NULL_TREE;
3115
3116 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3117 TREE_OPERAND (arg, 1));
3118 }
3119
3120 switch (code)
3121 {
3122 case INTEGER_CST:
3123 return constant_boolean_node (integer_zerop (arg), type);
3124
3125 case TRUTH_AND_EXPR:
3126 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3127 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3128 return build2_loc (loc, TRUTH_OR_EXPR, type,
3129 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3130 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3131
3132 case TRUTH_OR_EXPR:
3133 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3134 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3135 return build2_loc (loc, TRUTH_AND_EXPR, type,
3136 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3137 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3138
3139 case TRUTH_XOR_EXPR:
3140 /* Here we can invert either operand. We invert the first operand
3141 unless the second operand is a TRUTH_NOT_EXPR in which case our
3142 result is the XOR of the first operand with the inside of the
3143 negation of the second operand. */
3144
3145 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3146 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3147 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3148 else
3149 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3150 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3151 TREE_OPERAND (arg, 1));
3152
3153 case TRUTH_ANDIF_EXPR:
3154 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3155 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3156 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3157 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3158 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3159
3160 case TRUTH_ORIF_EXPR:
3161 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3162 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3163 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3164 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3165 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3166
3167 case TRUTH_NOT_EXPR:
3168 return TREE_OPERAND (arg, 0);
3169
3170 case COND_EXPR:
3171 {
3172 tree arg1 = TREE_OPERAND (arg, 1);
3173 tree arg2 = TREE_OPERAND (arg, 2);
3174
3175 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3176 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3177
3178 /* A COND_EXPR may have a throw as one operand, which
3179 then has void type. Just leave void operands
3180 as they are. */
3181 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3182 VOID_TYPE_P (TREE_TYPE (arg1))
3183 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3184 VOID_TYPE_P (TREE_TYPE (arg2))
3185 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3186 }
3187
3188 case COMPOUND_EXPR:
3189 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3190 return build2_loc (loc, COMPOUND_EXPR, type,
3191 TREE_OPERAND (arg, 0),
3192 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3193
3194 case NON_LVALUE_EXPR:
3195 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3196 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3197
3198 CASE_CONVERT:
3199 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3200 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3201
3202 /* ... fall through ... */
3203
3204 case FLOAT_EXPR:
3205 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3206 return build1_loc (loc, TREE_CODE (arg), type,
3207 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3208
3209 case BIT_AND_EXPR:
3210 if (!integer_onep (TREE_OPERAND (arg, 1)))
3211 return NULL_TREE;
3212 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3213
3214 case SAVE_EXPR:
3215 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3216
3217 case CLEANUP_POINT_EXPR:
3218 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3219 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3220 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3221
3222 default:
3223 return NULL_TREE;
3224 }
3225 }
3226
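/* Editorial sketch (hypothetical, guarded out): for integer operands
   the comparison case above inverts the code in place, so negating
   a < b yields a >= b with no TRUTH_NOT_EXPR wrapper; the TRUTH_AND
   and TRUTH_OR cases apply De Morgan's laws instead.  */
#if 0
static tree
example_fold_truth_not (location_t loc, tree a, tree b)
{
  tree cmp = build2 (LT_EXPR, boolean_type_node, a, b);
  return fold_truth_not_expr (loc, cmp);	/* a >= b.  */
}
#endif
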
3227 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3228 assume that ARG is an operation that returns a truth value (0 or 1
3229 for scalars, 0 or -1 for vectors). Return the folded expression if
3230 folding is successful. Otherwise, return NULL_TREE. */
3231
3232 static tree
3233 fold_invert_truthvalue (location_t loc, tree arg)
3234 {
3235 tree type = TREE_TYPE (arg);
3236 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3237 ? BIT_NOT_EXPR
3238 : TRUTH_NOT_EXPR,
3239 type, arg);
3240 }
3241
3242 /* Return a simplified tree node for the truth-negation of ARG. This
3243 never alters ARG itself. We assume that ARG is an operation that
3244 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3245
3246 tree
3247 invert_truthvalue_loc (location_t loc, tree arg)
3248 {
3249 if (TREE_CODE (arg) == ERROR_MARK)
3250 return arg;
3251
3252 tree type = TREE_TYPE (arg);
3253 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3254 ? BIT_NOT_EXPR
3255 : TRUTH_NOT_EXPR,
3256 type, arg);
3257 }
3258
3259 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3260 operands are another bit-wise operation with a common input. If so,
3261 distribute the bit operations to save an operation and possibly two if
3262 constants are involved. For example, convert
3263 (A | B) & (A | C) into A | (B & C)
3264 Further simplification will occur if B and C are constants.
3265
3266 If this optimization cannot be done, 0 will be returned. */
3267
3268 static tree
3269 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3270 tree arg0, tree arg1)
3271 {
3272 tree common;
3273 tree left, right;
3274
3275 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3276 || TREE_CODE (arg0) == code
3277 || (TREE_CODE (arg0) != BIT_AND_EXPR
3278 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3279 return 0;
3280
3281 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3282 {
3283 common = TREE_OPERAND (arg0, 0);
3284 left = TREE_OPERAND (arg0, 1);
3285 right = TREE_OPERAND (arg1, 1);
3286 }
3287 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3288 {
3289 common = TREE_OPERAND (arg0, 0);
3290 left = TREE_OPERAND (arg0, 1);
3291 right = TREE_OPERAND (arg1, 0);
3292 }
3293 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3294 {
3295 common = TREE_OPERAND (arg0, 1);
3296 left = TREE_OPERAND (arg0, 0);
3297 right = TREE_OPERAND (arg1, 1);
3298 }
3299 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3300 {
3301 common = TREE_OPERAND (arg0, 1);
3302 left = TREE_OPERAND (arg0, 0);
3303 right = TREE_OPERAND (arg1, 0);
3304 }
3305 else
3306 return 0;
3307
3308 common = fold_convert_loc (loc, type, common);
3309 left = fold_convert_loc (loc, type, left);
3310 right = fold_convert_loc (loc, type, right);
3311 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3312 fold_build2_loc (loc, code, type, left, right));
3313 }
3314
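/* Editorial sketch (hypothetical, guarded out): with constant
   operands the distributed form keeps folding, e.g.
   (a | 0x0f) & (a | 0xf0) becomes a | (0x0f & 0xf0), i.e. a | 0,
   i.e. just a.  */
#if 0
static tree
example_distribute_bit_expr (location_t loc, tree a)
{
  tree t = TREE_TYPE (a);
  tree lhs = build2 (BIT_IOR_EXPR, t, a, build_int_cst (t, 0x0f));
  tree rhs = build2 (BIT_IOR_EXPR, t, a, build_int_cst (t, 0xf0));
  return distribute_bit_expr (loc, BIT_AND_EXPR, t, lhs, rhs);
}
#endif
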
3315 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3316 with code CODE. This optimization is unsafe: it reassociates floating-point operations and so can change rounding. */
3317 static tree
3318 distribute_real_division (location_t loc, enum tree_code code, tree type,
3319 tree arg0, tree arg1)
3320 {
3321 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3322 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3323
3324 /* (A / C) +- (B / C) -> (A +- B) / C. */
3325 if (mul0 == mul1
3326 && operand_equal_p (TREE_OPERAND (arg0, 1),
3327 TREE_OPERAND (arg1, 1), 0))
3328 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3329 fold_build2_loc (loc, code, type,
3330 TREE_OPERAND (arg0, 0),
3331 TREE_OPERAND (arg1, 0)),
3332 TREE_OPERAND (arg0, 1));
3333
3334 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3335 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3336 TREE_OPERAND (arg1, 0), 0)
3337 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3338 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3339 {
3340 REAL_VALUE_TYPE r0, r1;
3341 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3342 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3343 if (!mul0)
3344 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3345 if (!mul1)
3346 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3347 real_arithmetic (&r0, code, &r0, &r1);
3348 return fold_build2_loc (loc, MULT_EXPR, type,
3349 TREE_OPERAND (arg0, 0),
3350 build_real (type, r0));
3351 }
3352
3353 return NULL_TREE;
3354 }
3355 \f
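/* Editorial sketch (hypothetical, guarded out): the second pattern
   above folds the reciprocals at compile time, so
   (a / 2.0) + (a / 4.0) becomes a * 0.75, since 1/2 + 1/4 = 3/4;
   exactly the kind of rewrite that is only valid under unsafe math.  */
#if 0
static tree
example_distribute_real_division (location_t loc, tree a)
{
  tree t = TREE_TYPE (a);
  tree two = build_real (t, dconst2);
  tree four = fold_build2_loc (loc, MULT_EXPR, t, two, two);
  tree lhs = build2 (RDIV_EXPR, t, a, two);
  tree rhs = build2 (RDIV_EXPR, t, a, four);
  return distribute_real_division (loc, PLUS_EXPR, t, lhs, rhs);
}
#endif
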
3356 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3357 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3358
3359 static tree
3360 make_bit_field_ref (location_t loc, tree inner, tree type,
3361 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3362 {
3363 tree result, bftype;
3364
3365 if (bitpos == 0)
3366 {
3367 tree size = TYPE_SIZE (TREE_TYPE (inner));
3368 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3369 || POINTER_TYPE_P (TREE_TYPE (inner)))
3370 && host_integerp (size, 0)
3371 && tree_low_cst (size, 0) == bitsize)
3372 return fold_convert_loc (loc, type, inner);
3373 }
3374
3375 bftype = type;
3376 if (TYPE_PRECISION (bftype) != bitsize
3377 || TYPE_UNSIGNED (bftype) == !unsignedp)
3378 bftype = build_nonstandard_integer_type (bitsize, 0);
3379
3380 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3381 size_int (bitsize), bitsize_int (bitpos));
3382
3383 if (bftype != type)
3384 result = fold_convert_loc (loc, type, result);
3385
3386 return result;
3387 }
3388
3389 /* Optimize a bit-field compare.
3390
3391 There are two cases: First is a compare against a constant and the
3392 second is a comparison of two items where the fields are at the same
3393 bit position relative to the start of a chunk (byte, halfword, word)
3394 large enough to contain it. In these cases we can avoid the shift
3395 implicit in bitfield extractions.
3396
3397 For constants, we emit a compare of the shifted constant with the
3398 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3399 compared. For two fields at the same position, we do the ANDs with the
3400 similar mask and compare the result of the ANDs.
3401
3402 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3403 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3404 are the left and right operands of the comparison, respectively.
3405
3406 If the optimization described above can be done, we return the resulting
3407 tree. Otherwise we return zero. */
3408
3409 static tree
3410 optimize_bit_field_compare (location_t loc, enum tree_code code,
3411 tree compare_type, tree lhs, tree rhs)
3412 {
3413 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3414 tree type = TREE_TYPE (lhs);
3415 tree signed_type, unsigned_type;
3416 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3417 enum machine_mode lmode, rmode, nmode;
3418 int lunsignedp, runsignedp;
3419 int lvolatilep = 0, rvolatilep = 0;
3420 tree linner, rinner = NULL_TREE;
3421 tree mask;
3422 tree offset;
3423
3424 /* In the strict volatile bitfields case, doing code changes here may prevent
3425 other optimizations, in particular in a SLOW_BYTE_ACCESS setting. */
3426 if (flag_strict_volatile_bitfields > 0)
3427 return 0;
3428
3429 /* Get all the information about the extractions being done. If the bit size
3430 is the same as the size of the underlying object, we aren't doing an
3431 extraction at all and so can do nothing. We also don't want to
3432 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3433 then will no longer be able to replace it. */
3434 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3435 &lunsignedp, &lvolatilep, false);
3436 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3437 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR)
3438 return 0;
3439
3440 if (!const_p)
3441 {
3442 /* If this is not a constant, we can only do something if bit positions,
3443 sizes, and signedness are the same. */
3444 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3445 &runsignedp, &rvolatilep, false);
3446
3447 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3448 || lunsignedp != runsignedp || offset != 0
3449 || TREE_CODE (rinner) == PLACEHOLDER_EXPR)
3450 return 0;
3451 }
3452
3453 /* See if we can find a mode to refer to this field. We should be able to,
3454 but fail if we can't. */
3455 if (lvolatilep
3456 && GET_MODE_BITSIZE (lmode) > 0
3457 && flag_strict_volatile_bitfields > 0)
3458 nmode = lmode;
3459 else
3460 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3461 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3462 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3463 TYPE_ALIGN (TREE_TYPE (rinner))),
3464 word_mode, lvolatilep || rvolatilep);
3465 if (nmode == VOIDmode)
3466 return 0;
3467
3468 /* Set signed and unsigned types of the precision of this mode for the
3469 shifts below. */
3470 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3471 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3472
3473 /* Compute the bit position and size for the new reference and our offset
3474 within it. If the new reference is the same size as the original, we
3475 won't optimize anything, so return zero. */
3476 nbitsize = GET_MODE_BITSIZE (nmode);
3477 nbitpos = lbitpos & ~ (nbitsize - 1);
3478 lbitpos -= nbitpos;
3479 if (nbitsize == lbitsize)
3480 return 0;
3481
3482 if (BYTES_BIG_ENDIAN)
3483 lbitpos = nbitsize - lbitsize - lbitpos;
3484
3485 /* Make the mask to be used against the extracted field. */
3486 mask = build_int_cst_type (unsigned_type, -1);
3487 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3488 mask = const_binop (RSHIFT_EXPR, mask,
3489 size_int (nbitsize - lbitsize - lbitpos));
3490
3491 if (! const_p)
3492 /* If not comparing with constant, just rework the comparison
3493 and return. */
3494 return fold_build2_loc (loc, code, compare_type,
3495 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3496 make_bit_field_ref (loc, linner,
3497 unsigned_type,
3498 nbitsize, nbitpos,
3499 1),
3500 mask),
3501 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3502 make_bit_field_ref (loc, rinner,
3503 unsigned_type,
3504 nbitsize, nbitpos,
3505 1),
3506 mask));
3507
3508 /* Otherwise, we are handling the constant case. See if the constant is too
3509 big for the field. Warn and return a tree for 0 (false) if so. We do
3510 this not only for its own sake, but to avoid having to test for this
3511 error case below. If we didn't, we might generate wrong code.
3512
3513 For unsigned fields, the constant shifted right by the field length should
3514 be all zero. For signed fields, the high-order bits should agree with
3515 the sign bit. */
3516
3517 if (lunsignedp)
3518 {
3519 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3520 fold_convert_loc (loc,
3521 unsigned_type, rhs),
3522 size_int (lbitsize))))
3523 {
3524 warning (0, "comparison is always %d due to width of bit-field",
3525 code == NE_EXPR);
3526 return constant_boolean_node (code == NE_EXPR, compare_type);
3527 }
3528 }
3529 else
3530 {
3531 tree tem = const_binop (RSHIFT_EXPR,
3532 fold_convert_loc (loc, signed_type, rhs),
3533 size_int (lbitsize - 1));
3534 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3535 {
3536 warning (0, "comparison is always %d due to width of bit-field",
3537 code == NE_EXPR);
3538 return constant_boolean_node (code == NE_EXPR, compare_type);
3539 }
3540 }
3541
3542 /* Single-bit compares should always be against zero. */
3543 if (lbitsize == 1 && ! integer_zerop (rhs))
3544 {
3545 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3546 rhs = build_int_cst (type, 0);
3547 }
3548
3549 /* Make a new bitfield reference, shift the constant over the
3550 appropriate number of bits and mask it with the computed mask
3551 (in case this was a signed field). If we changed it, make a new one. */
3552 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3553 if (lvolatilep)
3554 {
3555 TREE_SIDE_EFFECTS (lhs) = 1;
3556 TREE_THIS_VOLATILE (lhs) = 1;
3557 }
3558
3559 rhs = const_binop (BIT_AND_EXPR,
3560 const_binop (LSHIFT_EXPR,
3561 fold_convert_loc (loc, unsigned_type, rhs),
3562 size_int (lbitpos)),
3563 mask);
3564
3565 lhs = build2_loc (loc, code, compare_type,
3566 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3567 return lhs;
3568 }
3569 \f
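/* Editorial sketch of the constant case above at the source level
   (the struct is hypothetical).  Given

     struct s { unsigned int f : 3; } x;

   the test x.f == 5 is rewritten to a mask-and-compare on a whole
   mode-sized load, avoiding the extraction shift:

     (load & (7 << pos)) == (5 << pos)

   where pos is the bit position of the field within the load and
   7 << pos is the mask computed above.  */
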
3570 /* Subroutine for fold_truth_andor_1: decode a field reference.
3571
3572 If EXP is a comparison reference, we return the innermost reference.
3573
3574 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3575 set to the starting bit number.
3576
3577 If the innermost field can be completely contained in a mode-sized
3578 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3579
3580 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3581 otherwise it is not changed.
3582
3583 *PUNSIGNEDP is set to the signedness of the field.
3584
3585 *PMASK is set to the mask used. This is either contained in a
3586 BIT_AND_EXPR or derived from the width of the field.
3587
3588 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3589
3590 Return 0 if this is not a component reference or is one that we can't
3591 do anything with. */
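/* E.g. (an illustrative, hypothetical input): for EXP of the form
s.f & 3, where f is an 8-bit bit-field, this returns the containing
object s, sets *PBITSIZE to 8 and *PBITPOS to f's bit offset, stores
3 in *PAND_MASK, and sets *PMASK to the 8-bit field mask 0xff merged
with 3, i.e. 3.  */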
3592
3593 static tree
3594 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3595 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3596 int *punsignedp, int *pvolatilep,
3597 tree *pmask, tree *pand_mask)
3598 {
3599 tree outer_type = 0;
3600 tree and_mask = 0;
3601 tree mask, inner, offset;
3602 tree unsigned_type;
3603 unsigned int precision;
3604
3605 /* All the optimizations using this function assume integer fields.
3606 There are problems with FP fields since the type_for_size call
3607 below can fail for, e.g., XFmode. */
3608 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3609 return 0;
3610
3611 /* We are interested in the bare arrangement of bits, so strip everything
3612 that doesn't affect the machine mode. However, record the type of the
3613 outermost expression if it may matter below. */
3614 if (CONVERT_EXPR_P (exp)
3615 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3616 outer_type = TREE_TYPE (exp);
3617 STRIP_NOPS (exp);
3618
3619 if (TREE_CODE (exp) == BIT_AND_EXPR)
3620 {
3621 and_mask = TREE_OPERAND (exp, 1);
3622 exp = TREE_OPERAND (exp, 0);
3623 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3624 if (TREE_CODE (and_mask) != INTEGER_CST)
3625 return 0;
3626 }
3627
3628 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3629 punsignedp, pvolatilep, false);
3630 if ((inner == exp && and_mask == 0)
3631 || *pbitsize < 0 || offset != 0
3632 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3633 return 0;
3634
3635 /* If the number of bits in the reference is the same as the bitsize of
3636 the outer type, then the outer type gives the signedness. Otherwise
3637 (in case of a small bitfield) the signedness is unchanged. */
3638 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3639 *punsignedp = TYPE_UNSIGNED (outer_type);
3640
3641 /* Compute the mask to access the bitfield. */
3642 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3643 precision = TYPE_PRECISION (unsigned_type);
3644
3645 mask = build_int_cst_type (unsigned_type, -1);
3646
3647 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3648 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3649
3650 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3651 if (and_mask != 0)
3652 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3653 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3654
3655 *pmask = mask;
3656 *pand_mask = and_mask;
3657 return inner;
3658 }
3659
3660 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3661 bit positions. */
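/* E.g. (illustrative): for SIZE == 8 this asks whether MASK is
exactly (1 << 8) - 1, i.e. 0xff, in MASK's type.  */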
3662
3663 static int
3664 all_ones_mask_p (const_tree mask, int size)
3665 {
3666 tree type = TREE_TYPE (mask);
3667 unsigned int precision = TYPE_PRECISION (type);
3668 tree tmask;
3669
3670 tmask = build_int_cst_type (signed_type_for (type), -1);
3671
3672 return
3673 tree_int_cst_equal (mask,
3674 const_binop (RSHIFT_EXPR,
3675 const_binop (LSHIFT_EXPR, tmask,
3676 size_int (precision - size)),
3677 size_int (precision - size)));
3678 }
3679
3680 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
3681 represents the sign bit of EXP's type. If EXP represents a sign
3682 or zero extension, also test VAL against the unextended type.
3683 The return value is the (sub)expression whose sign bit is VAL,
3684 or NULL_TREE otherwise. */
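/* E.g. (illustrative): for a 32-bit int EXP, VAL == 0x80000000 is the
sign bit and EXP is returned, while VAL == 0x40000000 yields NULL_TREE.
For EXP == (int) c with c a signed char, VAL == 0x80 matches the sign
bit of the unextended 8-bit type and c is returned.  */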
3685
3686 static tree
3687 sign_bit_p (tree exp, const_tree val)
3688 {
3689 unsigned HOST_WIDE_INT mask_lo, lo;
3690 HOST_WIDE_INT mask_hi, hi;
3691 int width;
3692 tree t;
3693
3694 /* Tree EXP must have an integral type. */
3695 t = TREE_TYPE (exp);
3696 if (! INTEGRAL_TYPE_P (t))
3697 return NULL_TREE;
3698
3699 /* Tree VAL must be an integer constant. */
3700 if (TREE_CODE (val) != INTEGER_CST
3701 || TREE_OVERFLOW (val))
3702 return NULL_TREE;
3703
3704 width = TYPE_PRECISION (t);
3705 if (width > HOST_BITS_PER_WIDE_INT)
3706 {
3707 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3708 lo = 0;
3709
3710 mask_hi = ((unsigned HOST_WIDE_INT) -1
3711 >> (HOST_BITS_PER_DOUBLE_INT - width));
3712 mask_lo = -1;
3713 }
3714 else
3715 {
3716 hi = 0;
3717 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3718
3719 mask_hi = 0;
3720 mask_lo = ((unsigned HOST_WIDE_INT) -1
3721 >> (HOST_BITS_PER_WIDE_INT - width));
3722 }
3723
3724 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3725 treat VAL as if it were unsigned. */
3726 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3727 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3728 return exp;
3729
3730 /* Handle extension from a narrower type. */
3731 if (TREE_CODE (exp) == NOP_EXPR
3732 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3733 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3734
3735 return NULL_TREE;
3736 }
3737
3738 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3739 to be evaluated unconditionally. */
3740
3741 static int
3742 simple_operand_p (const_tree exp)
3743 {
3744 /* Strip any conversions that don't change the machine mode. */
3745 STRIP_NOPS (exp);
3746
3747 return (CONSTANT_CLASS_P (exp)
3748 || TREE_CODE (exp) == SSA_NAME
3749 || (DECL_P (exp)
3750 && ! TREE_ADDRESSABLE (exp)
3751 && ! TREE_THIS_VOLATILE (exp)
3752 && ! DECL_NONLOCAL (exp)
3753 /* Don't regard global variables as simple. They may be
3754 allocated in ways unknown to the compiler (shared memory,
3755 #pragma weak, etc). */
3756 && ! TREE_PUBLIC (exp)
3757 && ! DECL_EXTERNAL (exp)
3758 /* Loading a static variable is unduly expensive, but global
3759 registers aren't expensive. */
3760 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3761 }
3762
3763 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3764 to be evaluated unconditionally.
3765 In addition to simple_operand_p, we assume that comparisons, conversions,
3766 and logic-not operations are simple if their operands are simple, too. */
3767
3768 static bool
3769 simple_operand_p_2 (tree exp)
3770 {
3771 enum tree_code code;
3772
3773 if (TREE_SIDE_EFFECTS (exp)
3774 || tree_could_trap_p (exp))
3775 return false;
3776
3777 while (CONVERT_EXPR_P (exp))
3778 exp = TREE_OPERAND (exp, 0);
3779
3780 code = TREE_CODE (exp);
3781
3782 if (TREE_CODE_CLASS (code) == tcc_comparison)
3783 return (simple_operand_p (TREE_OPERAND (exp, 0))
3784 && simple_operand_p (TREE_OPERAND (exp, 1)));
3785
3786 if (code == TRUTH_NOT_EXPR)
3787 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3788
3789 return simple_operand_p (exp);
3790 }
3791
3792 \f
3793 /* The following functions are subroutines to fold_range_test and allow it to
3794 try to change a logical combination of comparisons into a range test.
3795
3796 For example, both
3797 X == 2 || X == 3 || X == 4 || X == 5
3798 and
3799 X >= 2 && X <= 5
3800 are converted to
3801 (unsigned) (X - 2) <= 3
3802
3803 We describe each set of comparisons as being either inside or outside
3804 a range, using a variable named like IN_P, and then describe the
3805 range with a lower and upper bound. If one of the bounds is omitted,
3806 it represents either the highest or lowest value of the type.
3807
3808 In the comments below, we represent a range by two numbers in brackets
3809 preceded by a "+" to designate being inside that range, or a "-" to
3810 designate being outside that range, so the condition can be inverted by
3811 flipping the prefix. An omitted bound is represented by a "-". For
3812 example, "- [-, 10]" means being outside the range starting at the lowest
3813 possible value and ending at 10, in other words, being greater than 10.
3814 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3815 always false.
3816
3817 We set up things so that the missing bounds are handled in a consistent
3818 manner so neither a missing bound nor "true" and "false" need to be
3819 handled using a special case. */
3820
3821 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3822 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3823 and UPPER1_P are nonzero if the respective argument is an upper bound
3824 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3825 must be specified for a comparison. ARG1 will be converted to ARG0's
3826 type if both are specified. */
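/* E.g. (illustrative): range_binop (LT_EXPR, type, NULL_TREE, 0, c, 1)
compares an omitted lower bound against an upper bound C; SGN0 is -1
and SGN1 is 0, so "-infinity < C" folds to true.  */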
3827
3828 static tree
3829 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3830 tree arg1, int upper1_p)
3831 {
3832 tree tem;
3833 int result;
3834 int sgn0, sgn1;
3835
3836 /* If neither arg represents infinity, do the normal operation.
3837 Else, if not a comparison, return infinity. Else handle the special
3838 comparison rules. Note that most of the cases below won't occur, but
3839 are handled for consistency. */
3840
3841 if (arg0 != 0 && arg1 != 0)
3842 {
3843 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3844 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3845 STRIP_NOPS (tem);
3846 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3847 }
3848
3849 if (TREE_CODE_CLASS (code) != tcc_comparison)
3850 return 0;
3851
3852 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3853 for neither. In real mathematics, we cannot assume open-ended ranges
3854 are the same. But this is computer arithmetic, where numbers are finite.
3855 We can therefore represent any unbounded range with a value Z lying
3856 beyond any representable number. This permits us to treat unbounded
3857 ranges as equal. */
3858 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3859 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3860 switch (code)
3861 {
3862 case EQ_EXPR:
3863 result = sgn0 == sgn1;
3864 break;
3865 case NE_EXPR:
3866 result = sgn0 != sgn1;
3867 break;
3868 case LT_EXPR:
3869 result = sgn0 < sgn1;
3870 break;
3871 case LE_EXPR:
3872 result = sgn0 <= sgn1;
3873 break;
3874 case GT_EXPR:
3875 result = sgn0 > sgn1;
3876 break;
3877 case GE_EXPR:
3878 result = sgn0 >= sgn1;
3879 break;
3880 default:
3881 gcc_unreachable ();
3882 }
3883
3884 return constant_boolean_node (result, type);
3885 }
3886 \f
3887 /* Helper routine for make_range. Perform one step for it, return
3888 new expression if the loop should continue or NULL_TREE if it should
3889 stop. */
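/* E.g. (illustrative): starting from the initial range - [0, 0]
(i.e. "EXP != 0"), a step with CODE == GT_EXPR and ARG1 == 5 turns it
into - [-, 5], which is "EXP > 5"; a further step through
CODE == PLUS_EXPR with ARG1 == 1 (valid when the type is unsigned or
signed overflow is undefined) shifts the known bound, giving
- [-, 4] for the addend.  */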
3890
3891 tree
3892 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3893 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3894 bool *strict_overflow_p)
3895 {
3896 tree arg0_type = TREE_TYPE (arg0);
3897 tree n_low, n_high, low = *p_low, high = *p_high;
3898 int in_p = *p_in_p, n_in_p;
3899
3900 switch (code)
3901 {
3902 case TRUTH_NOT_EXPR:
3903 /* We can only do something if the range is testing for zero. */
3904 if (low == NULL_TREE || high == NULL_TREE
3905 || ! integer_zerop (low) || ! integer_zerop (high))
3906 return NULL_TREE;
3907 *p_in_p = ! in_p;
3908 return arg0;
3909
3910 case EQ_EXPR: case NE_EXPR:
3911 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3912 /* We can only do something if the range is testing for zero
3913 and if the second operand is an integer constant. Note that
3914 saying something is "in" the range we make is done by
3915 complementing IN_P since it will be set in the initial case of
3916 being not equal to zero; "out" is leaving it alone. */
3917 if (low == NULL_TREE || high == NULL_TREE
3918 || ! integer_zerop (low) || ! integer_zerop (high)
3919 || TREE_CODE (arg1) != INTEGER_CST)
3920 return NULL_TREE;
3921
3922 switch (code)
3923 {
3924 case NE_EXPR: /* - [c, c] */
3925 low = high = arg1;
3926 break;
3927 case EQ_EXPR: /* + [c, c] */
3928 in_p = ! in_p, low = high = arg1;
3929 break;
3930 case GT_EXPR: /* - [-, c] */
3931 low = 0, high = arg1;
3932 break;
3933 case GE_EXPR: /* + [c, -] */
3934 in_p = ! in_p, low = arg1, high = 0;
3935 break;
3936 case LT_EXPR: /* - [c, -] */
3937 low = arg1, high = 0;
3938 break;
3939 case LE_EXPR: /* + [-, c] */
3940 in_p = ! in_p, low = 0, high = arg1;
3941 break;
3942 default:
3943 gcc_unreachable ();
3944 }
3945
3946 /* If this is an unsigned comparison, we also know that EXP is
3947 greater than or equal to zero. We base the range tests we make
3948 on that fact, so we record it here so we can parse existing
3949 range tests. We test arg0_type since often the return type
3950 of, e.g. EQ_EXPR, is boolean. */
3951 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3952 {
3953 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3954 in_p, low, high, 1,
3955 build_int_cst (arg0_type, 0),
3956 NULL_TREE))
3957 return NULL_TREE;
3958
3959 in_p = n_in_p, low = n_low, high = n_high;
3960
3961 /* If the high bound is missing, but we have a nonzero low
3962 bound, reverse the range so it goes from zero to the low bound
3963 minus 1. */
3964 if (high == 0 && low && ! integer_zerop (low))
3965 {
3966 in_p = ! in_p;
3967 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
3968 integer_one_node, 0);
3969 low = build_int_cst (arg0_type, 0);
3970 }
3971 }
3972
3973 *p_low = low;
3974 *p_high = high;
3975 *p_in_p = in_p;
3976 return arg0;
3977
3978 case NEGATE_EXPR:
3979 /* If flag_wrapv and ARG0_TYPE is signed, make sure
3980 low and high are non-NULL, then normalize will DTRT. */
3981 if (!TYPE_UNSIGNED (arg0_type)
3982 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
3983 {
3984 if (low == NULL_TREE)
3985 low = TYPE_MIN_VALUE (arg0_type);
3986 if (high == NULL_TREE)
3987 high = TYPE_MAX_VALUE (arg0_type);
3988 }
3989
3990 /* (-x) IN [a,b] -> x in [-b, -a] */
3991 n_low = range_binop (MINUS_EXPR, exp_type,
3992 build_int_cst (exp_type, 0),
3993 0, high, 1);
3994 n_high = range_binop (MINUS_EXPR, exp_type,
3995 build_int_cst (exp_type, 0),
3996 0, low, 0);
3997 if (n_high != 0 && TREE_OVERFLOW (n_high))
3998 return NULL_TREE;
3999 goto normalize;
4000
4001 case BIT_NOT_EXPR:
4002 /* ~ X -> -X - 1 */
4003 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4004 build_int_cst (exp_type, 1));
4005
4006 case PLUS_EXPR:
4007 case MINUS_EXPR:
4008 if (TREE_CODE (arg1) != INTEGER_CST)
4009 return NULL_TREE;
4010
4011 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4012 move a constant to the other side. */
4013 if (!TYPE_UNSIGNED (arg0_type)
4014 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4015 return NULL_TREE;
4016
4017 /* If EXP is signed, any overflow in the computation is undefined,
4018 so we don't worry about it so long as our computations on
4019 the bounds don't overflow. For unsigned, overflow is defined
4020 and this is exactly the right thing. */
4021 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4022 arg0_type, low, 0, arg1, 0);
4023 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4024 arg0_type, high, 1, arg1, 0);
4025 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4026 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4027 return NULL_TREE;
4028
4029 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4030 *strict_overflow_p = true;
4031
4032 normalize:
4033 /* Check for an unsigned range which has wrapped around the maximum
4034 value thus making n_high < n_low, and normalize it. */
4035 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4036 {
4037 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4038 integer_one_node, 0);
4039 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4040 integer_one_node, 0);
4041
4042 /* If the range is of the form +/- [ x+1, x ], we won't
4043 be able to normalize it. But then, it represents the
4044 whole range or the empty set, so make it
4045 +/- [ -, - ]. */
4046 if (tree_int_cst_equal (n_low, low)
4047 && tree_int_cst_equal (n_high, high))
4048 low = high = 0;
4049 else
4050 in_p = ! in_p;
4051 }
4052 else
4053 low = n_low, high = n_high;
4054
4055 *p_low = low;
4056 *p_high = high;
4057 *p_in_p = in_p;
4058 return arg0;
4059
4060 CASE_CONVERT:
4061 case NON_LVALUE_EXPR:
4062 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4063 return NULL_TREE;
4064
4065 if (! INTEGRAL_TYPE_P (arg0_type)
4066 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4067 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4068 return NULL_TREE;
4069
4070 n_low = low, n_high = high;
4071
4072 if (n_low != 0)
4073 n_low = fold_convert_loc (loc, arg0_type, n_low);
4074
4075 if (n_high != 0)
4076 n_high = fold_convert_loc (loc, arg0_type, n_high);
4077
4078 /* If we're converting arg0 from an unsigned type to exp's
4079 signed type, we will be doing the comparison as unsigned.
4080 The tests above have already verified that LOW and HIGH
4081 are both positive.
4082
4083 So we have to ensure that we will handle large unsigned
4084 values the same way that the current signed bounds treat
4085 negative values. */
4086
4087 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4088 {
4089 tree high_positive;
4090 tree equiv_type;
4091 /* For fixed-point modes, we need to pass the saturating flag
4092 as the 2nd parameter. */
4093 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4094 equiv_type
4095 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4096 TYPE_SATURATING (arg0_type));
4097 else
4098 equiv_type
4099 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4100
4101 /* A range without an upper bound is, naturally, unbounded.
4102 Since convert would have cropped a very large value, use
4103 the max value for the destination type. */
4104 high_positive
4105 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4106 : TYPE_MAX_VALUE (arg0_type);
4107
4108 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4109 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4110 fold_convert_loc (loc, arg0_type,
4111 high_positive),
4112 build_int_cst (arg0_type, 1));
4113
4114 /* If the low bound is specified, "and" the range with the
4115 range for which the original unsigned value will be
4116 positive. */
4117 if (low != 0)
4118 {
4119 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4120 1, fold_convert_loc (loc, arg0_type,
4121 integer_zero_node),
4122 high_positive))
4123 return NULL_TREE;
4124
4125 in_p = (n_in_p == in_p);
4126 }
4127 else
4128 {
4129 /* Otherwise, "or" the range with the range of the input
4130 that will be interpreted as negative. */
4131 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4132 1, fold_convert_loc (loc, arg0_type,
4133 integer_zero_node),
4134 high_positive))
4135 return NULL_TREE;
4136
4137 in_p = (in_p != n_in_p);
4138 }
4139 }
4140
4141 *p_low = n_low;
4142 *p_high = n_high;
4143 *p_in_p = in_p;
4144 return arg0;
4145
4146 default:
4147 return NULL_TREE;
4148 }
4149 }
4150
4151 /* Given EXP, a logical expression, set the range it is testing into
4152 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4153 actually being tested. *PLOW and *PHIGH will be made of the same
4154 type as the returned expression. If EXP is not a comparison, we
4155 will most likely not be returning a useful value and range. Set
4156 *STRICT_OVERFLOW_P to true if the return value is only valid
4157 because signed overflow is undefined; otherwise, do not change
4158 *STRICT_OVERFLOW_P. */
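/* E.g. (illustrative): for EXP == (unsigned) (x - 2) <= 3 with x a
signed int, make_range returns x with *PIN_P == 1, *PLOW == 2 and
*PHIGH == 5, i.e. the range + [2, 5]; *STRICT_OVERFLOW_P is set,
since moving the constant relies on signed overflow being
undefined.  */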
4159
4160 tree
4161 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4162 bool *strict_overflow_p)
4163 {
4164 enum tree_code code;
4165 tree arg0, arg1 = NULL_TREE;
4166 tree exp_type, nexp;
4167 int in_p;
4168 tree low, high;
4169 location_t loc = EXPR_LOCATION (exp);
4170
4171 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4172 and see if we can refine the range. Some of the cases below may not
4173 happen, but it doesn't seem worth worrying about this. We keep
4174 iterating as long as make_range_step refines the range and stop as
4175 soon as it returns NULL_TREE. */
4176
4177 in_p = 0;
4178 low = high = build_int_cst (TREE_TYPE (exp), 0);
4179
4180 while (1)
4181 {
4182 code = TREE_CODE (exp);
4183 exp_type = TREE_TYPE (exp);
4184 arg0 = NULL_TREE;
4185
4186 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4187 {
4188 if (TREE_OPERAND_LENGTH (exp) > 0)
4189 arg0 = TREE_OPERAND (exp, 0);
4190 if (TREE_CODE_CLASS (code) == tcc_binary
4191 || TREE_CODE_CLASS (code) == tcc_comparison
4192 || (TREE_CODE_CLASS (code) == tcc_expression
4193 && TREE_OPERAND_LENGTH (exp) > 1))
4194 arg1 = TREE_OPERAND (exp, 1);
4195 }
4196 if (arg0 == NULL_TREE)
4197 break;
4198
4199 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4200 &high, &in_p, strict_overflow_p);
4201 if (nexp == NULL_TREE)
4202 break;
4203 exp = nexp;
4204 }
4205
4206 /* If EXP is a constant, we can evaluate whether this is true or false. */
4207 if (TREE_CODE (exp) == INTEGER_CST)
4208 {
4209 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4210 exp, 0, low, 0))
4211 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4212 exp, 1, high, 1)));
4213 low = high = 0;
4214 exp = 0;
4215 }
4216
4217 *pin_p = in_p, *plow = low, *phigh = high;
4218 return exp;
4219 }
4220 \f
4221 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4222 type, TYPE, return an expression to test if EXP is in (or out of, depending
4223 on IN_P) the range. Return 0 if the test couldn't be created. */
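/* E.g. (illustrative): build_range_check with IN_P == 1, LOW == 2 and
HIGH == 5 produces the single unsigned comparison
(unsigned) x - 2 <= 3, the inverse of the make_range example
above.  */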
4224
4225 tree
4226 build_range_check (location_t loc, tree type, tree exp, int in_p,
4227 tree low, tree high)
4228 {
4229 tree etype = TREE_TYPE (exp), value;
4230
4231 #ifdef HAVE_canonicalize_funcptr_for_compare
4232 /* Disable this optimization for function pointer expressions
4233 on targets that require function pointer canonicalization. */
4234 if (HAVE_canonicalize_funcptr_for_compare
4235 && TREE_CODE (etype) == POINTER_TYPE
4236 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4237 return NULL_TREE;
4238 #endif
4239
4240 if (! in_p)
4241 {
4242 value = build_range_check (loc, type, exp, 1, low, high);
4243 if (value != 0)
4244 return invert_truthvalue_loc (loc, value);
4245
4246 return 0;
4247 }
4248
4249 if (low == 0 && high == 0)
4250 return build_int_cst (type, 1);
4251
4252 if (low == 0)
4253 return fold_build2_loc (loc, LE_EXPR, type, exp,
4254 fold_convert_loc (loc, etype, high));
4255
4256 if (high == 0)
4257 return fold_build2_loc (loc, GE_EXPR, type, exp,
4258 fold_convert_loc (loc, etype, low));
4259
4260 if (operand_equal_p (low, high, 0))
4261 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4262 fold_convert_loc (loc, etype, low));
4263
4264 if (integer_zerop (low))
4265 {
4266 if (! TYPE_UNSIGNED (etype))
4267 {
4268 etype = unsigned_type_for (etype);
4269 high = fold_convert_loc (loc, etype, high);
4270 exp = fold_convert_loc (loc, etype, exp);
4271 }
4272 return build_range_check (loc, type, exp, 1, 0, high);
4273 }
4274
4275 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4276 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4277 {
4278 unsigned HOST_WIDE_INT lo;
4279 HOST_WIDE_INT hi;
4280 int prec;
4281
4282 prec = TYPE_PRECISION (etype);
4283 if (prec <= HOST_BITS_PER_WIDE_INT)
4284 {
4285 hi = 0;
4286 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4287 }
4288 else
4289 {
4290 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4291 lo = (unsigned HOST_WIDE_INT) -1;
4292 }
4293
4294 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4295 {
4296 if (TYPE_UNSIGNED (etype))
4297 {
4298 tree signed_etype = signed_type_for (etype);
4299 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4300 etype
4301 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4302 else
4303 etype = signed_etype;
4304 exp = fold_convert_loc (loc, etype, exp);
4305 }
4306 return fold_build2_loc (loc, GT_EXPR, type, exp,
4307 build_int_cst (etype, 0));
4308 }
4309 }
4310
4311 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4312 This requires wrap-around arithmetic for the type of the expression.
4313 First make sure that arithmetic in this type is valid, then make sure
4314 that it wraps around. */
4315 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4316 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4317 TYPE_UNSIGNED (etype));
4318
4319 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4320 {
4321 tree utype, minv, maxv;
4322
4323 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4324 for the type in question, as we rely on this here. */
4325 utype = unsigned_type_for (etype);
4326 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4327 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4328 integer_one_node, 1);
4329 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4330
4331 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4332 minv, 1, maxv, 1)))
4333 etype = utype;
4334 else
4335 return 0;
4336 }
4337
4338 high = fold_convert_loc (loc, etype, high);
4339 low = fold_convert_loc (loc, etype, low);
4340 exp = fold_convert_loc (loc, etype, exp);
4341
4342 value = const_binop (MINUS_EXPR, high, low);
4343
4344
4345 if (POINTER_TYPE_P (etype))
4346 {
4347 if (value != 0 && !TREE_OVERFLOW (value))
4348 {
4349 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4350 return build_range_check (loc, type,
4351 fold_build_pointer_plus_loc (loc, exp, low),
4352 1, build_int_cst (etype, 0), value);
4353 }
4354 return 0;
4355 }
4356
4357 if (value != 0 && !TREE_OVERFLOW (value))
4358 return build_range_check (loc, type,
4359 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4360 1, build_int_cst (etype, 0), value);
4361
4362 return 0;
4363 }
4364 \f
4365 /* Return the predecessor of VAL in its type, handling the infinite case. */
4366
4367 static tree
4368 range_predecessor (tree val)
4369 {
4370 tree type = TREE_TYPE (val);
4371
4372 if (INTEGRAL_TYPE_P (type)
4373 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4374 return 0;
4375 else
4376 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4377 }
4378
4379 /* Return the successor of VAL in its type, handling the infinite case. */
4380
4381 static tree
4382 range_successor (tree val)
4383 {
4384 tree type = TREE_TYPE (val);
4385
4386 if (INTEGRAL_TYPE_P (type)
4387 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4388 return 0;
4389 else
4390 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4391 }
4392
4393 /* Given two ranges, see if we can merge them into one. Return 1 if we
4394 can, 0 if we can't. Set the output range into the specified parameters. */
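/* E.g. (illustrative): merging + [2, 8] with + [5, -] (the second
unbounded above) yields the intersection + [5, 8]; merging the
disjoint + [2, 4] with + [6, 8] yields the always-false range
- [-, -].  */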
4395
4396 bool
4397 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4398 tree high0, int in1_p, tree low1, tree high1)
4399 {
4400 int no_overlap;
4401 int subset;
4402 int temp;
4403 tree tem;
4404 int in_p;
4405 tree low, high;
4406 int lowequal = ((low0 == 0 && low1 == 0)
4407 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4408 low0, 0, low1, 0)));
4409 int highequal = ((high0 == 0 && high1 == 0)
4410 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4411 high0, 1, high1, 1)));
4412
4413 /* Make range 0 be the range that starts first, or ends last if they
4414 start at the same value. Swap them if that is not already the case. */
4415 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4416 low0, 0, low1, 0))
4417 || (lowequal
4418 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4419 high1, 1, high0, 1))))
4420 {
4421 temp = in0_p, in0_p = in1_p, in1_p = temp;
4422 tem = low0, low0 = low1, low1 = tem;
4423 tem = high0, high0 = high1, high1 = tem;
4424 }
4425
4426 /* Now flag two cases, whether the ranges are disjoint or whether the
4427 second range is totally subsumed in the first. Note that the tests
4428 below are simplified by the ones above. */
4429 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4430 high0, 1, low1, 0));
4431 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4432 high1, 1, high0, 1));
4433
4434 /* We now have four cases, depending on whether we are including or
4435 excluding the two ranges. */
4436 if (in0_p && in1_p)
4437 {
4438 /* If they don't overlap, the result is false. If the second range
4439 is a subset it is the result. Otherwise, the range is from the start
4440 of the second to the end of the first. */
4441 if (no_overlap)
4442 in_p = 0, low = high = 0;
4443 else if (subset)
4444 in_p = 1, low = low1, high = high1;
4445 else
4446 in_p = 1, low = low1, high = high0;
4447 }
4448
4449 else if (in0_p && ! in1_p)
4450 {
4451 /* If they don't overlap, the result is the first range. If they are
4452 equal, the result is false. If the second range is a subset of the
4453 first, and the ranges begin at the same place, we go from just after
4454 the end of the second range to the end of the first. If the second
4455 range is not a subset of the first, or if it is a subset and both
4456 ranges end at the same place, the range starts at the start of the
4457 first range and ends just before the second range.
4458 Otherwise, we can't describe this as a single range. */
4459 if (no_overlap)
4460 in_p = 1, low = low0, high = high0;
4461 else if (lowequal && highequal)
4462 in_p = 0, low = high = 0;
4463 else if (subset && lowequal)
4464 {
4465 low = range_successor (high1);
4466 high = high0;
4467 in_p = 1;
4468 if (low == 0)
4469 {
4470 /* We are in the weird situation where high0 > high1 but
4471 high1 has no successor. Punt. */
4472 return 0;
4473 }
4474 }
4475 else if (! subset || highequal)
4476 {
4477 low = low0;
4478 high = range_predecessor (low1);
4479 in_p = 1;
4480 if (high == 0)
4481 {
4482 /* low0 < low1 but low1 has no predecessor. Punt. */
4483 return 0;
4484 }
4485 }
4486 else
4487 return 0;
4488 }
4489
4490 else if (! in0_p && in1_p)
4491 {
4492 /* If they don't overlap, the result is the second range. If the second
4493 is a subset of the first, the result is false. Otherwise,
4494 the range starts just after the first range and ends at the
4495 end of the second. */
4496 if (no_overlap)
4497 in_p = 1, low = low1, high = high1;
4498 else if (subset || highequal)
4499 in_p = 0, low = high = 0;
4500 else
4501 {
4502 low = range_successor (high0);
4503 high = high1;
4504 in_p = 1;
4505 if (low == 0)
4506 {
4507 /* high1 > high0 but high0 has no successor. Punt. */
4508 return 0;
4509 }
4510 }
4511 }
4512
4513 else
4514 {
4515 /* The case where we are excluding both ranges. Here the complex case
4516 is if they don't overlap. In that case, the only time we have a
4517 range is if they are adjacent. If the second is a subset of the
4518 first, the result is the first. Otherwise, the range to exclude
4519 starts at the beginning of the first range and ends at the end of the
4520 second. */
4521 if (no_overlap)
4522 {
4523 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4524 range_successor (high0),
4525 1, low1, 0)))
4526 in_p = 0, low = low0, high = high1;
4527 else
4528 {
4529 /* Canonicalize - [min, x] into - [-, x]. */
4530 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4531 switch (TREE_CODE (TREE_TYPE (low0)))
4532 {
4533 case ENUMERAL_TYPE:
4534 if (TYPE_PRECISION (TREE_TYPE (low0))
4535 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4536 break;
4537 /* FALLTHROUGH */
4538 case INTEGER_TYPE:
4539 if (tree_int_cst_equal (low0,
4540 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4541 low0 = 0;
4542 break;
4543 case POINTER_TYPE:
4544 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4545 && integer_zerop (low0))
4546 low0 = 0;
4547 break;
4548 default:
4549 break;
4550 }
4551
4552 /* Canonicalize - [x, max] into - [x, -]. */
4553 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4554 switch (TREE_CODE (TREE_TYPE (high1)))
4555 {
4556 case ENUMERAL_TYPE:
4557 if (TYPE_PRECISION (TREE_TYPE (high1))
4558 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4559 break;
4560 /* FALLTHROUGH */
4561 case INTEGER_TYPE:
4562 if (tree_int_cst_equal (high1,
4563 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4564 high1 = 0;
4565 break;
4566 case POINTER_TYPE:
4567 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4568 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4569 high1, 1,
4570 integer_one_node, 1)))
4571 high1 = 0;
4572 break;
4573 default:
4574 break;
4575 }
4576
4577 /* The ranges might also be adjacent between the maximum and
4578 minimum values of the given type. For
4579 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4580 return + [x + 1, y - 1]. */
4581 if (low0 == 0 && high1 == 0)
4582 {
4583 low = range_successor (high0);
4584 high = range_predecessor (low1);
4585 if (low == 0 || high == 0)
4586 return 0;
4587
4588 in_p = 1;
4589 }
4590 else
4591 return 0;
4592 }
4593 }
4594 else if (subset)
4595 in_p = 0, low = low0, high = high0;
4596 else
4597 in_p = 0, low = low0, high = high1;
4598 }
4599
4600 *pin_p = in_p, *plow = low, *phigh = high;
4601 return 1;
4602 }
4603 \f
4604
4605 /* Subroutine of fold, looking inside expressions of the form
4606 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4607 of the COND_EXPR. This function is being used also to optimize
4608 A op B ? C : A, by reversing the comparison first.
4609
4610 Return a folded expression whose code is not a COND_EXPR
4611 anymore, or NULL_TREE if no folding opportunity is found. */
4612
4613 static tree
4614 fold_cond_expr_with_comparison (location_t loc, tree type,
4615 tree arg0, tree arg1, tree arg2)
4616 {
4617 enum tree_code comp_code = TREE_CODE (arg0);
4618 tree arg00 = TREE_OPERAND (arg0, 0);
4619 tree arg01 = TREE_OPERAND (arg0, 1);
4620 tree arg1_type = TREE_TYPE (arg1);
4621 tree tem;
4622
4623 STRIP_NOPS (arg1);
4624 STRIP_NOPS (arg2);
4625
4626 /* If we have A op 0 ? A : -A, consider applying the following
4627 transformations:
4628
4629 A == 0? A : -A same as -A
4630 A != 0? A : -A same as A
4631 A >= 0? A : -A same as abs (A)
4632 A > 0? A : -A same as abs (A)
4633 A <= 0? A : -A same as -abs (A)
4634 A < 0? A : -A same as -abs (A)
4635
4636 None of these transformations work for modes with signed
4637 zeros. If A is +/-0, the first two transformations will
4638 change the sign of the result (from +0 to -0, or vice
4639 versa). The last four will fix the sign of the result,
4640 even though the original expressions could be positive or
4641 negative, depending on the sign of A.
4642
4643 Note that all these transformations are correct if A is
4644 NaN, since the two alternatives (A and -A) are also NaNs. */
4645 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4646 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4647 ? real_zerop (arg01)
4648 : integer_zerop (arg01))
4649 && ((TREE_CODE (arg2) == NEGATE_EXPR
4650 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4651 /* In the case that A is of the form X-Y, '-A' (arg2) may
4652 have already been folded to Y-X, check for that. */
4653 || (TREE_CODE (arg1) == MINUS_EXPR
4654 && TREE_CODE (arg2) == MINUS_EXPR
4655 && operand_equal_p (TREE_OPERAND (arg1, 0),
4656 TREE_OPERAND (arg2, 1), 0)
4657 && operand_equal_p (TREE_OPERAND (arg1, 1),
4658 TREE_OPERAND (arg2, 0), 0))))
4659 switch (comp_code)
4660 {
4661 case EQ_EXPR:
4662 case UNEQ_EXPR:
4663 tem = fold_convert_loc (loc, arg1_type, arg1);
4664 return pedantic_non_lvalue_loc (loc,
4665 fold_convert_loc (loc, type,
4666 negate_expr (tem)));
4667 case NE_EXPR:
4668 case LTGT_EXPR:
4669 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4670 case UNGE_EXPR:
4671 case UNGT_EXPR:
4672 if (flag_trapping_math)
4673 break;
4674 /* Fall through. */
4675 case GE_EXPR:
4676 case GT_EXPR:
4677 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4678 arg1 = fold_convert_loc (loc, signed_type_for
4679 (TREE_TYPE (arg1)), arg1);
4680 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4681 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4682 case UNLE_EXPR:
4683 case UNLT_EXPR:
4684 if (flag_trapping_math)
4685 break;
4686 case LE_EXPR:
4687 case LT_EXPR:
4688 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4689 arg1 = fold_convert_loc (loc, signed_type_for
4690 (TREE_TYPE (arg1)), arg1);
4691 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4692 return negate_expr (fold_convert_loc (loc, type, tem));
4693 default:
4694 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4695 break;
4696 }
4697
4698 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4699 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4700 both transformations are correct when A is NaN: A != 0
4701 is then true, and A == 0 is false. */
4702
4703 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4704 && integer_zerop (arg01) && integer_zerop (arg2))
4705 {
4706 if (comp_code == NE_EXPR)
4707 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4708 else if (comp_code == EQ_EXPR)
4709 return build_zero_cst (type);
4710 }
4711
4712 /* Try some transformations of A op B ? A : B.
4713
4714 A == B? A : B same as B
4715 A != B? A : B same as A
4716 A >= B? A : B same as max (A, B)
4717 A > B? A : B same as max (B, A)
4718 A <= B? A : B same as min (A, B)
4719 A < B? A : B same as min (B, A)
4720
4721 As above, these transformations don't work in the presence
4722 of signed zeros. For example, if A and B are zeros of
4723 opposite sign, the first two transformations will change
4724 the sign of the result. In the last four, the original
4725 expressions give different results for (A=+0, B=-0) and
4726 (A=-0, B=+0), but the transformed expressions do not.
4727
4728 The first two transformations are correct if either A or B
4729 is a NaN. In the first transformation, the condition will
4730 be false, and B will indeed be chosen. In the case of the
4731 second transformation, the condition A != B will be true,
4732 and A will be chosen.
4733
4734 The conversions to max() and min() are not correct if B is
4735 a number and A is not. The conditions in the original
4736 expressions will be false, so all four give B. The min()
4737 and max() versions would give a NaN instead. */
4738 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4739 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4740 /* Avoid these transformations if the COND_EXPR may be used
4741 as an lvalue in the C++ front-end. PR c++/19199. */
4742 && (in_gimple_form
4743 || VECTOR_TYPE_P (type)
4744 || (strcmp (lang_hooks.name, "GNU C++") != 0
4745 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4746 || ! maybe_lvalue_p (arg1)
4747 || ! maybe_lvalue_p (arg2)))
4748 {
4749 tree comp_op0 = arg00;
4750 tree comp_op1 = arg01;
4751 tree comp_type = TREE_TYPE (comp_op0);
4752
4753 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4754 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4755 {
4756 comp_type = type;
4757 comp_op0 = arg1;
4758 comp_op1 = arg2;
4759 }
4760
4761 switch (comp_code)
4762 {
4763 case EQ_EXPR:
4764 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4765 case NE_EXPR:
4766 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4767 case LE_EXPR:
4768 case LT_EXPR:
4769 case UNLE_EXPR:
4770 case UNLT_EXPR:
4771 /* In C++ a ?: expression can be an lvalue, so put the
4772 operand which will be used if they are equal first
4773 so that we can convert this back to the
4774 corresponding COND_EXPR. */
4775 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4776 {
4777 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4778 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4779 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4780 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4781 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4782 comp_op1, comp_op0);
4783 return pedantic_non_lvalue_loc (loc,
4784 fold_convert_loc (loc, type, tem));
4785 }
4786 break;
4787 case GE_EXPR:
4788 case GT_EXPR:
4789 case UNGE_EXPR:
4790 case UNGT_EXPR:
4791 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4792 {
4793 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4794 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4795 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4796 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4797 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4798 comp_op1, comp_op0);
4799 return pedantic_non_lvalue_loc (loc,
4800 fold_convert_loc (loc, type, tem));
4801 }
4802 break;
4803 case UNEQ_EXPR:
4804 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4805 return pedantic_non_lvalue_loc (loc,
4806 fold_convert_loc (loc, type, arg2));
4807 break;
4808 case LTGT_EXPR:
4809 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4810 return pedantic_non_lvalue_loc (loc,
4811 fold_convert_loc (loc, type, arg1));
4812 break;
4813 default:
4814 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4815 break;
4816 }
4817 }
4818
4819 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4820 we might still be able to simplify this. For example,
4821 if C1 is one less or one more than C2, this might have started
4822 out as a MIN or MAX and been transformed by this function.
4823 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
4824
4825 if (INTEGRAL_TYPE_P (type)
4826 && TREE_CODE (arg01) == INTEGER_CST
4827 && TREE_CODE (arg2) == INTEGER_CST)
4828 switch (comp_code)
4829 {
4830 case EQ_EXPR:
4831 if (TREE_CODE (arg1) == INTEGER_CST)
4832 break;
4833 /* We can replace A with C1 in this case. */
4834 arg1 = fold_convert_loc (loc, type, arg01);
4835 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4836
4837 case LT_EXPR:
4838 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4839 MIN_EXPR, to preserve the signedness of the comparison. */
4840 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4841 OEP_ONLY_CONST)
4842 && operand_equal_p (arg01,
4843 const_binop (PLUS_EXPR, arg2,
4844 build_int_cst (type, 1)),
4845 OEP_ONLY_CONST))
4846 {
4847 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4848 fold_convert_loc (loc, TREE_TYPE (arg00),
4849 arg2));
4850 return pedantic_non_lvalue_loc (loc,
4851 fold_convert_loc (loc, type, tem));
4852 }
4853 break;
4854
4855 case LE_EXPR:
4856 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4857 as above. */
4858 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4859 OEP_ONLY_CONST)
4860 && operand_equal_p (arg01,
4861 const_binop (MINUS_EXPR, arg2,
4862 build_int_cst (type, 1)),
4863 OEP_ONLY_CONST))
4864 {
4865 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4866 fold_convert_loc (loc, TREE_TYPE (arg00),
4867 arg2));
4868 return pedantic_non_lvalue_loc (loc,
4869 fold_convert_loc (loc, type, tem));
4870 }
4871 break;
4872
4873 case GT_EXPR:
4874 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4875 MAX_EXPR, to preserve the signedness of the comparison. */
4876 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4877 OEP_ONLY_CONST)
4878 && operand_equal_p (arg01,
4879 const_binop (MINUS_EXPR, arg2,
4880 build_int_cst (type, 1)),
4881 OEP_ONLY_CONST))
4882 {
4883 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4884 fold_convert_loc (loc, TREE_TYPE (arg00),
4885 arg2));
4886 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4887 }
4888 break;
4889
4890 case GE_EXPR:
4891 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4892 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4893 OEP_ONLY_CONST)
4894 && operand_equal_p (arg01,
4895 const_binop (PLUS_EXPR, arg2,
4896 build_int_cst (type, 1)),
4897 OEP_ONLY_CONST))
4898 {
4899 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4900 fold_convert_loc (loc, TREE_TYPE (arg00),
4901 arg2));
4902 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4903 }
4904 break;
4905 case NE_EXPR:
4906 break;
4907 default:
4908 gcc_unreachable ();
4909 }
4910
4911 return NULL_TREE;
4912 }
4913
4914
4915 \f
4916 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4917 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4918 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4919 false) >= 2)
4920 #endif
4921
4922 /* EXP is some logical combination of boolean tests. See if we can
4923 merge it into some range test. Return the new tree if so. */
4924
4925 static tree
4926 fold_range_test (location_t loc, enum tree_code code, tree type,
4927 tree op0, tree op1)
4928 {
4929 int or_op = (code == TRUTH_ORIF_EXPR
4930 || code == TRUTH_OR_EXPR);
4931 int in0_p, in1_p, in_p;
4932 tree low0, low1, low, high0, high1, high;
4933 bool strict_overflow_p = false;
4934 tree lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4935 tree rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4936 tree tem;
4937 const char * const warnmsg = G_("assuming signed overflow does not occur "
4938 "when simplifying range test");
4939
4940 /* If this is an OR operation, invert both sides; we will invert
4941 again at the end. */
4942 if (or_op)
4943 in0_p = ! in0_p, in1_p = ! in1_p;
4944
4945 /* If both expressions are the same, if we can merge the ranges, and we
4946 can build the range test, return it or it inverted. If one of the
4947 ranges is always true or always false, consider it to be the same
4948 expression as the other. */
4949 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4950 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4951 in1_p, low1, high1)
4952 && 0 != (tem = (build_range_check (loc, type,
4953 lhs != 0 ? lhs
4954 : rhs != 0 ? rhs : integer_zero_node,
4955 in_p, low, high))))
4956 {
4957 if (strict_overflow_p)
4958 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
4959 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
4960 }
4961
4962 /* On machines where the branch cost is expensive, if this is a
4963 short-circuited branch and the underlying object on both sides
4964 is the same, make a non-short-circuit operation. */
4965 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
4966 && lhs != 0 && rhs != 0
4967 && (code == TRUTH_ANDIF_EXPR
4968 || code == TRUTH_ORIF_EXPR)
4969 && operand_equal_p (lhs, rhs, 0))
4970 {
4971 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
4972 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
4973 which cases we can't do this. */
4974 if (simple_operand_p (lhs))
4975 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4976 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4977 type, op0, op1);
4978
4979 else if (!lang_hooks.decls.global_bindings_p ()
4980 && !CONTAINS_PLACEHOLDER_P (lhs))
4981 {
4982 tree common = save_expr (lhs);
4983
4984 if (0 != (lhs = build_range_check (loc, type, common,
4985 or_op ? ! in0_p : in0_p,
4986 low0, high0))
4987 && (0 != (rhs = build_range_check (loc, type, common,
4988 or_op ? ! in1_p : in1_p,
4989 low1, high1))))
4990 {
4991 if (strict_overflow_p)
4992 fold_overflow_warning (warnmsg,
4993 WARN_STRICT_OVERFLOW_COMPARISON);
4994 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
4995 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
4996 type, lhs, rhs);
4997 }
4998 }
4999 }
5000
5001 return 0;
5002 }
5003 \f
5004 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5005 P-bit value. Arrange things so the extra bits will be set to zero if and
5006 only if C is sign-extended to its full width. If MASK is nonzero,
5007 it is an INTEGER_CST that should be AND'ed with the extra bits. */
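/* Illustrative sketch, not part of the original comment: with P == 4
in a 32-bit type, C == 0xC (-4 as a 4-bit signed value) comes back as
0xFFFFFFFC, whose extra 28 bits are all ones, whereas an already
sign-extended C == 0xFFFFFFFC comes back as 0xC with the extra bits
zero.  */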
5008
5009 static tree
5010 unextend (tree c, int p, int unsignedp, tree mask)
5011 {
5012 tree type = TREE_TYPE (c);
5013 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5014 tree temp;
5015
5016 if (p == modesize || unsignedp)
5017 return c;
5018
5019 /* We work by getting just the sign bit into the low-order bit, then
5020 into the high-order bit, then sign-extend. We then XOR that value
5021 with C. */
5022 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5023 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5024
5025 /* We must use a signed type in order to get an arithmetic right shift.
5026 However, we must also avoid introducing accidental overflows, so that
5027 a subsequent call to integer_zerop will work. Hence we must
5028 do the type conversion here. At this point, the constant is either
5029 zero or one, and the conversion to a signed type can never overflow.
5030 We could get an overflow if this conversion is done anywhere else. */
5031 if (TYPE_UNSIGNED (type))
5032 temp = fold_convert (signed_type_for (type), temp);
5033
5034 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5035 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5036 if (mask != 0)
5037 temp = const_binop (BIT_AND_EXPR, temp,
5038 fold_convert (TREE_TYPE (c), mask));
5039 /* If necessary, convert the type back to match the type of C. */
5040 if (TYPE_UNSIGNED (type))
5041 temp = fold_convert (type, temp);
5042
5043 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5044 }
5045 \f
5046 /* For an expression that has the form
5047 (A && B) || ~B
5048 or
5049 (A || B) && ~B,
5050 we can drop one of the inner expressions and simplify to
5051 A || ~B
5052 or
5053 A && ~B
5054 LOC is the location of the resulting expression. OP is the inner
5055 logical operation (the left-hand side in the examples above), while
5056 CMPOP is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5057 removing a condition that guards another, as in
5058 (A != NULL && A->...) || A == NULL
5059 which we must not transform. If RHS_ONLY is true, only eliminate the
5060 right-most operand of the inner logical operation. */
5061
5062 static tree
5063 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5064 bool rhs_only)
5065 {
5066 tree type = TREE_TYPE (cmpop);
5067 enum tree_code code = TREE_CODE (cmpop);
5068 enum tree_code truthop_code = TREE_CODE (op);
5069 tree lhs = TREE_OPERAND (op, 0);
5070 tree rhs = TREE_OPERAND (op, 1);
5071 tree orig_lhs = lhs, orig_rhs = rhs;
5072 enum tree_code rhs_code = TREE_CODE (rhs);
5073 enum tree_code lhs_code = TREE_CODE (lhs);
5074 enum tree_code inv_code;
5075
5076 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5077 return NULL_TREE;
5078
5079 if (TREE_CODE_CLASS (code) != tcc_comparison)
5080 return NULL_TREE;
5081
5082 if (rhs_code == truthop_code)
5083 {
5084 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5085 if (newrhs != NULL_TREE)
5086 {
5087 rhs = newrhs;
5088 rhs_code = TREE_CODE (rhs);
5089 }
5090 }
5091 if (lhs_code == truthop_code && !rhs_only)
5092 {
5093 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5094 if (newlhs != NULL_TREE)
5095 {
5096 lhs = newlhs;
5097 lhs_code = TREE_CODE (lhs);
5098 }
5099 }
5100
5101 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5102 if (inv_code == rhs_code
5103 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5104 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5105 return lhs;
5106 if (!rhs_only && inv_code == lhs_code
5107 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5108 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5109 return rhs;
5110 if (rhs != orig_rhs || lhs != orig_lhs)
5111 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5112 lhs, rhs);
5113 return NULL_TREE;
5114 }
5115
5116 /* Find ways of folding logical expressions of LHS and RHS:
5117 Try to merge two comparisons to the same innermost item.
5118 Look for range tests like "ch >= '0' && ch <= '9'".
5119 Look for combinations of simple terms on machines with expensive branches
5120 and evaluate the RHS unconditionally.
5121
5122 For example, if we have p->a == 2 && p->b == 4 and we can make an
5123 object large enough to span both A and B, we can do this with a comparison
5124 against the object ANDed with the a mask.
5125
5126 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5127 operations to do this with one comparison.
5128
5129 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5130 function and the one above.
5131
5132 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5133 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5134
5135 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5136 two operands.
5137
5138 We return the simplified tree or 0 if no optimization is possible. */
5139
5140 static tree
5141 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5142 tree lhs, tree rhs)
5143 {
5144 /* If this is the "or" of two comparisons, we can do something if
5145 the comparisons are NE_EXPR. If this is the "and", we can do something
5146 if the comparisons are EQ_EXPR. I.e.,
5147 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5148
5149 WANTED_CODE is this operation code. For single bit fields, we can
5150 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5151 comparison for one-bit fields. */
5152
5153 enum tree_code wanted_code;
5154 enum tree_code lcode, rcode;
5155 tree ll_arg, lr_arg, rl_arg, rr_arg;
5156 tree ll_inner, lr_inner, rl_inner, rr_inner;
5157 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5158 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5159 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5160 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5161 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5162 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5163 enum machine_mode lnmode, rnmode;
5164 tree ll_mask, lr_mask, rl_mask, rr_mask;
5165 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5166 tree l_const, r_const;
5167 tree lntype, rntype, result;
5168 HOST_WIDE_INT first_bit, end_bit;
5169 int volatilep;
5170
5171 /* Start by getting the comparison codes. Fail if anything is volatile.
5172 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5173 it were surrounded with a NE_EXPR. */
5174
5175 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5176 return 0;
5177
5178 lcode = TREE_CODE (lhs);
5179 rcode = TREE_CODE (rhs);
5180
5181 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5182 {
5183 lhs = build2 (NE_EXPR, truth_type, lhs,
5184 build_int_cst (TREE_TYPE (lhs), 0));
5185 lcode = NE_EXPR;
5186 }
5187
5188 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5189 {
5190 rhs = build2 (NE_EXPR, truth_type, rhs,
5191 build_int_cst (TREE_TYPE (rhs), 0));
5192 rcode = NE_EXPR;
5193 }
5194
5195 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5196 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5197 return 0;
5198
5199 ll_arg = TREE_OPERAND (lhs, 0);
5200 lr_arg = TREE_OPERAND (lhs, 1);
5201 rl_arg = TREE_OPERAND (rhs, 0);
5202 rr_arg = TREE_OPERAND (rhs, 1);
5203
5204 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5205 if (simple_operand_p (ll_arg)
5206 && simple_operand_p (lr_arg))
5207 {
5208 if (operand_equal_p (ll_arg, rl_arg, 0)
5209 && operand_equal_p (lr_arg, rr_arg, 0))
5210 {
5211 result = combine_comparisons (loc, code, lcode, rcode,
5212 truth_type, ll_arg, lr_arg);
5213 if (result)
5214 return result;
5215 }
5216 else if (operand_equal_p (ll_arg, rr_arg, 0)
5217 && operand_equal_p (lr_arg, rl_arg, 0))
5218 {
5219 result = combine_comparisons (loc, code, lcode,
5220 swap_tree_comparison (rcode),
5221 truth_type, ll_arg, lr_arg);
5222 if (result)
5223 return result;
5224 }
5225 }
5226
5227 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5228 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5229
5230 /* If the RHS can be evaluated unconditionally and its operands are
5231 simple, it wins to evaluate the RHS unconditionally on machines
5232 with expensive branches. In this case, this isn't a comparison
5233 that can be merged. */
5234
5235 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5236 false) >= 2
5237 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5238 && simple_operand_p (rl_arg)
5239 && simple_operand_p (rr_arg))
5240 {
5241 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5242 if (code == TRUTH_OR_EXPR
5243 && lcode == NE_EXPR && integer_zerop (lr_arg)
5244 && rcode == NE_EXPR && integer_zerop (rr_arg)
5245 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5246 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5247 return build2_loc (loc, NE_EXPR, truth_type,
5248 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5249 ll_arg, rl_arg),
5250 build_int_cst (TREE_TYPE (ll_arg), 0));
5251
5252 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5253 if (code == TRUTH_AND_EXPR
5254 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5255 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5256 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5257 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5258 return build2_loc (loc, EQ_EXPR, truth_type,
5259 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5260 ll_arg, rl_arg),
5261 build_int_cst (TREE_TYPE (ll_arg), 0));
5262 }
5263
5264 /* See if the comparisons can be merged. Then get all the parameters for
5265 each side. */
5266
5267 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5268 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5269 return 0;
5270
5271 volatilep = 0;
5272 ll_inner = decode_field_reference (loc, ll_arg,
5273 &ll_bitsize, &ll_bitpos, &ll_mode,
5274 &ll_unsignedp, &volatilep, &ll_mask,
5275 &ll_and_mask);
5276 lr_inner = decode_field_reference (loc, lr_arg,
5277 &lr_bitsize, &lr_bitpos, &lr_mode,
5278 &lr_unsignedp, &volatilep, &lr_mask,
5279 &lr_and_mask);
5280 rl_inner = decode_field_reference (loc, rl_arg,
5281 &rl_bitsize, &rl_bitpos, &rl_mode,
5282 &rl_unsignedp, &volatilep, &rl_mask,
5283 &rl_and_mask);
5284 rr_inner = decode_field_reference (loc, rr_arg,
5285 &rr_bitsize, &rr_bitpos, &rr_mode,
5286 &rr_unsignedp, &volatilep, &rr_mask,
5287 &rr_and_mask);
5288
5289 /* The inner operation on the lhs of each comparison must be the same
5290 if we are to be able to do anything.
5291 Then see if we have constants. If not, the same must be true for
5292 the rhs's. */
5293 if (volatilep || ll_inner == 0 || rl_inner == 0
5294 || ! operand_equal_p (ll_inner, rl_inner, 0))
5295 return 0;
5296
5297 if (TREE_CODE (lr_arg) == INTEGER_CST
5298 && TREE_CODE (rr_arg) == INTEGER_CST)
5299 l_const = lr_arg, r_const = rr_arg;
5300 else if (lr_inner == 0 || rr_inner == 0
5301 || ! operand_equal_p (lr_inner, rr_inner, 0))
5302 return 0;
5303 else
5304 l_const = r_const = 0;
5305
5306 /* If either comparison code is not correct for our logical operation,
5307 fail. However, we can convert a one-bit comparison against zero into
5308 the opposite comparison against that bit being set in the field. */
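/* For example, when merging with TRUTH_AND_EXPR the wanted code is
   EQ_EXPR, and a one-bit test such as "(x & 8) != 0" is recast as
   "(x & 8) == 8" by substituting the mask for the constant.  */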
5309
5310 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5311 if (lcode != wanted_code)
5312 {
5313 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5314 {
5315 /* Make the left operand unsigned, since we are only interested
5316 in the value of one bit. Otherwise we are doing the wrong
5317 thing below. */
5318 ll_unsignedp = 1;
5319 l_const = ll_mask;
5320 }
5321 else
5322 return 0;
5323 }
5324
5325 /* This is analogous to the code for l_const above. */
5326 if (rcode != wanted_code)
5327 {
5328 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5329 {
5330 rl_unsignedp = 1;
5331 r_const = rl_mask;
5332 }
5333 else
5334 return 0;
5335 }
5336
5337 /* See if we can find a mode that contains both fields being compared on
5338 the left. If we can't, fail. Otherwise, update all constants and masks
5339 to be relative to a field of that size. */
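/* For example, two tests of adjacent 8-bit fields at bit positions 0
   and 8 of the same word can both be done in one 16-bit load, with
   the masks and constants shifted to their positions within it.  */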
5340 first_bit = MIN (ll_bitpos, rl_bitpos);
5341 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5342 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5343 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5344 volatilep);
5345 if (lnmode == VOIDmode)
5346 return 0;
5347
5348 lnbitsize = GET_MODE_BITSIZE (lnmode);
5349 lnbitpos = first_bit & ~ (lnbitsize - 1);
5350 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5351 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5352
5353 if (BYTES_BIG_ENDIAN)
5354 {
5355 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5356 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5357 }
5358
5359 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5360 size_int (xll_bitpos));
5361 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5362 size_int (xrl_bitpos));
5363
5364 if (l_const)
5365 {
5366 l_const = fold_convert_loc (loc, lntype, l_const);
5367 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5368 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5369 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5370 fold_build1_loc (loc, BIT_NOT_EXPR,
5371 lntype, ll_mask))))
5372 {
5373 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5374
5375 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5376 }
5377 }
5378 if (r_const)
5379 {
5380 r_const = fold_convert_loc (loc, lntype, r_const);
5381 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5382 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5383 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5384 fold_build1_loc (loc, BIT_NOT_EXPR,
5385 lntype, rl_mask))))
5386 {
5387 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5388
5389 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5390 }
5391 }
5392
5393 /* If the right sides are not constant, do the same for them. Also,
5394 disallow this optimization if a size or signedness mismatch occurs
5395 between the left and right sides. */
5396 if (l_const == 0)
5397 {
5398 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5399 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5400 /* Make sure the two fields on the right
5401 correspond to the left without being swapped. */
5402 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5403 return 0;
5404
5405 first_bit = MIN (lr_bitpos, rr_bitpos);
5406 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5407 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5408 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5409 volatilep);
5410 if (rnmode == VOIDmode)
5411 return 0;
5412
5413 rnbitsize = GET_MODE_BITSIZE (rnmode);
5414 rnbitpos = first_bit & ~ (rnbitsize - 1);
5415 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5416 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5417
5418 if (BYTES_BIG_ENDIAN)
5419 {
5420 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5421 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5422 }
5423
5424 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5425 rntype, lr_mask),
5426 size_int (xlr_bitpos));
5427 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5428 rntype, rr_mask),
5429 size_int (xrr_bitpos));
5430
5431 /* Make a mask that corresponds to both fields being compared.
5432 Do this for both items being compared. If the operands are the
5433 same size and the bits being compared are in the same position
5434 then we can do this by masking both and comparing the masked
5435 results. */
5436 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5437 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5438 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5439 {
5440 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5441 ll_unsignedp || rl_unsignedp);
5442 if (! all_ones_mask_p (ll_mask, lnbitsize))
5443 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5444
5445 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5446 lr_unsignedp || rr_unsignedp);
5447 if (! all_ones_mask_p (lr_mask, rnbitsize))
5448 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5449
5450 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5451 }
5452
5453 /* There is still another way we can do something: If both pairs of
5454 fields being compared are adjacent, we may be able to make a wider
5455 field containing them both.
5456
5457 Note that we still must mask the lhs/rhs expressions. Furthermore,
5458 the mask must be shifted to account for the shift done by
5459 make_bit_field_ref. */
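/* For example, if the two left-hand fields occupy bits 0..7 and 8..15
   and the two right-hand fields are adjacent in the same way, a single
   16-bit reference to each side can replace the pair of comparisons.  */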
5460 if ((ll_bitsize + ll_bitpos == rl_bitpos
5461 && lr_bitsize + lr_bitpos == rr_bitpos)
5462 || (ll_bitpos == rl_bitpos + rl_bitsize
5463 && lr_bitpos == rr_bitpos + rr_bitsize))
5464 {
5465 tree type;
5466
5467 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5468 ll_bitsize + rl_bitsize,
5469 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5470 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5471 lr_bitsize + rr_bitsize,
5472 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5473
5474 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5475 size_int (MIN (xll_bitpos, xrl_bitpos)));
5476 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5477 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5478
5479 /* Convert to the smaller type before masking out unwanted bits. */
5480 type = lntype;
5481 if (lntype != rntype)
5482 {
5483 if (lnbitsize > rnbitsize)
5484 {
5485 lhs = fold_convert_loc (loc, rntype, lhs);
5486 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5487 type = rntype;
5488 }
5489 else if (lnbitsize < rnbitsize)
5490 {
5491 rhs = fold_convert_loc (loc, lntype, rhs);
5492 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5493 type = lntype;
5494 }
5495 }
5496
5497 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5498 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5499
5500 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5501 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5502
5503 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5504 }
5505
5506 return 0;
5507 }
5508
5509 /* Handle the case of comparisons with constants. If there is something in
5510 common between the masks, those bits of the constants must be the same.
5511 If not, the condition is always false. Test for this to avoid generating
5512 incorrect code below. */
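/* For example, "(x & 3) == 1 && (x & 5) == 4" requires bit 0 of x to
   be both 1 and 0, so the conjunction is always false.  */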
5513 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5514 if (! integer_zerop (result)
5515 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5516 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5517 {
5518 if (wanted_code == NE_EXPR)
5519 {
5520 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5521 return constant_boolean_node (true, truth_type);
5522 }
5523 else
5524 {
5525 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5526 return constant_boolean_node (false, truth_type);
5527 }
5528 }
5529
5530 /* Construct the expression we will return. First get the component
5531 reference we will make. Unless the mask is all ones the width of
5532 that field, perform the mask operation. Then compare with the
5533 merged constant. */
5534 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5535 ll_unsignedp || rl_unsignedp);
5536
5537 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5538 if (! all_ones_mask_p (ll_mask, lnbitsize))
5539 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5540
5541 return build2_loc (loc, wanted_code, truth_type, result,
5542 const_binop (BIT_IOR_EXPR, l_const, r_const));
5543 }
5544 \f
5545 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5546 constant. */
5547
5548 static tree
5549 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5550 tree op0, tree op1)
5551 {
5552 tree arg0 = op0;
5553 enum tree_code op_code;
5554 tree comp_const;
5555 tree minmax_const;
5556 int consts_equal, consts_lt;
5557 tree inner;
5558
5559 STRIP_SIGN_NOPS (arg0);
5560
5561 op_code = TREE_CODE (arg0);
5562 minmax_const = TREE_OPERAND (arg0, 1);
5563 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5564 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5565 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5566 inner = TREE_OPERAND (arg0, 0);
5567
5568 /* If something does not permit us to optimize, fail by returning NULL_TREE. */
5569 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5570 || TREE_CODE (comp_const) != INTEGER_CST
5571 || TREE_OVERFLOW (comp_const)
5572 || TREE_CODE (minmax_const) != INTEGER_CST
5573 || TREE_OVERFLOW (minmax_const))
5574 return NULL_TREE;
5575
5576 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5577 and GT_EXPR, doing the rest with recursive calls using logical
5578 simplifications. */
5579 switch (code)
5580 {
5581 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5582 {
5583 tree tem
5584 = optimize_minmax_comparison (loc,
5585 invert_tree_comparison (code, false),
5586 type, op0, op1);
5587 if (tem)
5588 return invert_truthvalue_loc (loc, tem);
5589 return NULL_TREE;
5590 }
5591
5592 case GE_EXPR:
5593 return
5594 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5595 optimize_minmax_comparison
5596 (loc, EQ_EXPR, type, arg0, comp_const),
5597 optimize_minmax_comparison
5598 (loc, GT_EXPR, type, arg0, comp_const));
5599
5600 case EQ_EXPR:
5601 if (op_code == MAX_EXPR && consts_equal)
5602 /* MAX (X, 0) == 0 -> X <= 0 */
5603 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5604
5605 else if (op_code == MAX_EXPR && consts_lt)
5606 /* MAX (X, 0) == 5 -> X == 5 */
5607 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5608
5609 else if (op_code == MAX_EXPR)
5610 /* MAX (X, 0) == -1 -> false */
5611 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5612
5613 else if (consts_equal)
5614 /* MIN (X, 0) == 0 -> X >= 0 */
5615 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5616
5617 else if (consts_lt)
5618 /* MIN (X, 0) == 5 -> false */
5619 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5620
5621 else
5622 /* MIN (X, 0) == -1 -> X == -1 */
5623 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5624
5625 case GT_EXPR:
5626 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5627 /* MAX (X, 0) > 0 -> X > 0
5628 MAX (X, 0) > 5 -> X > 5 */
5629 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5630
5631 else if (op_code == MAX_EXPR)
5632 /* MAX (X, 0) > -1 -> true */
5633 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5634
5635 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5636 /* MIN (X, 0) > 0 -> false
5637 MIN (X, 0) > 5 -> false */
5638 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5639
5640 else
5641 /* MIN (X, 0) > -1 -> X > -1 */
5642 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5643
5644 default:
5645 return NULL_TREE;
5646 }
5647 }
5648 \f
5649 /* T is an integer expression that is being multiplied or divided by, or
5650 taken modulo, a constant C (CODE says which operation and what kind of
5651 divide or modulus). See if we can eliminate that operation by folding it with
5652 other operations already in T. WIDE_TYPE, if non-null, is a type that
5653 should be used for the computation if wider than our type.
5654
5655 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5656 (X * 2) + (Y * 4). We must, however, be assured that either the original
5657 expression would not overflow or that overflow is undefined for the type
5658 in the language in question.
5659
5660 If we return a non-null expression, it is an equivalent form of the
5661 original computation, but need not be in the original type.
5662
5663 We set *STRICT_OVERFLOW_P to true if the return value depends on
5664 signed overflow being undefined. Otherwise we do not change
5665 *STRICT_OVERFLOW_P. */
5666
5667 static tree
5668 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5669 bool *strict_overflow_p)
5670 {
5671 /* To avoid exponential search depth, refuse to allow recursion past
5672 three levels. Beyond that (1) it's highly unlikely that we'll find
5673 something interesting and (2) we've probably processed it before
5674 when we built the inner expression. */
5675
5676 static int depth;
5677 tree ret;
5678
5679 if (depth > 3)
5680 return NULL;
5681
5682 depth++;
5683 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5684 depth--;
5685
5686 return ret;
5687 }
5688
5689 static tree
5690 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5691 bool *strict_overflow_p)
5692 {
5693 tree type = TREE_TYPE (t);
5694 enum tree_code tcode = TREE_CODE (t);
5695 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5696 > GET_MODE_SIZE (TYPE_MODE (type)))
5697 ? wide_type : type);
5698 tree t1, t2;
5699 int same_p = tcode == code;
5700 tree op0 = NULL_TREE, op1 = NULL_TREE;
5701 bool sub_strict_overflow_p;
5702
5703 /* Don't deal with constants of zero here; they confuse the code below. */
5704 if (integer_zerop (c))
5705 return NULL_TREE;
5706
5707 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5708 op0 = TREE_OPERAND (t, 0);
5709
5710 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5711 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5712
5713 /* Note that we need not handle conditional operations here since fold
5714 already handles those cases. So just do arithmetic here. */
5715 switch (tcode)
5716 {
5717 case INTEGER_CST:
5718 /* For a constant, we can always simplify if we are a multiply
5719 or (for divide and modulus) if it is a multiple of our constant. */
5720 if (code == MULT_EXPR
5721 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5722 return const_binop (code, fold_convert (ctype, t),
5723 fold_convert (ctype, c));
5724 break;
5725
5726 CASE_CONVERT: case NON_LVALUE_EXPR:
5727 /* If op0 is an expression ... */
5728 if ((COMPARISON_CLASS_P (op0)
5729 || UNARY_CLASS_P (op0)
5730 || BINARY_CLASS_P (op0)
5731 || VL_EXP_CLASS_P (op0)
5732 || EXPRESSION_CLASS_P (op0))
5733 /* ... and has wrapping overflow, and its type is smaller
5734 than ctype, then we cannot pass through as widening. */
5735 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5736 && (TYPE_PRECISION (ctype)
5737 > TYPE_PRECISION (TREE_TYPE (op0))))
5738 /* ... or this is a truncation (t is narrower than op0),
5739 then we cannot pass through this narrowing. */
5740 || (TYPE_PRECISION (type)
5741 < TYPE_PRECISION (TREE_TYPE (op0)))
5742 /* ... or signedness changes for division or modulus,
5743 then we cannot pass through this conversion. */
5744 || (code != MULT_EXPR
5745 && (TYPE_UNSIGNED (ctype)
5746 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5747 /* ... or has undefined overflow while the converted-to
5748 type has not, we cannot do the operation in the inner type
5749 as that would introduce undefined overflow. */
5750 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5751 && !TYPE_OVERFLOW_UNDEFINED (type))))
5752 break;
5753
5754 /* Pass the constant down and see if we can make a simplification. If
5755 we can, replace this expression with the inner simplification for
5756 possible later conversion to our or some other type. */
5757 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5758 && TREE_CODE (t2) == INTEGER_CST
5759 && !TREE_OVERFLOW (t2)
5760 && (0 != (t1 = extract_muldiv (op0, t2, code,
5761 code == MULT_EXPR
5762 ? ctype : NULL_TREE,
5763 strict_overflow_p))))
5764 return t1;
5765 break;
5766
5767 case ABS_EXPR:
5768 /* If widening the type changes it from signed to unsigned, then we
5769 must avoid building ABS_EXPR itself as unsigned. */
5770 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5771 {
5772 tree cstype = (*signed_type_for) (ctype);
5773 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5774 != 0)
5775 {
5776 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5777 return fold_convert (ctype, t1);
5778 }
5779 break;
5780 }
5781 /* If the constant is negative, we cannot simplify this. */
5782 if (tree_int_cst_sgn (c) == -1)
5783 break;
5784 /* FALLTHROUGH */
5785 case NEGATE_EXPR:
5786 /* For division and modulus, type can't be unsigned, as e.g.
5787 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5788 For signed types, even with wrapping overflow, this is fine. */
5789 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5790 break;
5791 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5792 != 0)
5793 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5794 break;
5795
5796 case MIN_EXPR: case MAX_EXPR:
5797 /* If widening the type changes the signedness, then we can't perform
5798 this optimization as that changes the result. */
5799 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5800 break;
5801
5802 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5803 sub_strict_overflow_p = false;
5804 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5805 &sub_strict_overflow_p)) != 0
5806 && (t2 = extract_muldiv (op1, c, code, wide_type,
5807 &sub_strict_overflow_p)) != 0)
5808 {
5809 if (tree_int_cst_sgn (c) < 0)
5810 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5811 if (sub_strict_overflow_p)
5812 *strict_overflow_p = true;
5813 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5814 fold_convert (ctype, t2));
5815 }
5816 break;
5817
5818 case LSHIFT_EXPR: case RSHIFT_EXPR:
5819 /* If the second operand is constant, this is a multiplication
5820 or floor division by a power of two, so we can treat it that
5821 way unless the multiplier or divisor overflows. Signed
5822 left-shift overflow is implementation-defined rather than
5823 undefined in C90, so do not convert signed left shift into
5824 multiplication. */
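/* For example, "(unsigned) x << 3" is treated as x * 8 and "x >> 2"
   as x / 4 with FLOOR_DIV_EXPR semantics.  */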
5825 if (TREE_CODE (op1) == INTEGER_CST
5826 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5827 /* const_binop may not detect overflow correctly,
5828 so check for it explicitly here. */
5829 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5830 && TREE_INT_CST_HIGH (op1) == 0
5831 && 0 != (t1 = fold_convert (ctype,
5832 const_binop (LSHIFT_EXPR,
5833 size_one_node,
5834 op1)))
5835 && !TREE_OVERFLOW (t1))
5836 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5837 ? MULT_EXPR : FLOOR_DIV_EXPR,
5838 ctype,
5839 fold_convert (ctype, op0),
5840 t1),
5841 c, code, wide_type, strict_overflow_p);
5842 break;
5843
5844 case PLUS_EXPR: case MINUS_EXPR:
5845 /* See if we can eliminate the operation on both sides. If we can, we
5846 can return a new PLUS or MINUS. If we can't, the only remaining
5847 cases where we can do anything are if the second operand is a
5848 constant. */
5849 sub_strict_overflow_p = false;
5850 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5851 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5852 if (t1 != 0 && t2 != 0
5853 && (code == MULT_EXPR
5854 /* If not multiplication, we can only do this if both operands
5855 are divisible by c. */
5856 || (multiple_of_p (ctype, op0, c)
5857 && multiple_of_p (ctype, op1, c))))
5858 {
5859 if (sub_strict_overflow_p)
5860 *strict_overflow_p = true;
5861 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5862 fold_convert (ctype, t2));
5863 }
5864
5865 /* If this was a subtraction, negate OP1 and set it to be an addition.
5866 This simplifies the logic below. */
5867 if (tcode == MINUS_EXPR)
5868 {
5869 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5870 /* If OP1 was not easily negatable, the constant may be OP0. */
5871 if (TREE_CODE (op0) == INTEGER_CST)
5872 {
5873 tree tem = op0;
5874 op0 = op1;
5875 op1 = tem;
5876 tem = t1;
5877 t1 = t2;
5878 t2 = tem;
5879 }
5880 }
5881
5882 if (TREE_CODE (op1) != INTEGER_CST)
5883 break;
5884
5885 /* If either OP1 or C is negative, this optimization is not safe for
5886 some of the division and remainder types while for others we need
5887 to change the code. */
5888 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5889 {
5890 if (code == CEIL_DIV_EXPR)
5891 code = FLOOR_DIV_EXPR;
5892 else if (code == FLOOR_DIV_EXPR)
5893 code = CEIL_DIV_EXPR;
5894 else if (code != MULT_EXPR
5895 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5896 break;
5897 }
5898
5899 /* If it's a multiply or a division/modulus operation of a multiple
5900 of our constant, do the operation and verify it doesn't overflow. */
5901 if (code == MULT_EXPR
5902 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5903 {
5904 op1 = const_binop (code, fold_convert (ctype, op1),
5905 fold_convert (ctype, c));
5906 /* We allow the constant to overflow with wrapping semantics. */
5907 if (op1 == 0
5908 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5909 break;
5910 }
5911 else
5912 break;
5913
5914 /* If we have an unsigned type, we cannot widen the operation since it
5915 will change the result if the original computation overflowed. */
5916 if (TYPE_UNSIGNED (ctype) && ctype != type)
5917 break;
5918
5919 /* If we were able to eliminate our operation from the first side,
5920 apply our operation to the second side and reform the PLUS. */
5921 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5922 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5923
5924 /* The last case is if we are a multiply. In that case, we can
5925 apply the distributive law to commute the multiply and addition
5926 if the multiplication of the constants doesn't overflow
5927 and overflow is defined. With undefined overflow
5928 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5929 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5930 return fold_build2 (tcode, ctype,
5931 fold_build2 (code, ctype,
5932 fold_convert (ctype, op0),
5933 fold_convert (ctype, c)),
5934 op1);
5935
5936 break;
5937
5938 case MULT_EXPR:
5939 /* We have a special case here if we are doing something like
5940 (C * 8) % 4 since we know that's zero. */
5941 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5942 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5943 /* If the multiplication can overflow we cannot optimize this. */
5944 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5945 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5946 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5947 {
5948 *strict_overflow_p = true;
5949 return omit_one_operand (type, integer_zero_node, op0);
5950 }
5951
5952 /* ... fall through ... */
5953
5954 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5955 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
5956 /* If we can extract our operation from the LHS, do so and return a
5957 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
5958 do something only if the second operand is a constant. */
5959 if (same_p
5960 && (t1 = extract_muldiv (op0, c, code, wide_type,
5961 strict_overflow_p)) != 0)
5962 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5963 fold_convert (ctype, op1));
5964 else if (tcode == MULT_EXPR && code == MULT_EXPR
5965 && (t1 = extract_muldiv (op1, c, code, wide_type,
5966 strict_overflow_p)) != 0)
5967 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5968 fold_convert (ctype, t1));
5969 else if (TREE_CODE (op1) != INTEGER_CST)
5970 return 0;
5971
5972 /* If these are the same operation types, we can associate them
5973 assuming no overflow. */
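/* For example, "(x * 4) * 2" associates to "x * 8" and "(x / 2) / 3"
   to "x / 6", provided the product of the constants fits in CTYPE
   without overflow.  */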
5974 if (tcode == code)
5975 {
5976 double_int mul;
5977 bool overflow_p;
5978 unsigned prec = TYPE_PRECISION (ctype);
5979 bool uns = TYPE_UNSIGNED (ctype);
5980 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
5981 double_int dic = tree_to_double_int (c).ext (prec, uns);
5982 mul = diop1.mul_with_sign (dic, false, &overflow_p);
5983 overflow_p = ((!uns && overflow_p)
5984 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
5985 if (!double_int_fits_to_tree_p (ctype, mul)
5986 && ((uns && tcode != MULT_EXPR) || !uns))
5987 overflow_p = 1;
5988 if (!overflow_p)
5989 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
5990 double_int_to_tree (ctype, mul));
5991 }
5992
5993 /* If these operations "cancel" each other, we have the main
5994 optimizations of this pass, which occur when either constant is a
5995 multiple of the other, in which case we replace this with an
5996 operation of either CODE or TCODE.
5997
5998 If we have an unsigned type, we cannot do this since it will change
5999 the result if the original computation overflowed. */
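/* For example, "(x * 6) / 2" becomes "x * 3" and "(x * 2) / 6"
   becomes "x / 3"; this relies on signed overflow being undefined,
   since the inner multiplication must not have wrapped.  */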
6000 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6001 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6002 || (tcode == MULT_EXPR
6003 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6004 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6005 && code != MULT_EXPR)))
6006 {
6007 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6008 {
6009 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6010 *strict_overflow_p = true;
6011 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype,
6013 const_binop (TRUNC_DIV_EXPR,
6014 op1, c)));
6015 }
6016 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6017 {
6018 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6019 *strict_overflow_p = true;
6020 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6021 fold_convert (ctype,
6022 const_binop (TRUNC_DIV_EXPR,
6023 c, op1)));
6024 }
6025 }
6026 break;
6027
6028 default:
6029 break;
6030 }
6031
6032 return 0;
6033 }
6034 \f
6035 /* Return a node which has the indicated constant VALUE (either 0 or
6036 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6037 and is of the indicated TYPE. */
6038
6039 tree
6040 constant_boolean_node (bool value, tree type)
6041 {
6042 if (type == integer_type_node)
6043 return value ? integer_one_node : integer_zero_node;
6044 else if (type == boolean_type_node)
6045 return value ? boolean_true_node : boolean_false_node;
6046 else if (TREE_CODE (type) == VECTOR_TYPE)
6047 return build_vector_from_val (type,
6048 build_int_cst (TREE_TYPE (type),
6049 value ? -1 : 0));
6050 else
6051 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6052 }
6053
6054
6055 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6056 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6057 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6058 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6059 COND is the first argument to CODE; otherwise (as in the example
6060 given here), it is the second argument. TYPE is the type of the
6061 original expression. Return NULL_TREE if no simplification is
6062 possible. */
6063
6064 static tree
6065 fold_binary_op_with_conditional_arg (location_t loc,
6066 enum tree_code code,
6067 tree type, tree op0, tree op1,
6068 tree cond, tree arg, int cond_first_p)
6069 {
6070 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6071 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6072 tree test, true_value, false_value;
6073 tree lhs = NULL_TREE;
6074 tree rhs = NULL_TREE;
6075 enum tree_code cond_code = COND_EXPR;
6076
6077 if (TREE_CODE (cond) == COND_EXPR
6078 || TREE_CODE (cond) == VEC_COND_EXPR)
6079 {
6080 test = TREE_OPERAND (cond, 0);
6081 true_value = TREE_OPERAND (cond, 1);
6082 false_value = TREE_OPERAND (cond, 2);
6083 /* If this operand is an expression that throws (and therefore has
6084 void type), it does not make sense to try to perform a logical
6085 or arithmetic operation involving it. */
6086 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6087 lhs = true_value;
6088 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6089 rhs = false_value;
6090 }
6091 else
6092 {
6093 tree testtype = TREE_TYPE (cond);
6094 test = cond;
6095 true_value = constant_boolean_node (true, testtype);
6096 false_value = constant_boolean_node (false, testtype);
6097 }
6098
6099 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6100 cond_code = VEC_COND_EXPR;
6101
6102 /* This transformation is only worthwhile if we don't have to wrap ARG
6103 in a SAVE_EXPR and the operation can be simplified without recursing
6104 on at least one of the branches once it is pushed inside the COND_EXPR. */
6105 if (!TREE_CONSTANT (arg)
6106 && (TREE_SIDE_EFFECTS (arg)
6107 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6108 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6109 return NULL_TREE;
6110
6111 arg = fold_convert_loc (loc, arg_type, arg);
6112 if (lhs == 0)
6113 {
6114 true_value = fold_convert_loc (loc, cond_type, true_value);
6115 if (cond_first_p)
6116 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6117 else
6118 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6119 }
6120 if (rhs == 0)
6121 {
6122 false_value = fold_convert_loc (loc, cond_type, false_value);
6123 if (cond_first_p)
6124 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6125 else
6126 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6127 }
6128
6129 /* Check that we have simplified at least one of the branches. */
6130 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6131 return NULL_TREE;
6132
6133 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6134 }
6135
6136 \f
6137 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6138
6139 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6140 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6141 ADDEND is the same as X.
6142
6143 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6144 and finite. The problematic cases are when X is zero, and its mode
6145 has signed zeros. In the case of rounding towards -infinity,
6146 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6147 modes, X + 0 is not the same as X because -0 + 0 is 0. */
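/* For example, with X = -0.0 in round-to-nearest, X + 0.0 yields +0.0
   rather than X, whereas X - 0.0 yields X for every X unless rounding
   towards -infinity, where +0.0 - 0.0 yields -0.0.  */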
6148
6149 bool
6150 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6151 {
6152 if (!real_zerop (addend))
6153 return false;
6154
6155 /* Don't allow the fold with -fsignaling-nans. */
6156 if (HONOR_SNANS (TYPE_MODE (type)))
6157 return false;
6158
6159 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6160 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6161 return true;
6162
6163 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6164 if (TREE_CODE (addend) == REAL_CST
6165 && REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6166 negate = !negate;
6167
6168 /* The mode has signed zeros, and we have to honor their sign.
6169 In this situation, there is only one case we can return true for.
6170 X - 0 is the same as X unless rounding towards -infinity is
6171 supported. */
6172 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6173 }
6174
6175 /* Subroutine of fold() that checks comparisons of built-in math
6176 functions against real constants.
6177
6178 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6179 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6180 is the type of the result and ARG0 and ARG1 are the operands of the
6181 comparison. ARG1 must be a TREE_REAL_CST.
6182
6183 The function returns the constant folded tree if a simplification
6184 can be made, and NULL_TREE otherwise. */
6185
6186 static tree
6187 fold_mathfn_compare (location_t loc,
6188 enum built_in_function fcode, enum tree_code code,
6189 tree type, tree arg0, tree arg1)
6190 {
6191 REAL_VALUE_TYPE c;
6192
6193 if (BUILTIN_SQRT_P (fcode))
6194 {
6195 tree arg = CALL_EXPR_ARG (arg0, 0);
6196 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6197
6198 c = TREE_REAL_CST (arg1);
6199 if (REAL_VALUE_NEGATIVE (c))
6200 {
6201 /* sqrt(x) < y, <= y or == y is always false, if y is negative. */
6202 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6203 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6204
6205 /* sqrt(x) > y is always true, if y is negative and we
6206 don't care about NaNs, i.e. negative values of x. */
6207 if (code == NE_EXPR || !HONOR_NANS (mode))
6208 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6209
6210 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6211 return fold_build2_loc (loc, GE_EXPR, type, arg,
6212 build_real (TREE_TYPE (arg), dconst0));
6213 }
6214 else if (code == GT_EXPR || code == GE_EXPR)
6215 {
6216 REAL_VALUE_TYPE c2;
6217
6218 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6219 real_convert (&c2, mode, &c2);
6220
6221 if (REAL_VALUE_ISINF (c2))
6222 {
6223 /* sqrt(x) > y is x == +Inf, when y is very large. */
6224 if (HONOR_INFINITIES (mode))
6225 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6226 build_real (TREE_TYPE (arg), c2));
6227
6228 /* sqrt(x) > y is always false, when y is very large
6229 and we don't care about infinities. */
6230 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6231 }
6232
6233 /* sqrt(x) > c is the same as x > c*c. */
6234 return fold_build2_loc (loc, code, type, arg,
6235 build_real (TREE_TYPE (arg), c2));
6236 }
6237 else if (code == LT_EXPR || code == LE_EXPR)
6238 {
6239 REAL_VALUE_TYPE c2;
6240
6241 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6242 real_convert (&c2, mode, &c2);
6243
6244 if (REAL_VALUE_ISINF (c2))
6245 {
6246 /* sqrt(x) < y is always true, when y is a very large
6247 value and we don't care about NaNs or Infinities. */
6248 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6249 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6250
6251 /* sqrt(x) < y is x != +Inf when y is very large and we
6252 don't care about NaNs. */
6253 if (! HONOR_NANS (mode))
6254 return fold_build2_loc (loc, NE_EXPR, type, arg,
6255 build_real (TREE_TYPE (arg), c2));
6256
6257 /* sqrt(x) < y is x >= 0 when y is very large and we
6258 don't care about Infinities. */
6259 if (! HONOR_INFINITIES (mode))
6260 return fold_build2_loc (loc, GE_EXPR, type, arg,
6261 build_real (TREE_TYPE (arg), dconst0));
6262
6263 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6264 arg = save_expr (arg);
6265 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6266 fold_build2_loc (loc, GE_EXPR, type, arg,
6267 build_real (TREE_TYPE (arg),
6268 dconst0)),
6269 fold_build2_loc (loc, NE_EXPR, type, arg,
6270 build_real (TREE_TYPE (arg),
6271 c2)));
6272 }
6273
6274 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6275 if (! HONOR_NANS (mode))
6276 return fold_build2_loc (loc, code, type, arg,
6277 build_real (TREE_TYPE (arg), c2));
6278
6279 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6280 arg = save_expr (arg);
6281 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6282 fold_build2_loc (loc, GE_EXPR, type, arg,
6283 build_real (TREE_TYPE (arg),
6284 dconst0)),
6285 fold_build2_loc (loc, code, type, arg,
6286 build_real (TREE_TYPE (arg),
6287 c2)));
6288 }
6289 }
6290
6291 return NULL_TREE;
6292 }
6293
6294 /* Subroutine of fold() that optimizes comparisons against Infinities,
6295 either +Inf or -Inf.
6296
6297 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6298 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6299 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6300
6301 The function returns the constant folded tree if a simplification
6302 can be made, and NULL_TREE otherwise. */
6303
6304 static tree
6305 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6306 tree arg0, tree arg1)
6307 {
6308 enum machine_mode mode;
6309 REAL_VALUE_TYPE max;
6310 tree temp;
6311 bool neg;
6312
6313 mode = TYPE_MODE (TREE_TYPE (arg0));
6314
6315 /* For negative infinity swap the sense of the comparison. */
6316 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6317 if (neg)
6318 code = swap_tree_comparison (code);
6319
6320 switch (code)
6321 {
6322 case GT_EXPR:
6323 /* x > +Inf is always false, if we ignore sNaNs. */
6324 if (HONOR_SNANS (mode))
6325 return NULL_TREE;
6326 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6327
6328 case LE_EXPR:
6329 /* x <= +Inf is always true, if we don't care about NaNs. */
6330 if (! HONOR_NANS (mode))
6331 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6332
6333 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6334 arg0 = save_expr (arg0);
6335 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6336
6337 case EQ_EXPR:
6338 case GE_EXPR:
6339 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6340 real_maxval (&max, neg, mode);
6341 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6342 arg0, build_real (TREE_TYPE (arg0), max));
6343
6344 case LT_EXPR:
6345 /* x < +Inf is always equal to x <= DBL_MAX. */
6346 real_maxval (&max, neg, mode);
6347 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6348 arg0, build_real (TREE_TYPE (arg0), max));
6349
6350 case NE_EXPR:
6351 /* x != +Inf is always equal to !(x > DBL_MAX). */
6352 real_maxval (&max, neg, mode);
6353 if (! HONOR_NANS (mode))
6354 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6355 arg0, build_real (TREE_TYPE (arg0), max));
6356
6357 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6358 arg0, build_real (TREE_TYPE (arg0), max));
6359 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6360
6361 default:
6362 break;
6363 }
6364
6365 return NULL_TREE;
6366 }
6367
6368 /* Subroutine of fold() that optimizes comparisons of a division by
6369 a nonzero integer constant against an integer constant, i.e.
6370 X/C1 op C2.
6371
6372 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6373 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6374 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6375
6376 The function returns the constant folded tree if a simplification
6377 can be made, and NULL_TREE otherwise. */
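/* For example, for signed X, "X / 3 == 2" holds exactly for X in
   [6, 8]: prod = 3 * 2 = 6, tmp = 3 - 1 = 2, so lo = 6 and
   hi = prod + tmp = 8, and a range check for 6 <= X <= 8 is built.  */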
6378
6379 static tree
6380 fold_div_compare (location_t loc,
6381 enum tree_code code, tree type, tree arg0, tree arg1)
6382 {
6383 tree prod, tmp, hi, lo;
6384 tree arg00 = TREE_OPERAND (arg0, 0);
6385 tree arg01 = TREE_OPERAND (arg0, 1);
6386 double_int val;
6387 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6388 bool neg_overflow;
6389 bool overflow;
6390
6391 /* We have to do this the hard way to detect unsigned overflow.
6392 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6393 val = TREE_INT_CST (arg01)
6394 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6395 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6396 neg_overflow = false;
6397
6398 if (unsigned_p)
6399 {
6400 tmp = int_const_binop (MINUS_EXPR, arg01,
6401 build_int_cst (TREE_TYPE (arg01), 1));
6402 lo = prod;
6403
6404 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6405 val = TREE_INT_CST (prod)
6406 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6407 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6408 -1, overflow | TREE_OVERFLOW (prod));
6409 }
6410 else if (tree_int_cst_sgn (arg01) >= 0)
6411 {
6412 tmp = int_const_binop (MINUS_EXPR, arg01,
6413 build_int_cst (TREE_TYPE (arg01), 1));
6414 switch (tree_int_cst_sgn (arg1))
6415 {
6416 case -1:
6417 neg_overflow = true;
6418 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6419 hi = prod;
6420 break;
6421
6422 case 0:
6423 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6424 hi = tmp;
6425 break;
6426
6427 case 1:
6428 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6429 lo = prod;
6430 break;
6431
6432 default:
6433 gcc_unreachable ();
6434 }
6435 }
6436 else
6437 {
6438 /* A negative divisor reverses the relational operators. */
6439 code = swap_tree_comparison (code);
6440
6441 tmp = int_const_binop (PLUS_EXPR, arg01,
6442 build_int_cst (TREE_TYPE (arg01), 1));
6443 switch (tree_int_cst_sgn (arg1))
6444 {
6445 case -1:
6446 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6447 lo = prod;
6448 break;
6449
6450 case 0:
6451 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6452 lo = tmp;
6453 break;
6454
6455 case 1:
6456 neg_overflow = true;
6457 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6458 hi = prod;
6459 break;
6460
6461 default:
6462 gcc_unreachable ();
6463 }
6464 }
6465
6466 switch (code)
6467 {
6468 case EQ_EXPR:
6469 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6470 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6471 if (TREE_OVERFLOW (hi))
6472 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6473 if (TREE_OVERFLOW (lo))
6474 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6475 return build_range_check (loc, type, arg00, 1, lo, hi);
6476
6477 case NE_EXPR:
6478 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6479 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6480 if (TREE_OVERFLOW (hi))
6481 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6482 if (TREE_OVERFLOW (lo))
6483 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6484 return build_range_check (loc, type, arg00, 0, lo, hi);
6485
6486 case LT_EXPR:
6487 if (TREE_OVERFLOW (lo))
6488 {
6489 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6490 return omit_one_operand_loc (loc, type, tmp, arg00);
6491 }
6492 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6493
6494 case LE_EXPR:
6495 if (TREE_OVERFLOW (hi))
6496 {
6497 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6498 return omit_one_operand_loc (loc, type, tmp, arg00);
6499 }
6500 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6501
6502 case GT_EXPR:
6503 if (TREE_OVERFLOW (hi))
6504 {
6505 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6506 return omit_one_operand_loc (loc, type, tmp, arg00);
6507 }
6508 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6509
6510 case GE_EXPR:
6511 if (TREE_OVERFLOW (lo))
6512 {
6513 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6514 return omit_one_operand_loc (loc, type, tmp, arg00);
6515 }
6516 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6517
6518 default:
6519 break;
6520 }
6521
6522 return NULL_TREE;
6523 }
6524
6525
6526 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6527 equality/inequality test, then return a simplified form of the test
6528 using a sign testing. Otherwise return NULL. TYPE is the desired
6529 result type. */
6530
6531 static tree
6532 fold_single_bit_test_into_sign_test (location_t loc,
6533 enum tree_code code, tree arg0, tree arg1,
6534 tree result_type)
6535 {
6536 /* If this is testing a single bit, we can optimize the test. */
6537 if ((code == NE_EXPR || code == EQ_EXPR)
6538 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6539 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6540 {
6541 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6542 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6543 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6544
6545 if (arg00 != NULL_TREE
6546 /* This is only a win if casting to a signed type is cheap,
6547 i.e. when arg00's type is not a partial mode. */
6548 && TYPE_PRECISION (TREE_TYPE (arg00))
6549 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6550 {
6551 tree stype = signed_type_for (TREE_TYPE (arg00));
6552 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6553 result_type,
6554 fold_convert_loc (loc, stype, arg00),
6555 build_int_cst (stype, 0));
6556 }
6557 }
6558
6559 return NULL_TREE;
6560 }
6561
6562 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6563 equality/inequality test, then return a simplified form of
6564 the test using shifts and logical operations. Otherwise return
6565 NULL. TYPE is the desired result type. */
6566
6567 tree
6568 fold_single_bit_test (location_t loc, enum tree_code code,
6569 tree arg0, tree arg1, tree result_type)
6570 {
6571 /* If this is testing a single bit, we can optimize the test. */
6572 if ((code == NE_EXPR || code == EQ_EXPR)
6573 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6574 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6575 {
6576 tree inner = TREE_OPERAND (arg0, 0);
6577 tree type = TREE_TYPE (arg0);
6578 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6579 enum machine_mode operand_mode = TYPE_MODE (type);
6580 int ops_unsigned;
6581 tree signed_type, unsigned_type, intermediate_type;
6582 tree tem, one;
6583
6584 /* First, see if we can fold the single bit test into a sign-bit
6585 test. */
6586 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6587 result_type);
6588 if (tem)
6589 return tem;
6590
6591 /* Otherwise we have (A & C) != 0 where C is a single bit,
6592 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6593 Similarly for (A & C) == 0. */
6594
6595 /* If INNER is a right shift by a constant and the shift count plus
6596 BITNUM does not exceed the precision of TYPE, adjust BITNUM and INNER. */
6597 if (TREE_CODE (inner) == RSHIFT_EXPR
6598 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6599 && TREE_INT_CST_HIGH (TREE_OPERAND (inner, 1)) == 0
6600 && bitnum < TYPE_PRECISION (type)
6601 && 0 > compare_tree_int (TREE_OPERAND (inner, 1),
6602 bitnum - TYPE_PRECISION (type)))
6603 {
6604 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6605 inner = TREE_OPERAND (inner, 0);
6606 }
6607
6608 /* If we are going to be able to omit the AND below, we must do our
6609 operations as unsigned. If we must use the AND, we have a choice.
6610 Normally unsigned is faster, but for some machines signed is. */
6611 #ifdef LOAD_EXTEND_OP
6612 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6613 && !flag_syntax_only) ? 0 : 1;
6614 #else
6615 ops_unsigned = 1;
6616 #endif
6617
6618 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6619 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6620 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6621 inner = fold_convert_loc (loc, intermediate_type, inner);
6622
6623 if (bitnum != 0)
6624 inner = build2 (RSHIFT_EXPR, intermediate_type,
6625 inner, size_int (bitnum));
6626
6627 one = build_int_cst (intermediate_type, 1);
6628
6629 if (code == EQ_EXPR)
6630 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6631
6632 /* Put the AND last so it can combine with more things. */
6633 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6634
6635 /* Make sure to return the proper type. */
6636 inner = fold_convert_loc (loc, result_type, inner);
6637
6638 return inner;
6639 }
6640 return NULL_TREE;
6641 }
6642
6643 /* Check whether we are allowed to reorder operands arg0 and arg1,
6644 such that the evaluation of arg1 occurs before arg0. */
6645
6646 static bool
6647 reorder_operands_p (const_tree arg0, const_tree arg1)
6648 {
6649 if (! flag_evaluation_order)
6650 return true;
6651 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6652 return true;
6653 return ! TREE_SIDE_EFFECTS (arg0)
6654 && ! TREE_SIDE_EFFECTS (arg1);
6655 }
6656
6657 /* Test whether it is preferable to swap two operands, ARG0 and
6658 ARG1, for example because ARG0 is an integer constant and ARG1
6659 isn't. If REORDER is true, only recommend swapping if we can
6660 evaluate the operands in reverse order. */
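/* For example, callers use this to canonicalize "1 < x" into "x > 1",
   since an INTEGER_CST first operand always recommends a swap.  */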
6661
6662 bool
6663 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6664 {
6665 STRIP_SIGN_NOPS (arg0);
6666 STRIP_SIGN_NOPS (arg1);
6667
6668 if (TREE_CODE (arg1) == INTEGER_CST)
6669 return 0;
6670 if (TREE_CODE (arg0) == INTEGER_CST)
6671 return 1;
6672
6673 if (TREE_CODE (arg1) == REAL_CST)
6674 return 0;
6675 if (TREE_CODE (arg0) == REAL_CST)
6676 return 1;
6677
6678 if (TREE_CODE (arg1) == FIXED_CST)
6679 return 0;
6680 if (TREE_CODE (arg0) == FIXED_CST)
6681 return 1;
6682
6683 if (TREE_CODE (arg1) == COMPLEX_CST)
6684 return 0;
6685 if (TREE_CODE (arg0) == COMPLEX_CST)
6686 return 1;
6687
6688 if (TREE_CONSTANT (arg1))
6689 return 0;
6690 if (TREE_CONSTANT (arg0))
6691 return 1;
6692
6693 if (optimize_function_for_size_p (cfun))
6694 return 0;
6695
6696 if (reorder && flag_evaluation_order
6697 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6698 return 0;
6699
6700 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6701 for commutative and comparison operators. Ensuring a canonical
6702 form allows the optimizers to find additional redundancies without
6703 having to explicitly check for both orderings. */
6704 if (TREE_CODE (arg0) == SSA_NAME
6705 && TREE_CODE (arg1) == SSA_NAME
6706 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6707 return 1;
6708
6709 /* Put SSA_NAMEs last. */
6710 if (TREE_CODE (arg1) == SSA_NAME)
6711 return 0;
6712 if (TREE_CODE (arg0) == SSA_NAME)
6713 return 1;
6714
6715 /* Put variables last. */
6716 if (DECL_P (arg1))
6717 return 0;
6718 if (DECL_P (arg0))
6719 return 1;
6720
6721 return 0;
6722 }
6723
6724 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6725 ARG0 is extended to a wider type. */
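/* For example, if C has type unsigned char, "(int) c < 300" is always
   true: 300 exceeds the upper bound 255 of the narrower type, which
   the range test below detects.  */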
6726
6727 static tree
6728 fold_widened_comparison (location_t loc, enum tree_code code,
6729 tree type, tree arg0, tree arg1)
6730 {
6731 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6732 tree arg1_unw;
6733 tree shorter_type, outer_type;
6734 tree min, max;
6735 bool above, below;
6736
6737 if (arg0_unw == arg0)
6738 return NULL_TREE;
6739 shorter_type = TREE_TYPE (arg0_unw);
6740
6741 #ifdef HAVE_canonicalize_funcptr_for_compare
6742 /* Disable this optimization if we're casting a function pointer
6743 type on targets that require function pointer canonicalization. */
6744 if (HAVE_canonicalize_funcptr_for_compare
6745 && TREE_CODE (shorter_type) == POINTER_TYPE
6746 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6747 return NULL_TREE;
6748 #endif
6749
6750 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6751 return NULL_TREE;
6752
6753 arg1_unw = get_unwidened (arg1, NULL_TREE);
6754
6755 /* If possible, express the comparison in the shorter mode. */
6756 if ((code == EQ_EXPR || code == NE_EXPR
6757 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6758 && (TREE_TYPE (arg1_unw) == shorter_type
6759 || ((TYPE_PRECISION (shorter_type)
6760 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6761 && (TYPE_UNSIGNED (shorter_type)
6762 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6763 || (TREE_CODE (arg1_unw) == INTEGER_CST
6764 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6765 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6766 && int_fits_type_p (arg1_unw, shorter_type))))
6767 return fold_build2_loc (loc, code, type, arg0_unw,
6768 fold_convert_loc (loc, shorter_type, arg1_unw));
6769
6770 if (TREE_CODE (arg1_unw) != INTEGER_CST
6771 || TREE_CODE (shorter_type) != INTEGER_TYPE
6772 || !int_fits_type_p (arg1_unw, shorter_type))
6773 return NULL_TREE;
6774
6775 /* If we are comparing with an integer that does not fit into the range
6776 of the shorter type, the result is known. */
6777 outer_type = TREE_TYPE (arg1_unw);
6778 min = lower_bound_in_type (outer_type, shorter_type);
6779 max = upper_bound_in_type (outer_type, shorter_type);
6780
6781 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6782 max, arg1_unw));
6783 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6784 arg1_unw, min));
6785
6786 switch (code)
6787 {
6788 case EQ_EXPR:
6789 if (above || below)
6790 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6791 break;
6792
6793 case NE_EXPR:
6794 if (above || below)
6795 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6796 break;
6797
6798 case LT_EXPR:
6799 case LE_EXPR:
6800 if (above)
6801 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6802 else if (below)
6803 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
break;
6804
6805 case GT_EXPR:
6806 case GE_EXPR:
6807 if (above)
6808 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6809 else if (below)
6810 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
break;
6811
6812 default:
6813 break;
6814 }
6815
6816 return NULL_TREE;
6817 }
6818
6819 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6820 ARG0 just the signedness is changed. */
6821
6822 static tree
6823 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6824 tree arg0, tree arg1)
6825 {
6826 tree arg0_inner;
6827 tree inner_type, outer_type;
6828
6829 if (!CONVERT_EXPR_P (arg0))
6830 return NULL_TREE;
6831
6832 outer_type = TREE_TYPE (arg0);
6833 arg0_inner = TREE_OPERAND (arg0, 0);
6834 inner_type = TREE_TYPE (arg0_inner);
6835
6836 #ifdef HAVE_canonicalize_funcptr_for_compare
6837 /* Disable this optimization if we're casting a function pointer
6838 type on targets that require function pointer canonicalization. */
6839 if (HAVE_canonicalize_funcptr_for_compare
6840 && TREE_CODE (inner_type) == POINTER_TYPE
6841 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6842 return NULL_TREE;
6843 #endif
6844
6845 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6846 return NULL_TREE;
6847
6848 if (TREE_CODE (arg1) != INTEGER_CST
6849 && !(CONVERT_EXPR_P (arg1)
6850 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6851 return NULL_TREE;
6852
6853 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6854 && code != NE_EXPR
6855 && code != EQ_EXPR)
6856 return NULL_TREE;
6857
6858 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6859 return NULL_TREE;
6860
6861 if (TREE_CODE (arg1) == INTEGER_CST)
6862 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6863 0, TREE_OVERFLOW (arg1));
6864 else
6865 arg1 = fold_convert_loc (loc, inner_type, arg1);
6866
6867 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6868 }
6869
6870 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6871 the step of the array. Reconstructs s and delta in the case of s *
6872 delta being an integer constant (and thus already folded). ADDR is
6873 the address. OP1 is the multiplicative expression. If the
6874 function succeeds, the new address expression is returned.
6875 Otherwise NULL_TREE is returned. LOC is the location of the
6876 resulting expression. */
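/* For instance (illustrative): with "int a[10]" and 4-byte int,
&a[i] p+ 4 * d has s == 4 matching the array step, so it becomes
&a[i + d]; likewise &a[1] p+ 8 reconstructs delta == 2 from the
already-folded constant and becomes &a[3]. */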
6877
6878 static tree
6879 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6880 {
6881 tree s, delta, step;
6882 tree ref = TREE_OPERAND (addr, 0), pref;
6883 tree ret, pos;
6884 tree itype;
6885 bool mdim = false;
6886
6887 /* Strip the nops that might be added when converting op1 to sizetype. */
6888 STRIP_NOPS (op1);
6889
6890 /* Canonicalize op1 into a possibly non-constant delta
6891 and an INTEGER_CST s. */
6892 if (TREE_CODE (op1) == MULT_EXPR)
6893 {
6894 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6895
6896 STRIP_NOPS (arg0);
6897 STRIP_NOPS (arg1);
6898
6899 if (TREE_CODE (arg0) == INTEGER_CST)
6900 {
6901 s = arg0;
6902 delta = arg1;
6903 }
6904 else if (TREE_CODE (arg1) == INTEGER_CST)
6905 {
6906 s = arg1;
6907 delta = arg0;
6908 }
6909 else
6910 return NULL_TREE;
6911 }
6912 else if (TREE_CODE (op1) == INTEGER_CST)
6913 {
6914 delta = op1;
6915 s = NULL_TREE;
6916 }
6917 else
6918 {
6919 /* Act as if op1 were delta * 1. */
6920 delta = op1;
6921 s = integer_one_node;
6922 }
6923
6924 /* Handle &x.array the same as we would handle &x.array[0]. */
6925 if (TREE_CODE (ref) == COMPONENT_REF
6926 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6927 {
6928 tree domain;
6929
6930 /* Remember if this was a multi-dimensional array. */
6931 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6932 mdim = true;
6933
6934 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6935 if (! domain)
6936 goto cont;
6937 itype = TREE_TYPE (domain);
6938
6939 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6940 if (TREE_CODE (step) != INTEGER_CST)
6941 goto cont;
6942
6943 if (s)
6944 {
6945 if (! tree_int_cst_equal (step, s))
6946 goto cont;
6947 }
6948 else
6949 {
6950 /* Check whether delta is a multiple of step. */
6951 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6952 if (! tmp)
6953 goto cont;
6954 delta = tmp;
6955 }
6956
6957 /* Only fold here if we can verify we do not overflow one
6958 dimension of a multi-dimensional array. */
6959 if (mdim)
6960 {
6961 tree tmp;
6962
6963 if (!TYPE_MIN_VALUE (domain)
6964 || !TYPE_MAX_VALUE (domain)
6965 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
6966 goto cont;
6967
6968 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
6969 fold_convert_loc (loc, itype,
6970 TYPE_MIN_VALUE (domain)),
6971 fold_convert_loc (loc, itype, delta));
6972 if (TREE_CODE (tmp) != INTEGER_CST
6973 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
6974 goto cont;
6975 }
6976
6977 /* We found a suitable component reference. */
6978
6979 pref = TREE_OPERAND (addr, 0);
6980 ret = copy_node (pref);
6981 SET_EXPR_LOCATION (ret, loc);
6982
6983 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
6984 fold_build2_loc
6985 (loc, PLUS_EXPR, itype,
6986 fold_convert_loc (loc, itype,
6987 TYPE_MIN_VALUE
6988 (TYPE_DOMAIN (TREE_TYPE (ref)))),
6989 fold_convert_loc (loc, itype, delta)),
6990 NULL_TREE, NULL_TREE);
6991 return build_fold_addr_expr_loc (loc, ret);
6992 }
6993
6994 cont:
6995
6996 for (;; ref = TREE_OPERAND (ref, 0))
6997 {
6998 if (TREE_CODE (ref) == ARRAY_REF)
6999 {
7000 tree domain;
7001
7002 /* Remember if this was a multi-dimensional array. */
7003 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7004 mdim = true;
7005
7006 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7007 if (! domain)
7008 continue;
7009 itype = TREE_TYPE (domain);
7010
7011 step = array_ref_element_size (ref);
7012 if (TREE_CODE (step) != INTEGER_CST)
7013 continue;
7014
7015 if (s)
7016 {
7017 if (! tree_int_cst_equal (step, s))
7018 continue;
7019 }
7020 else
7021 {
7022 /* Check whether delta is a multiple of step. */
7023 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7024 if (! tmp)
7025 continue;
7026 delta = tmp;
7027 }
7028
7029 /* Only fold here if we can verify we do not overflow one
7030 dimension of a multi-dimensional array. */
7031 if (mdim)
7032 {
7033 tree tmp;
7034
7035 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7036 || !TYPE_MAX_VALUE (domain)
7037 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7038 continue;
7039
7040 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7041 fold_convert_loc (loc, itype,
7042 TREE_OPERAND (ref, 1)),
7043 fold_convert_loc (loc, itype, delta));
7044 if (!tmp
7045 || TREE_CODE (tmp) != INTEGER_CST
7046 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7047 continue;
7048 }
7049
7050 break;
7051 }
7052 else
7053 mdim = false;
7054
7055 if (!handled_component_p (ref))
7056 return NULL_TREE;
7057 }
7058
7059 /* We found a suitable array reference. So copy everything up to it,
7060 and replace the index. */
7061
7062 pref = TREE_OPERAND (addr, 0);
7063 ret = copy_node (pref);
7064 SET_EXPR_LOCATION (ret, loc);
7065 pos = ret;
7066
7067 while (pref != ref)
7068 {
7069 pref = TREE_OPERAND (pref, 0);
7070 TREE_OPERAND (pos, 0) = copy_node (pref);
7071 pos = TREE_OPERAND (pos, 0);
7072 }
7073
7074 TREE_OPERAND (pos, 1)
7075 = fold_build2_loc (loc, PLUS_EXPR, itype,
7076 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7077 fold_convert_loc (loc, itype, delta));
7078 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7079 }
7080
7081
7082 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7083 means A >= Y && A != MAX, but in this case we know that
7084 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
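/* For instance (illustrative): for unsigned i, the combined test
i < n && i + 1 > k is folded here to i < n && i >= k; the first
conjunct guarantees that i + 1 cannot wrap past the maximum. */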
7085
7086 static tree
7087 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7088 {
7089 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7090
7091 if (TREE_CODE (bound) == LT_EXPR)
7092 a = TREE_OPERAND (bound, 0);
7093 else if (TREE_CODE (bound) == GT_EXPR)
7094 a = TREE_OPERAND (bound, 1);
7095 else
7096 return NULL_TREE;
7097
7098 typea = TREE_TYPE (a);
7099 if (!INTEGRAL_TYPE_P (typea)
7100 && !POINTER_TYPE_P (typea))
7101 return NULL_TREE;
7102
7103 if (TREE_CODE (ineq) == LT_EXPR)
7104 {
7105 a1 = TREE_OPERAND (ineq, 1);
7106 y = TREE_OPERAND (ineq, 0);
7107 }
7108 else if (TREE_CODE (ineq) == GT_EXPR)
7109 {
7110 a1 = TREE_OPERAND (ineq, 0);
7111 y = TREE_OPERAND (ineq, 1);
7112 }
7113 else
7114 return NULL_TREE;
7115
7116 if (TREE_TYPE (a1) != typea)
7117 return NULL_TREE;
7118
7119 if (POINTER_TYPE_P (typea))
7120 {
7121 /* Convert the pointers to signed integers before taking the difference. */
7122 tree ta = fold_convert_loc (loc, ssizetype, a);
7123 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7124 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7125 }
7126 else
7127 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7128
7129 if (!diff || !integer_onep (diff))
7130 return NULL_TREE;
7131
7132 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7133 }
7134
7135 /* Fold a sum or difference in which at least one operand is a multiplication.
7136 Returns the folded tree or NULL if no simplification could be made. */
7137
7138 static tree
7139 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7140 tree arg0, tree arg1)
7141 {
7142 tree arg00, arg01, arg10, arg11;
7143 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7144
7145 /* (A * C) +- (B * C) -> (A+-B) * C.
7146 (A * C) +- A -> A * (C+-1).
7147 We are most concerned about the case where C is a constant,
7148 but other combinations show up during loop reduction. Since
7149 it is not difficult, try all four possibilities. */
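/* For instance (illustrative): x*3 - x matches the second pattern
with C == 3, giving x * (3-1) == x*2; and i*4 + j*4 matches the
first, giving (i+j)*4. */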
7150
7151 if (TREE_CODE (arg0) == MULT_EXPR)
7152 {
7153 arg00 = TREE_OPERAND (arg0, 0);
7154 arg01 = TREE_OPERAND (arg0, 1);
7155 }
7156 else if (TREE_CODE (arg0) == INTEGER_CST)
7157 {
7158 arg00 = build_one_cst (type);
7159 arg01 = arg0;
7160 }
7161 else
7162 {
7163 /* We cannot generate constant 1 for fract. */
7164 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7165 return NULL_TREE;
7166 arg00 = arg0;
7167 arg01 = build_one_cst (type);
7168 }
7169 if (TREE_CODE (arg1) == MULT_EXPR)
7170 {
7171 arg10 = TREE_OPERAND (arg1, 0);
7172 arg11 = TREE_OPERAND (arg1, 1);
7173 }
7174 else if (TREE_CODE (arg1) == INTEGER_CST)
7175 {
7176 arg10 = build_one_cst (type);
7177 /* Since we canonicalize A - 2 to A + -2, undo that negation
7178 for the purpose of this canonicalization. */
7179 if (TREE_INT_CST_HIGH (arg1) == -1
7180 && negate_expr_p (arg1)
7181 && code == PLUS_EXPR)
7182 {
7183 arg11 = negate_expr (arg1);
7184 code = MINUS_EXPR;
7185 }
7186 else
7187 arg11 = arg1;
7188 }
7189 else
7190 {
7191 /* We cannot generate constant 1 for fract. */
7192 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7193 return NULL_TREE;
7194 arg10 = arg1;
7195 arg11 = build_one_cst (type);
7196 }
7197 same = NULL_TREE;
7198
7199 if (operand_equal_p (arg01, arg11, 0))
7200 same = arg01, alt0 = arg00, alt1 = arg10;
7201 else if (operand_equal_p (arg00, arg10, 0))
7202 same = arg00, alt0 = arg01, alt1 = arg11;
7203 else if (operand_equal_p (arg00, arg11, 0))
7204 same = arg00, alt0 = arg01, alt1 = arg10;
7205 else if (operand_equal_p (arg01, arg10, 0))
7206 same = arg01, alt0 = arg00, alt1 = arg11;
7207
7208 /* No identical multiplicands; see if we can find a common
7209 power-of-two factor in non-power-of-two multiplies. This
7210 can help in multi-dimensional array access. */
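/* For instance (illustrative): i*12 + j*4 has no common operand, but
12 and 4 share the power-of-two factor 4, so the sum can be
rewritten as (i*3 + j) * 4. */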
7211 else if (host_integerp (arg01, 0)
7212 && host_integerp (arg11, 0))
7213 {
7214 HOST_WIDE_INT int01, int11, tmp;
7215 bool swap = false;
7216 tree maybe_same;
7217 int01 = TREE_INT_CST_LOW (arg01);
7218 int11 = TREE_INT_CST_LOW (arg11);
7219
7220 /* Move min of absolute values to int11. */
7221 if (absu_hwi (int01) < absu_hwi (int11))
7222 {
7223 tmp = int01, int01 = int11, int11 = tmp;
7224 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7225 maybe_same = arg01;
7226 swap = true;
7227 }
7228 else
7229 maybe_same = arg11;
7230
7231 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7232 /* The remainder should not be a constant, otherwise we
7233 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7234 increased the number of multiplications necessary. */
7235 && TREE_CODE (arg10) != INTEGER_CST)
7236 {
7237 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7238 build_int_cst (TREE_TYPE (arg00),
7239 int01 / int11));
7240 alt1 = arg10;
7241 same = maybe_same;
7242 if (swap)
7243 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7244 }
7245 }
7246
7247 if (same)
7248 return fold_build2_loc (loc, MULT_EXPR, type,
7249 fold_build2_loc (loc, code, type,
7250 fold_convert_loc (loc, type, alt0),
7251 fold_convert_loc (loc, type, alt1)),
7252 fold_convert_loc (loc, type, same));
7253
7254 return NULL_TREE;
7255 }
7256
7257 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7258 specified by EXPR into the buffer PTR of length LEN bytes.
7259 Return the number of bytes placed in the buffer, or zero
7260 upon failure. */
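/* For instance (illustrative, assuming 8-bit bytes and a little-endian
4-byte SImode): the INTEGER_CST 0x01020304 is emitted into PTR as the
bytes { 0x04, 0x03, 0x02, 0x01 } and 4 is returned. */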
7261
7262 static int
7263 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7264 {
7265 tree type = TREE_TYPE (expr);
7266 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7267 int byte, offset, word, words;
7268 unsigned char value;
7269
7270 if (total_bytes > len)
7271 return 0;
7272 words = total_bytes / UNITS_PER_WORD;
7273
7274 for (byte = 0; byte < total_bytes; byte++)
7275 {
7276 int bitpos = byte * BITS_PER_UNIT;
7277 if (bitpos < HOST_BITS_PER_WIDE_INT)
7278 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7279 else
7280 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7281 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7282
7283 if (total_bytes > UNITS_PER_WORD)
7284 {
7285 word = byte / UNITS_PER_WORD;
7286 if (WORDS_BIG_ENDIAN)
7287 word = (words - 1) - word;
7288 offset = word * UNITS_PER_WORD;
7289 if (BYTES_BIG_ENDIAN)
7290 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7291 else
7292 offset += byte % UNITS_PER_WORD;
7293 }
7294 else
7295 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7296 ptr[offset] = value;
7297 }
7298 return total_bytes;
7299 }
7300
7301
7302 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7303 specified by EXPR into the buffer PTR of length LEN bytes.
7304 Return the number of bytes placed in the buffer, or zero
7305 upon failure. */
7306
7307 static int
7308 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7309 {
7310 tree type = TREE_TYPE (expr);
7311 enum machine_mode mode = TYPE_MODE (type);
7312 int total_bytes = GET_MODE_SIZE (mode);
7313 FIXED_VALUE_TYPE value;
7314 tree i_value, i_type;
7315
7316 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7317 return 0;
7318
7319 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7320
7321 if (NULL_TREE == i_type
7322 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7323 return 0;
7324
7325 value = TREE_FIXED_CST (expr);
7326 i_value = double_int_to_tree (i_type, value.data);
7327
7328 return native_encode_int (i_value, ptr, len);
7329 }
7330
7331
7332 /* Subroutine of native_encode_expr. Encode the REAL_CST
7333 specified by EXPR into the buffer PTR of length LEN bytes.
7334 Return the number of bytes placed in the buffer, or zero
7335 upon failure. */
7336
7337 static int
7338 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7339 {
7340 tree type = TREE_TYPE (expr);
7341 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7342 int byte, offset, word, words, bitpos;
7343 unsigned char value;
7344
7345 /* There are always 32 bits in each long, no matter the size of
7346 the host's long. We handle floating point representations with
7347 up to 192 bits. */
7348 long tmp[6];
7349
7350 if (total_bytes > len)
7351 return 0;
7352 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7353
7354 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7355
7356 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7357 bitpos += BITS_PER_UNIT)
7358 {
7359 byte = (bitpos / BITS_PER_UNIT) & 3;
7360 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7361
7362 if (UNITS_PER_WORD < 4)
7363 {
7364 word = byte / UNITS_PER_WORD;
7365 if (WORDS_BIG_ENDIAN)
7366 word = (words - 1) - word;
7367 offset = word * UNITS_PER_WORD;
7368 if (BYTES_BIG_ENDIAN)
7369 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7370 else
7371 offset += byte % UNITS_PER_WORD;
7372 }
7373 else
7374 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7375 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7376 }
7377 return total_bytes;
7378 }
7379
7380 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7381 specified by EXPR into the buffer PTR of length LEN bytes.
7382 Return the number of bytes placed in the buffer, or zero
7383 upon failure. */
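/* For instance (illustrative): a complex double constant is encoded
as the target bytes of its real part immediately followed by those
of its imaginary part; both halves must encode to the same size or
the whole encoding fails. */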
7384
7385 static int
7386 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7387 {
7388 int rsize, isize;
7389 tree part;
7390
7391 part = TREE_REALPART (expr);
7392 rsize = native_encode_expr (part, ptr, len);
7393 if (rsize == 0)
7394 return 0;
7395 part = TREE_IMAGPART (expr);
7396 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7397 if (isize != rsize)
7398 return 0;
7399 return rsize + isize;
7400 }
7401
7402
7403 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7404 specified by EXPR into the buffer PTR of length LEN bytes.
7405 Return the number of bytes placed in the buffer, or zero
7406 upon failure. */
7407
7408 static int
7409 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7410 {
7411 unsigned i, count;
7412 int size, offset;
7413 tree itype, elem;
7414
7415 offset = 0;
7416 count = VECTOR_CST_NELTS (expr);
7417 itype = TREE_TYPE (TREE_TYPE (expr));
7418 size = GET_MODE_SIZE (TYPE_MODE (itype));
7419 for (i = 0; i < count; i++)
7420 {
7421 elem = VECTOR_CST_ELT (expr, i);
7422 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7423 return 0;
7424 offset += size;
7425 }
7426 return offset;
7427 }
7428
7429
7430 /* Subroutine of native_encode_expr. Encode the STRING_CST
7431 specified by EXPR into the buffer PTR of length LEN bytes.
7432 Return the number of bytes placed in the buffer, or zero
7433 upon failure. */
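/* For instance (illustrative): the STRING_CST "ab" stored in a
"char[4]" is emitted as { 'a', 'b', 0, 0 } -- the declared array
size wins, with zero padding past the string's own length. */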
7434
7435 static int
7436 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7437 {
7438 tree type = TREE_TYPE (expr);
7439 HOST_WIDE_INT total_bytes;
7440
7441 if (TREE_CODE (type) != ARRAY_TYPE
7442 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7443 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7444 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7445 return 0;
7446 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7447 if (total_bytes > len)
7448 return 0;
7449 if (TREE_STRING_LENGTH (expr) < total_bytes)
7450 {
7451 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7452 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7453 total_bytes - TREE_STRING_LENGTH (expr));
7454 }
7455 else
7456 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7457 return total_bytes;
7458 }
7459
7460
7461 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7462 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7463 buffer PTR of length LEN bytes. Return the number of bytes
7464 placed in the buffer, or zero upon failure. */
7465
7466 int
7467 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7468 {
7469 switch (TREE_CODE (expr))
7470 {
7471 case INTEGER_CST:
7472 return native_encode_int (expr, ptr, len);
7473
7474 case REAL_CST:
7475 return native_encode_real (expr, ptr, len);
7476
7477 case FIXED_CST:
7478 return native_encode_fixed (expr, ptr, len);
7479
7480 case COMPLEX_CST:
7481 return native_encode_complex (expr, ptr, len);
7482
7483 case VECTOR_CST:
7484 return native_encode_vector (expr, ptr, len);
7485
7486 case STRING_CST:
7487 return native_encode_string (expr, ptr, len);
7488
7489 default:
7490 return 0;
7491 }
7492 }
7493
7494
7495 /* Subroutine of native_interpret_expr. Interpret the contents of
7496 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7497 If the buffer cannot be interpreted, return NULL_TREE. */
7498
7499 static tree
7500 native_interpret_int (tree type, const unsigned char *ptr, int len)
7501 {
7502 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7503 double_int result;
7504
7505 if (total_bytes > len
7506 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7507 return NULL_TREE;
7508
7509 result = double_int::from_buffer (ptr, total_bytes);
7510
7511 return double_int_to_tree (type, result);
7512 }
7513
7514
7515 /* Subroutine of native_interpret_expr. Interpret the contents of
7516 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7517 If the buffer cannot be interpreted, return NULL_TREE. */
7518
7519 static tree
7520 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7521 {
7522 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7523 double_int result;
7524 FIXED_VALUE_TYPE fixed_value;
7525
7526 if (total_bytes > len
7527 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7528 return NULL_TREE;
7529
7530 result = double_int::from_buffer (ptr, total_bytes);
7531 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7532
7533 return build_fixed (type, fixed_value);
7534 }
7535
7536
7537 /* Subroutine of native_interpret_expr. Interpret the contents of
7538 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7539 If the buffer cannot be interpreted, return NULL_TREE. */
7540
7541 static tree
7542 native_interpret_real (tree type, const unsigned char *ptr, int len)
7543 {
7544 enum machine_mode mode = TYPE_MODE (type);
7545 int total_bytes = GET_MODE_SIZE (mode);
7546 int byte, offset, word, words, bitpos;
7547 unsigned char value;
7548 /* There are always 32 bits in each long, no matter the size of
7549 the host's long. We handle floating point representations with
7550 up to 192 bits. */
7551 REAL_VALUE_TYPE r;
7552 long tmp[6];
7553
7554 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7555 if (total_bytes > len || total_bytes > 24)
7556 return NULL_TREE;
7557 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7558
7559 memset (tmp, 0, sizeof (tmp));
7560 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7561 bitpos += BITS_PER_UNIT)
7562 {
7563 byte = (bitpos / BITS_PER_UNIT) & 3;
7564 if (UNITS_PER_WORD < 4)
7565 {
7566 word = byte / UNITS_PER_WORD;
7567 if (WORDS_BIG_ENDIAN)
7568 word = (words - 1) - word;
7569 offset = word * UNITS_PER_WORD;
7570 if (BYTES_BIG_ENDIAN)
7571 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7572 else
7573 offset += byte % UNITS_PER_WORD;
7574 }
7575 else
7576 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7577 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7578
7579 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7580 }
7581
7582 real_from_target (&r, tmp, mode);
7583 return build_real (type, r);
7584 }
7585
7586
7587 /* Subroutine of native_interpret_expr. Interpret the contents of
7588 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7589 If the buffer cannot be interpreted, return NULL_TREE. */
7590
7591 static tree
7592 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7593 {
7594 tree etype, rpart, ipart;
7595 int size;
7596
7597 etype = TREE_TYPE (type);
7598 size = GET_MODE_SIZE (TYPE_MODE (etype));
7599 if (size * 2 > len)
7600 return NULL_TREE;
7601 rpart = native_interpret_expr (etype, ptr, size);
7602 if (!rpart)
7603 return NULL_TREE;
7604 ipart = native_interpret_expr (etype, ptr+size, size);
7605 if (!ipart)
7606 return NULL_TREE;
7607 return build_complex (type, rpart, ipart);
7608 }
7609
7610
7611 /* Subroutine of native_interpret_expr. Interpret the contents of
7612 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7613 If the buffer cannot be interpreted, return NULL_TREE. */
7614
7615 static tree
7616 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7617 {
7618 tree etype, elem;
7619 int i, size, count;
7620 tree *elements;
7621
7622 etype = TREE_TYPE (type);
7623 size = GET_MODE_SIZE (TYPE_MODE (etype));
7624 count = TYPE_VECTOR_SUBPARTS (type);
7625 if (size * count > len)
7626 return NULL_TREE;
7627
7628 elements = XALLOCAVEC (tree, count);
7629 for (i = count - 1; i >= 0; i--)
7630 {
7631 elem = native_interpret_expr (etype, ptr+(i*size), size);
7632 if (!elem)
7633 return NULL_TREE;
7634 elements[i] = elem;
7635 }
7636 return build_vector (type, elements);
7637 }
7638
7639
7640 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7641 the buffer PTR of length LEN as a constant of type TYPE. For
7642 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7643 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7644 return NULL_TREE. */
7645
7646 tree
7647 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7648 {
7649 switch (TREE_CODE (type))
7650 {
7651 case INTEGER_TYPE:
7652 case ENUMERAL_TYPE:
7653 case BOOLEAN_TYPE:
7654 case POINTER_TYPE:
7655 case REFERENCE_TYPE:
7656 return native_interpret_int (type, ptr, len);
7657
7658 case REAL_TYPE:
7659 return native_interpret_real (type, ptr, len);
7660
7661 case FIXED_POINT_TYPE:
7662 return native_interpret_fixed (type, ptr, len);
7663
7664 case COMPLEX_TYPE:
7665 return native_interpret_complex (type, ptr, len);
7666
7667 case VECTOR_TYPE:
7668 return native_interpret_vector (type, ptr, len);
7669
7670 default:
7671 return NULL_TREE;
7672 }
7673 }
7674
7675 /* Returns true if we can interpret the contents of a native encoding
7676 as TYPE. */
7677
7678 static bool
7679 can_native_interpret_type_p (tree type)
7680 {
7681 switch (TREE_CODE (type))
7682 {
7683 case INTEGER_TYPE:
7684 case ENUMERAL_TYPE:
7685 case BOOLEAN_TYPE:
7686 case POINTER_TYPE:
7687 case REFERENCE_TYPE:
7688 case FIXED_POINT_TYPE:
7689 case REAL_TYPE:
7690 case COMPLEX_TYPE:
7691 case VECTOR_TYPE:
7692 return true;
7693 default:
7694 return false;
7695 }
7696 }
7697
7698 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7699 TYPE at compile-time. If we're unable to perform the conversion
7700 return NULL_TREE. */
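/* For instance (illustrative, assuming IEEE single precision and a
target whose float and int byte orders agree):
VIEW_CONVERT_EXPR<int>(1.0f) round-trips through the byte buffer
and folds to the INTEGER_CST 0x3f800000. */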
7701
7702 static tree
7703 fold_view_convert_expr (tree type, tree expr)
7704 {
7705 /* We support up to 512-bit values (for V8DFmode). */
7706 unsigned char buffer[64];
7707 int len;
7708
7709 /* Check that the host and target are sane. */
7710 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7711 return NULL_TREE;
7712
7713 len = native_encode_expr (expr, buffer, sizeof (buffer));
7714 if (len == 0)
7715 return NULL_TREE;
7716
7717 return native_interpret_expr (type, buffer, len);
7718 }
7719
7720 /* Build an expression for the address of T. Folds away INDIRECT_REF
7721 to avoid confusing the gimplify process. */
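/* For instance (illustrative): &*p folds back to p (modulo a possible
pointer conversion), and &MEM[p, 0] folds to p directly. */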
7722
7723 tree
7724 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7725 {
7726 /* The size of the object is not relevant when talking about its address. */
7727 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7728 t = TREE_OPERAND (t, 0);
7729
7730 if (TREE_CODE (t) == INDIRECT_REF)
7731 {
7732 t = TREE_OPERAND (t, 0);
7733
7734 if (TREE_TYPE (t) != ptrtype)
7735 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7736 }
7737 else if (TREE_CODE (t) == MEM_REF
7738 && integer_zerop (TREE_OPERAND (t, 1)))
7739 return TREE_OPERAND (t, 0);
7740 else if (TREE_CODE (t) == MEM_REF
7741 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7742 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7743 TREE_OPERAND (t, 0),
7744 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7745 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7746 {
7747 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7748
7749 if (TREE_TYPE (t) != ptrtype)
7750 t = fold_convert_loc (loc, ptrtype, t);
7751 }
7752 else
7753 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7754
7755 return t;
7756 }
7757
7758 /* Build an expression for the address of T. */
7759
7760 tree
7761 build_fold_addr_expr_loc (location_t loc, tree t)
7762 {
7763 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7764
7765 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7766 }
7767
7768 static bool vec_cst_ctor_to_array (tree, tree *);
7769
7770 /* Fold a unary expression of code CODE and type TYPE with operand
7771 OP0. Return the folded expression if folding is successful.
7772 Otherwise, return NULL_TREE. */
7773
7774 tree
7775 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7776 {
7777 tree tem;
7778 tree arg0;
7779 enum tree_code_class kind = TREE_CODE_CLASS (code);
7780
7781 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7782 && TREE_CODE_LENGTH (code) == 1);
7783
7784 arg0 = op0;
7785 if (arg0)
7786 {
7787 if (CONVERT_EXPR_CODE_P (code)
7788 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7789 {
7790 /* Don't use STRIP_NOPS, because signedness of argument type
7791 matters. */
7792 STRIP_SIGN_NOPS (arg0);
7793 }
7794 else
7795 {
7796 /* Strip any conversions that don't change the mode. This
7797 is safe for every expression, except for a comparison
7798 expression because its signedness is derived from its
7799 operands.
7800
7801 Note that this is done as an internal manipulation within
7802 the constant folder, in order to find the simplest
7803 representation of the arguments so that their form can be
7804 studied. In any case, the appropriate type conversions
7805 should be put back in the tree that will get out of the
7806 constant folder. */
7807 STRIP_NOPS (arg0);
7808 }
7809 }
7810
7811 if (TREE_CODE_CLASS (code) == tcc_unary)
7812 {
7813 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7814 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7815 fold_build1_loc (loc, code, type,
7816 fold_convert_loc (loc, TREE_TYPE (op0),
7817 TREE_OPERAND (arg0, 1))));
7818 else if (TREE_CODE (arg0) == COND_EXPR)
7819 {
7820 tree arg01 = TREE_OPERAND (arg0, 1);
7821 tree arg02 = TREE_OPERAND (arg0, 2);
7822 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7823 arg01 = fold_build1_loc (loc, code, type,
7824 fold_convert_loc (loc,
7825 TREE_TYPE (op0), arg01));
7826 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7827 arg02 = fold_build1_loc (loc, code, type,
7828 fold_convert_loc (loc,
7829 TREE_TYPE (op0), arg02));
7830 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7831 arg01, arg02);
7832
7833 /* If this was a conversion, and all we did was to move it
7834 inside the COND_EXPR, bring it back out. But leave it if
7835 it is a conversion from integer to integer and the
7836 result precision is no wider than a word since such a
7837 conversion is cheap and may be optimized away by combine,
7838 while it couldn't if it were outside the COND_EXPR. Then return
7839 so we don't get into an infinite recursion loop taking the
7840 conversion out and then back in. */
7841
7842 if ((CONVERT_EXPR_CODE_P (code)
7843 || code == NON_LVALUE_EXPR)
7844 && TREE_CODE (tem) == COND_EXPR
7845 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7846 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7847 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7848 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7849 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7850 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7851 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7852 && (INTEGRAL_TYPE_P
7853 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7854 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7855 || flag_syntax_only))
7856 tem = build1_loc (loc, code, type,
7857 build3 (COND_EXPR,
7858 TREE_TYPE (TREE_OPERAND
7859 (TREE_OPERAND (tem, 1), 0)),
7860 TREE_OPERAND (tem, 0),
7861 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7862 TREE_OPERAND (TREE_OPERAND (tem, 2),
7863 0)));
7864 return tem;
7865 }
7866 }
7867
7868 switch (code)
7869 {
7870 case PAREN_EXPR:
7871 /* Re-association barriers around constants and other re-association
7872 barriers can be removed. */
7873 if (CONSTANT_CLASS_P (op0)
7874 || TREE_CODE (op0) == PAREN_EXPR)
7875 return fold_convert_loc (loc, type, op0);
7876 return NULL_TREE;
7877
7878 CASE_CONVERT:
7879 case FLOAT_EXPR:
7880 case FIX_TRUNC_EXPR:
7881 if (TREE_TYPE (op0) == type)
7882 return op0;
7883
7884 if (COMPARISON_CLASS_P (op0))
7885 {
7886 /* If we have (type) (a CMP b) and type is an integral type, return
7887 new expression involving the new type. Canonicalize
7888 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7889 non-integral type.
7890 Do not fold the result as that would not simplify further, also
7891 folding again results in recursions. */
7892 if (TREE_CODE (type) == BOOLEAN_TYPE)
7893 return build2_loc (loc, TREE_CODE (op0), type,
7894 TREE_OPERAND (op0, 0),
7895 TREE_OPERAND (op0, 1));
7896 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7897 && TREE_CODE (type) != VECTOR_TYPE)
7898 return build3_loc (loc, COND_EXPR, type, op0,
7899 constant_boolean_node (true, type),
7900 constant_boolean_node (false, type));
7901 }
7902
7903 /* Handle cases of two conversions in a row. */
7904 if (CONVERT_EXPR_P (op0))
7905 {
7906 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7907 tree inter_type = TREE_TYPE (op0);
7908 int inside_int = INTEGRAL_TYPE_P (inside_type);
7909 int inside_ptr = POINTER_TYPE_P (inside_type);
7910 int inside_float = FLOAT_TYPE_P (inside_type);
7911 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7912 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7913 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7914 int inter_int = INTEGRAL_TYPE_P (inter_type);
7915 int inter_ptr = POINTER_TYPE_P (inter_type);
7916 int inter_float = FLOAT_TYPE_P (inter_type);
7917 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7918 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7919 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7920 int final_int = INTEGRAL_TYPE_P (type);
7921 int final_ptr = POINTER_TYPE_P (type);
7922 int final_float = FLOAT_TYPE_P (type);
7923 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7924 unsigned int final_prec = TYPE_PRECISION (type);
7925 int final_unsignedp = TYPE_UNSIGNED (type);
7926
7927 /* In addition to the cases of two conversions in a row
7928 handled below, if we are converting something to its own
7929 type via an object of identical or wider precision, neither
7930 conversion is needed. */
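/* For instance (illustrative, assuming 32-bit int and 64-bit long):
(int)(long)i for an int i needs neither conversion, because the
round trip goes through a strictly wider integer type. */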
7931 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7932 && (((inter_int || inter_ptr) && final_int)
7933 || (inter_float && final_float))
7934 && inter_prec >= final_prec)
7935 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7936
7937 /* Likewise, if the intermediate and initial types are either both
7938 float or both integer, we don't need the middle conversion if the
7939 former is wider than the latter and doesn't change the signedness
7940 (for integers). Avoid this if the final type is a pointer since
7941 then we sometimes need the middle conversion. Likewise if the
7942 final type has a precision not equal to the size of its mode. */
7943 if (((inter_int && inside_int)
7944 || (inter_float && inside_float)
7945 || (inter_vec && inside_vec))
7946 && inter_prec >= inside_prec
7947 && (inter_float || inter_vec
7948 || inter_unsignedp == inside_unsignedp)
7949 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7950 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7951 && ! final_ptr
7952 && (! final_vec || inter_prec == inside_prec))
7953 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7954
7955 /* If we have a sign-extension of a zero-extended value, we can
7956 replace that by a single zero-extension. Likewise if the
7957 final conversion does not change precision we can drop the
7958 intermediate conversion. */
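/* For instance (illustrative, assuming 16/32/64-bit types): the chain
(long)(int)(unsigned short)x only ever sign-extends values that were
already zero-extended, so it equals (long)(unsigned short)x. */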
7959 if (inside_int && inter_int && final_int
7960 && ((inside_prec < inter_prec && inter_prec < final_prec
7961 && inside_unsignedp && !inter_unsignedp)
7962 || final_prec == inter_prec))
7963 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7964
7965 /* Two conversions in a row are not needed unless:
7966 - some conversion is floating-point (overstrict for now), or
7967 - some conversion is a vector (overstrict for now), or
7968 - the intermediate type is narrower than both initial and
7969 final, or
7970 - the intermediate type and innermost type differ in signedness,
7971 and the outermost type is wider than the intermediate, or
7972 - the initial type is a pointer type and the precisions of the
7973 intermediate and final types differ, or
7974 - the final type is a pointer type and the precisions of the
7975 initial and intermediate types differ. */
7976 if (! inside_float && ! inter_float && ! final_float
7977 && ! inside_vec && ! inter_vec && ! final_vec
7978 && (inter_prec >= inside_prec || inter_prec >= final_prec)
7979 && ! (inside_int && inter_int
7980 && inter_unsignedp != inside_unsignedp
7981 && inter_prec < final_prec)
7982 && ((inter_unsignedp && inter_prec > inside_prec)
7983 == (final_unsignedp && final_prec > inter_prec))
7984 && ! (inside_ptr && inter_prec != final_prec)
7985 && ! (final_ptr && inside_prec != inter_prec)
7986 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7987 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
7988 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7989 }
7990
7991 /* Handle (T *)&A.B.C for A being of type T and B and C
7992 living at offset zero. This occurs frequently in
7993 C++ upcasting and then accessing the base. */
7994 if (TREE_CODE (op0) == ADDR_EXPR
7995 && POINTER_TYPE_P (type)
7996 && handled_component_p (TREE_OPERAND (op0, 0)))
7997 {
7998 HOST_WIDE_INT bitsize, bitpos;
7999 tree offset;
8000 enum machine_mode mode;
8001 int unsignedp, volatilep;
8002 tree base = TREE_OPERAND (op0, 0);
8003 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8004 &mode, &unsignedp, &volatilep, false);
8005 /* If the reference was to a (constant) zero offset, we can use
8006 the address of the base if it has the same base type
8007 as the result type and the pointer type is unqualified. */
8008 if (! offset && bitpos == 0
8009 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8010 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8011 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8012 return fold_convert_loc (loc, type,
8013 build_fold_addr_expr_loc (loc, base));
8014 }
8015
8016 if (TREE_CODE (op0) == MODIFY_EXPR
8017 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8018 /* Detect assigning a bitfield. */
8019 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8020 && DECL_BIT_FIELD
8021 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8022 {
8023 /* Don't leave an assignment inside a conversion
8024 unless assigning a bitfield. */
8025 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8026 /* First do the assignment, then return converted constant. */
8027 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8028 TREE_NO_WARNING (tem) = 1;
8029 TREE_USED (tem) = 1;
8030 return tem;
8031 }
8032
8033 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8034 constants (if x has signed type, the sign bit cannot be set
8035 in c). This folds extension into the BIT_AND_EXPR.
8036 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8037 very likely don't have maximal range for their precision and this
8038 transformation effectively doesn't preserve non-maximal ranges. */
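/* For instance (illustrative): for unsigned int u,
(unsigned long)(u & 0xff) becomes (unsigned long)u & 0xff,
moving the widening conversion past the mask. */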
8039 if (TREE_CODE (type) == INTEGER_TYPE
8040 && TREE_CODE (op0) == BIT_AND_EXPR
8041 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8042 {
8043 tree and_expr = op0;
8044 tree and0 = TREE_OPERAND (and_expr, 0);
8045 tree and1 = TREE_OPERAND (and_expr, 1);
8046 int change = 0;
8047
8048 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8049 || (TYPE_PRECISION (type)
8050 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8051 change = 1;
8052 else if (TYPE_PRECISION (TREE_TYPE (and1))
8053 <= HOST_BITS_PER_WIDE_INT
8054 && host_integerp (and1, 1))
8055 {
8056 unsigned HOST_WIDE_INT cst;
8057
8058 cst = tree_low_cst (and1, 1);
8059 cst &= (HOST_WIDE_INT) -1
8060 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8061 change = (cst == 0);
8062 #ifdef LOAD_EXTEND_OP
8063 if (change
8064 && !flag_syntax_only
8065 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8066 == ZERO_EXTEND))
8067 {
8068 tree uns = unsigned_type_for (TREE_TYPE (and0));
8069 and0 = fold_convert_loc (loc, uns, and0);
8070 and1 = fold_convert_loc (loc, uns, and1);
8071 }
8072 #endif
8073 }
8074 if (change)
8075 {
8076 tem = force_fit_type_double (type, tree_to_double_int (and1),
8077 0, TREE_OVERFLOW (and1));
8078 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8079 fold_convert_loc (loc, type, and0), tem);
8080 }
8081 }
8082
8083 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8084 when one of the new casts will fold away. Conservatively we assume
8085 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
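/* For instance (illustrative): (char *)(p p+ 4) becomes
(char *)p p+ 4, since the offset is an INTEGER_CST and the
rewritten cast may fold further. */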
8086 if (POINTER_TYPE_P (type)
8087 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8088 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8089 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8090 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8091 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8092 {
8093 tree arg00 = TREE_OPERAND (arg0, 0);
8094 tree arg01 = TREE_OPERAND (arg0, 1);
8095
8096 return fold_build_pointer_plus_loc
8097 (loc, fold_convert_loc (loc, type, arg00), arg01);
8098 }
8099
8100 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8101 of the same precision, and X is an integer type not narrower than
8102 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
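/* For instance (illustrative, assuming 32-bit int): for int x,
(int) ~(unsigned) x folds to ~x -- both casts merely change
signedness at the same precision. */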
8103 if (INTEGRAL_TYPE_P (type)
8104 && TREE_CODE (op0) == BIT_NOT_EXPR
8105 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8106 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8107 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8108 {
8109 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8110 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8111 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8112 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8113 fold_convert_loc (loc, type, tem));
8114 }
8115
8116 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8117 type of X and Y (integer types only). */
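/* For instance (illustrative): for int x and y,
(unsigned char)(x * y) becomes
(unsigned char)((unsigned char)x * (unsigned char)y), since the
narrow unsigned type wraps and cannot introduce new overflow. */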
8118 if (INTEGRAL_TYPE_P (type)
8119 && TREE_CODE (op0) == MULT_EXPR
8120 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8121 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8122 {
8123 /* Be careful not to introduce new overflows. */
8124 tree mult_type;
8125 if (TYPE_OVERFLOW_WRAPS (type))
8126 mult_type = type;
8127 else
8128 mult_type = unsigned_type_for (type);
8129
8130 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8131 {
8132 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8133 fold_convert_loc (loc, mult_type,
8134 TREE_OPERAND (op0, 0)),
8135 fold_convert_loc (loc, mult_type,
8136 TREE_OPERAND (op0, 1)));
8137 return fold_convert_loc (loc, type, tem);
8138 }
8139 }
8140
8141 tem = fold_convert_const (code, type, op0);
8142 return tem ? tem : NULL_TREE;
8143
8144 case ADDR_SPACE_CONVERT_EXPR:
8145 if (integer_zerop (arg0))
8146 return fold_convert_const (code, type, arg0);
8147 return NULL_TREE;
8148
8149 case FIXED_CONVERT_EXPR:
8150 tem = fold_convert_const (code, type, arg0);
8151 return tem ? tem : NULL_TREE;
8152
8153 case VIEW_CONVERT_EXPR:
8154 if (TREE_TYPE (op0) == type)
8155 return op0;
8156 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8157 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8158 type, TREE_OPERAND (op0, 0));
8159 if (TREE_CODE (op0) == MEM_REF)
8160 return fold_build2_loc (loc, MEM_REF, type,
8161 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8162
8163 /* For integral conversions with the same precision or pointer
8164 conversions use a NOP_EXPR instead. */
8165 if ((INTEGRAL_TYPE_P (type)
8166 || POINTER_TYPE_P (type))
8167 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8168 || POINTER_TYPE_P (TREE_TYPE (op0)))
8169 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8170 return fold_convert_loc (loc, type, op0);
8171
8172 /* Strip inner integral conversions that do not change the precision. */
8173 if (CONVERT_EXPR_P (op0)
8174 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8175 || POINTER_TYPE_P (TREE_TYPE (op0)))
8176 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8177 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8178 && (TYPE_PRECISION (TREE_TYPE (op0))
8179 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8180 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8181 type, TREE_OPERAND (op0, 0));
8182
8183 return fold_view_convert_expr (type, op0);
8184
8185 case NEGATE_EXPR:
8186 tem = fold_negate_expr (loc, arg0);
8187 if (tem)
8188 return fold_convert_loc (loc, type, tem);
8189 return NULL_TREE;
8190
8191 case ABS_EXPR:
8192 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8193 return fold_abs_const (arg0, type);
8194 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8195 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8196 /* Convert fabs((double)float) into (double)fabsf(float). */
8197 else if (TREE_CODE (arg0) == NOP_EXPR
8198 && TREE_CODE (type) == REAL_TYPE)
8199 {
8200 tree targ0 = strip_float_extensions (arg0);
8201 if (targ0 != arg0)
8202 return fold_convert_loc (loc, type,
8203 fold_build1_loc (loc, ABS_EXPR,
8204 TREE_TYPE (targ0),
8205 targ0));
8206 }
8207 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8208 else if (TREE_CODE (arg0) == ABS_EXPR)
8209 return arg0;
8210 else if (tree_expr_nonnegative_p (arg0))
8211 return arg0;
8212
8213 /* Strip sign ops from argument. */
8214 if (TREE_CODE (type) == REAL_TYPE)
8215 {
8216 tem = fold_strip_sign_ops (arg0);
8217 if (tem)
8218 return fold_build1_loc (loc, ABS_EXPR, type,
8219 fold_convert_loc (loc, type, tem));
8220 }
8221 return NULL_TREE;
8222
8223 case CONJ_EXPR:
8224 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8225 return fold_convert_loc (loc, type, arg0);
8226 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8227 {
8228 tree itype = TREE_TYPE (type);
8229 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8230 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8231 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8232 negate_expr (ipart));
8233 }
8234 if (TREE_CODE (arg0) == COMPLEX_CST)
8235 {
8236 tree itype = TREE_TYPE (type);
8237 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8238 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8239 return build_complex (type, rpart, negate_expr (ipart));
8240 }
8241 if (TREE_CODE (arg0) == CONJ_EXPR)
8242 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8243 return NULL_TREE;
8244
8245 case BIT_NOT_EXPR:
8246 if (TREE_CODE (arg0) == INTEGER_CST)
8247 return fold_not_const (arg0, type);
8248 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8249 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8250 /* Convert ~ (-A) to A - 1. */
8251 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8252 return fold_build2_loc (loc, MINUS_EXPR, type,
8253 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8254 build_int_cst (type, 1));
8255 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8256 else if (INTEGRAL_TYPE_P (type)
8257 && ((TREE_CODE (arg0) == MINUS_EXPR
8258 && integer_onep (TREE_OPERAND (arg0, 1)))
8259 || (TREE_CODE (arg0) == PLUS_EXPR
8260 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8261 return fold_build1_loc (loc, NEGATE_EXPR, type,
8262 fold_convert_loc (loc, type,
8263 TREE_OPERAND (arg0, 0)));
8264 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8265 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8266 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8267 fold_convert_loc (loc, type,
8268 TREE_OPERAND (arg0, 0)))))
8269 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8270 fold_convert_loc (loc, type,
8271 TREE_OPERAND (arg0, 1)));
8272 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8273 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8274 fold_convert_loc (loc, type,
8275 TREE_OPERAND (arg0, 1)))))
8276 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8277 fold_convert_loc (loc, type,
8278 TREE_OPERAND (arg0, 0)), tem);
8279 /* Perform BIT_NOT_EXPR on each element individually. */
8280 else if (TREE_CODE (arg0) == VECTOR_CST)
8281 {
8282 tree *elements;
8283 tree elem;
8284 unsigned count = VECTOR_CST_NELTS (arg0), i;
8285
8286 elements = XALLOCAVEC (tree, count);
8287 for (i = 0; i < count; i++)
8288 {
8289 elem = VECTOR_CST_ELT (arg0, i);
8290 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8291 if (elem == NULL_TREE)
8292 break;
8293 elements[i] = elem;
8294 }
8295 if (i == count)
8296 return build_vector (type, elements);
8297 }
8298 else if (COMPARISON_CLASS_P (arg0)
8299 && (VECTOR_TYPE_P (type)
8300 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8301 {
8302 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8303 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8304 HONOR_NANS (TYPE_MODE (op_type)));
8305 if (subcode != ERROR_MARK)
8306 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8307 TREE_OPERAND (arg0, 1));
8308 }
8309
8310
8311 return NULL_TREE;
8312
8313 case TRUTH_NOT_EXPR:
8314 /* Note that the operand of this must be an int
8315 and its values must be 0 or 1.
8316 ("true" is a fixed value perhaps depending on the language,
8317 but we don't handle values other than 1 correctly yet.) */
8318 tem = fold_truth_not_expr (loc, arg0);
8319 if (!tem)
8320 return NULL_TREE;
8321 return fold_convert_loc (loc, type, tem);
8322
8323 case REALPART_EXPR:
8324 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8325 return fold_convert_loc (loc, type, arg0);
8326 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8327 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8328 TREE_OPERAND (arg0, 1));
8329 if (TREE_CODE (arg0) == COMPLEX_CST)
8330 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8331 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8332 {
8333 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8334 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8335 fold_build1_loc (loc, REALPART_EXPR, itype,
8336 TREE_OPERAND (arg0, 0)),
8337 fold_build1_loc (loc, REALPART_EXPR, itype,
8338 TREE_OPERAND (arg0, 1)));
8339 return fold_convert_loc (loc, type, tem);
8340 }
8341 if (TREE_CODE (arg0) == CONJ_EXPR)
8342 {
8343 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8344 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8345 TREE_OPERAND (arg0, 0));
8346 return fold_convert_loc (loc, type, tem);
8347 }
8348 if (TREE_CODE (arg0) == CALL_EXPR)
8349 {
8350 tree fn = get_callee_fndecl (arg0);
8351 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8352 switch (DECL_FUNCTION_CODE (fn))
8353 {
8354 CASE_FLT_FN (BUILT_IN_CEXPI):
8355 fn = mathfn_built_in (type, BUILT_IN_COS);
8356 if (fn)
8357 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8358 break;
8359
8360 default:
8361 break;
8362 }
8363 }
8364 return NULL_TREE;
8365
8366 case IMAGPART_EXPR:
8367 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8368 return build_zero_cst (type);
8369 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8370 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8371 TREE_OPERAND (arg0, 0));
8372 if (TREE_CODE (arg0) == COMPLEX_CST)
8373 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8374 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8375 {
8376 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8377 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8378 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8379 TREE_OPERAND (arg0, 0)),
8380 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8381 TREE_OPERAND (arg0, 1)));
8382 return fold_convert_loc (loc, type, tem);
8383 }
8384 if (TREE_CODE (arg0) == CONJ_EXPR)
8385 {
8386 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8387 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8388 return fold_convert_loc (loc, type, negate_expr (tem));
8389 }
8390 if (TREE_CODE (arg0) == CALL_EXPR)
8391 {
8392 tree fn = get_callee_fndecl (arg0);
8393 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8394 switch (DECL_FUNCTION_CODE (fn))
8395 {
8396 CASE_FLT_FN (BUILT_IN_CEXPI):
8397 fn = mathfn_built_in (type, BUILT_IN_SIN);
8398 if (fn)
8399 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8400 break;
8401
8402 default:
8403 break;
8404 }
8405 }
8406 return NULL_TREE;
8407
8408 case INDIRECT_REF:
8409 /* Fold *&X to X if X is an lvalue. */
8410 if (TREE_CODE (op0) == ADDR_EXPR)
8411 {
8412 tree op00 = TREE_OPERAND (op0, 0);
8413 if ((TREE_CODE (op00) == VAR_DECL
8414 || TREE_CODE (op00) == PARM_DECL
8415 || TREE_CODE (op00) == RESULT_DECL)
8416 && !TREE_READONLY (op00))
8417 return op00;
8418 }
8419 return NULL_TREE;
8420
8421 case VEC_UNPACK_LO_EXPR:
8422 case VEC_UNPACK_HI_EXPR:
8423 case VEC_UNPACK_FLOAT_LO_EXPR:
8424 case VEC_UNPACK_FLOAT_HI_EXPR:
8425 {
8426 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8427 tree *elts;
8428 enum tree_code subcode;
8429
8430 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8431 if (TREE_CODE (arg0) != VECTOR_CST)
8432 return NULL_TREE;
8433
8434 elts = XALLOCAVEC (tree, nelts * 2);
8435 if (!vec_cst_ctor_to_array (arg0, elts))
8436 return NULL_TREE;
8437
8438 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8439 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8440 elts += nelts;
8441
8442 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8443 subcode = NOP_EXPR;
8444 else
8445 subcode = FLOAT_EXPR;
8446
8447 for (i = 0; i < nelts; i++)
8448 {
8449 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8450 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8451 return NULL_TREE;
8452 }
8453
8454 return build_vector (type, elts);
8455 }
8456
8457 case REDUC_MIN_EXPR:
8458 case REDUC_MAX_EXPR:
8459 case REDUC_PLUS_EXPR:
8460 {
8461 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8462 tree *elts;
8463 enum tree_code subcode;
8464
8465 if (TREE_CODE (op0) != VECTOR_CST)
8466 return NULL_TREE;
8467
8468 elts = XALLOCAVEC (tree, nelts);
8469 if (!vec_cst_ctor_to_array (op0, elts))
8470 return NULL_TREE;
8471
8472 switch (code)
8473 {
8474 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8475 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8476 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8477 default: gcc_unreachable ();
8478 }
8479
8480 for (i = 1; i < nelts; i++)
8481 {
8482 elts[0] = const_binop (subcode, elts[0], elts[i]);
8483 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8484 return NULL_TREE;
8485 elts[i] = build_zero_cst (TREE_TYPE (type));
8486 }
8487
8488 return build_vector (type, elts);
8489 }
8490
8491 default:
8492 return NULL_TREE;
8493 } /* switch (code) */
8494 }
8495
8496
8497 /* If the operation was a conversion do _not_ mark a resulting constant
8498 with TREE_OVERFLOW if the original constant was not. These conversions
8499 have implementation defined behavior and retaining the TREE_OVERFLOW
8500 flag here would confuse later passes such as VRP. */
8501 tree
8502 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8503 tree type, tree op0)
8504 {
8505 tree res = fold_unary_loc (loc, code, type, op0);
8506 if (res
8507 && TREE_CODE (res) == INTEGER_CST
8508 && TREE_CODE (op0) == INTEGER_CST
8509 && CONVERT_EXPR_CODE_P (code))
8510 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8511
8512 return res;
8513 }
8514
8515 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8516 operands OP0 and OP1. LOC is the location of the resulting expression.
8517 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
8518 Return the folded expression if folding is successful. Otherwise,
8519 return NULL_TREE. */
8520 static tree
8521 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8522 tree arg0, tree arg1, tree op0, tree op1)
8523 {
8524 tree tem;
8525
8526 /* We only do these simplifications if we are optimizing. */
8527 if (!optimize)
8528 return NULL_TREE;
8529
8530 /* Check for things like (A || B) && (A || C). We can convert this
8531 to A || (B && C). Note that either operator can be any of the four
8532 truth and/or operations and the transformation will still be
8533 valid. Also note that we only care about order for the
8534 ANDIF and ORIF operators. If B contains side effects, this
8535 might change the truth-value of A. */
8536 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8537 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8538 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8539 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8540 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8541 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8542 {
8543 tree a00 = TREE_OPERAND (arg0, 0);
8544 tree a01 = TREE_OPERAND (arg0, 1);
8545 tree a10 = TREE_OPERAND (arg1, 0);
8546 tree a11 = TREE_OPERAND (arg1, 1);
8547 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8548 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8549 && (code == TRUTH_AND_EXPR
8550 || code == TRUTH_OR_EXPR));
8551
8552 if (operand_equal_p (a00, a10, 0))
8553 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8554 fold_build2_loc (loc, code, type, a01, a11));
8555 else if (commutative && operand_equal_p (a00, a11, 0))
8556 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8557 fold_build2_loc (loc, code, type, a01, a10));
8558 else if (commutative && operand_equal_p (a01, a10, 0))
8559 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8560 fold_build2_loc (loc, code, type, a00, a11));
8561
8562 /* This case is tricky because we must either have commutative
8563 operators or else A10 must not have side-effects. */
8564
8565 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8566 && operand_equal_p (a01, a11, 0))
8567 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8568 fold_build2_loc (loc, code, type, a00, a10),
8569 a01);
8570 }
8571
8572 /* See if we can build a range comparison. */
8573 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8574 return tem;
8575
8576 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8577 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8578 {
8579 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8580 if (tem)
8581 return fold_build2_loc (loc, code, type, tem, arg1);
8582 }
8583
8584 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8585 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8586 {
8587 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8588 if (tem)
8589 return fold_build2_loc (loc, code, type, arg0, tem);
8590 }
8591
8592 /* Check for the possibility of merging component references. If our
8593 lhs is another similar operation, try to merge its rhs with our
8594 rhs. Then try to merge our lhs and rhs. */
8595 if (TREE_CODE (arg0) == code
8596 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8597 TREE_OPERAND (arg0, 1), arg1)))
8598 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8599
8600 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8601 return tem;
8602
8603 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8604 && (code == TRUTH_AND_EXPR
8605 || code == TRUTH_ANDIF_EXPR
8606 || code == TRUTH_OR_EXPR
8607 || code == TRUTH_ORIF_EXPR))
8608 {
8609 enum tree_code ncode, icode;
8610
8611 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8612 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8613 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8614
	  /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	     or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	     We don't want to pack more than two leaves into a non-IF
	     AND/OR expression.
	     If the tree code of the left-hand operand isn't an AND/OR-IF
	     code and isn't equal to IF-CODE, then we don't want to add
	     the right-hand operand.
	     If the inner right-hand side of the left-hand operand has
	     side effects, or isn't simple, then we can't add to it, as
	     otherwise we might destroy the if-sequence.  */
8624 if (TREE_CODE (arg0) == icode
8625 && simple_operand_p_2 (arg1)
	      /* Needed for sequence points to handle trapping
		 operations and side effects.  */
8628 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8629 {
8630 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8631 arg1);
8632 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8633 tem);
8634 }
	  /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	     or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8637 else if (TREE_CODE (arg1) == icode
8638 && simple_operand_p_2 (arg0)
		   /* Needed for sequence points to handle trapping
		      operations and side effects.  */
8641 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8642 {
8643 tem = fold_build2_loc (loc, ncode, type,
8644 arg0, TREE_OPERAND (arg1, 0));
8645 return fold_build2_loc (loc, icode, type, tem,
8646 TREE_OPERAND (arg1, 1));
8647 }
8648 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8649 into (A OR B).
	 For sequence point consistency, we need to check for trapping
	 and side effects.  */
8652 else if (code == icode && simple_operand_p_2 (arg0)
8653 && simple_operand_p_2 (arg1))
8654 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8655 }
8656
8657 return NULL_TREE;
8658 }
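
/* An illustrative note (not part of the original sources): at -O on
   LOGICAL_OP_NON_SHORT_CIRCUIT targets, and for simple operands a, b
   and c without side effects or traps, fold_truth_andor performs e.g.

     (a && b) && c   ->   a && (b AND c)
     a || (b || c)   ->   (a OR b) || c

   where AND and OR denote the non-short-circuit TRUTH_AND_EXPR and
   TRUTH_OR_EXPR codes, so at most two leaves end up packed into each
   non-IF AND/OR expression.  */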
8659
8660 /* Fold a binary expression of code CODE and type TYPE with operands
8661 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8662 Return the folded expression if folding is successful. Otherwise,
8663 return NULL_TREE. */
8664
8665 static tree
8666 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8667 {
8668 enum tree_code compl_code;
8669
8670 if (code == MIN_EXPR)
8671 compl_code = MAX_EXPR;
8672 else if (code == MAX_EXPR)
8673 compl_code = MIN_EXPR;
8674 else
8675 gcc_unreachable ();
8676
8677 /* MIN (MAX (a, b), b) == b. */
8678 if (TREE_CODE (op0) == compl_code
8679 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8680 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8681
8682 /* MIN (MAX (b, a), b) == b. */
8683 if (TREE_CODE (op0) == compl_code
8684 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8685 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8686 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8687
8688 /* MIN (a, MAX (a, b)) == a. */
8689 if (TREE_CODE (op1) == compl_code
8690 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8691 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8692 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8693
8694 /* MIN (a, MAX (b, a)) == a. */
8695 if (TREE_CODE (op1) == compl_code
8696 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8697 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8698 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8699
8700 return NULL_TREE;
8701 }
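
/* E.g. (illustrative): fold_minmax simplifies

     MIN_EXPR <MAX_EXPR <a, b>, b>   ->   b
     MAX_EXPR <a, MIN_EXPR <a, b>>   ->   a

   provided the discarded operand can be reordered past the kept one
   (see the reorder_operands_p checks above).  */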
8702
8703 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8704 by changing CODE to reduce the magnitude of constants involved in
8705 ARG0 of the comparison.
8706 Returns a canonicalized comparison tree if a simplification was
8707 possible, otherwise returns NULL_TREE.
8708 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8709 valid if signed overflow is undefined. */
8710
8711 static tree
8712 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8713 tree arg0, tree arg1,
8714 bool *strict_overflow_p)
8715 {
8716 enum tree_code code0 = TREE_CODE (arg0);
8717 tree t, cst0 = NULL_TREE;
8718 int sgn0;
8719 bool swap = false;
8720
8721 /* Match A +- CST code arg1 and CST code arg1. We can change the
8722 first form only if overflow is undefined. */
8723 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8724 /* In principle pointers also have undefined overflow behavior,
8725 but that causes problems elsewhere. */
8726 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8727 && (code0 == MINUS_EXPR
8728 || code0 == PLUS_EXPR)
8729 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8730 || code0 == INTEGER_CST))
8731 return NULL_TREE;
8732
8733 /* Identify the constant in arg0 and its sign. */
8734 if (code0 == INTEGER_CST)
8735 cst0 = arg0;
8736 else
8737 cst0 = TREE_OPERAND (arg0, 1);
8738 sgn0 = tree_int_cst_sgn (cst0);
8739
8740 /* Overflowed constants and zero will cause problems. */
8741 if (integer_zerop (cst0)
8742 || TREE_OVERFLOW (cst0))
8743 return NULL_TREE;
8744
8745 /* See if we can reduce the magnitude of the constant in
8746 arg0 by changing the comparison code. */
8747 if (code0 == INTEGER_CST)
8748 {
8749 /* CST <= arg1 -> CST-1 < arg1. */
8750 if (code == LE_EXPR && sgn0 == 1)
8751 code = LT_EXPR;
8752 /* -CST < arg1 -> -CST-1 <= arg1. */
8753 else if (code == LT_EXPR && sgn0 == -1)
8754 code = LE_EXPR;
8755 /* CST > arg1 -> CST-1 >= arg1. */
8756 else if (code == GT_EXPR && sgn0 == 1)
8757 code = GE_EXPR;
8758 /* -CST >= arg1 -> -CST-1 > arg1. */
8759 else if (code == GE_EXPR && sgn0 == -1)
8760 code = GT_EXPR;
8761 else
8762 return NULL_TREE;
8763 /* arg1 code' CST' might be more canonical. */
8764 swap = true;
8765 }
8766 else
8767 {
8768 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8769 if (code == LT_EXPR
8770 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8771 code = LE_EXPR;
8772 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8773 else if (code == GT_EXPR
8774 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8775 code = GE_EXPR;
8776 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8777 else if (code == LE_EXPR
8778 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8779 code = LT_EXPR;
8780 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8781 else if (code == GE_EXPR
8782 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8783 code = GT_EXPR;
8784 else
8785 return NULL_TREE;
8786 *strict_overflow_p = true;
8787 }
8788
  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
8791 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8792 && ((sgn0 == 1
8793 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8794 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8795 || (sgn0 == -1
8796 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8797 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8798 /* We cannot swap the comparison here as that would cause us to
8799 endlessly recurse. */
8800 return NULL_TREE;
8801
8802 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8803 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8804 if (code0 != INTEGER_CST)
8805 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8806 t = fold_convert (TREE_TYPE (arg1), t);
8807
  /* If swapping might yield a more canonical form, do so.  */
8809 if (swap)
8810 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8811 else
8812 return fold_build2_loc (loc, code, type, t, arg1);
8813 }
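
/* A worked example (illustrative, not in the original sources): for
   signed x with undefined overflow, the A - CST case above rewrites

     x - 3 < y   ->   x - 2 <= y

   and the sole-constant case rewrites

     5 <= y      ->   y > 4

   (the latter via the final swap), each time reducing the magnitude
   of the constant by one.  */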
8814
8815 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8816 overflow further. Try to decrease the magnitude of constants involved
8817 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8818 and put sole constants at the second argument position.
8819 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8820
8821 static tree
8822 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8823 tree arg0, tree arg1)
8824 {
8825 tree t;
8826 bool strict_overflow_p;
8827 const char * const warnmsg = G_("assuming signed overflow does not occur "
8828 "when reducing constant in comparison");
8829
8830 /* Try canonicalization by simplifying arg0. */
8831 strict_overflow_p = false;
8832 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8833 &strict_overflow_p);
8834 if (t)
8835 {
8836 if (strict_overflow_p)
8837 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8838 return t;
8839 }
8840
8841 /* Try canonicalization by simplifying arg1 using the swapped
8842 comparison. */
8843 code = swap_tree_comparison (code);
8844 strict_overflow_p = false;
8845 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8846 &strict_overflow_p);
8847 if (t && strict_overflow_p)
8848 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8849 return t;
8850 }
8851
8852 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8853 space. This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */
8855
8856 static bool
8857 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8858 {
8859 double_int di_offset, total;
8860
8861 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8862 return true;
8863
8864 if (bitpos < 0)
8865 return true;
8866
8867 if (offset == NULL_TREE)
8868 di_offset = double_int_zero;
8869 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8870 return true;
8871 else
8872 di_offset = TREE_INT_CST (offset);
8873
8874 bool overflow;
8875 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8876 total = di_offset.add_with_sign (units, true, &overflow);
8877 if (overflow)
8878 return true;
8879
8880 if (total.high != 0)
8881 return true;
8882
8883 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8884 if (size <= 0)
8885 return true;
8886
8887 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8888 array. */
8889 if (TREE_CODE (base) == ADDR_EXPR)
8890 {
8891 HOST_WIDE_INT base_size;
8892
8893 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8894 if (base_size > 0 && size < base_size)
8895 size = base_size;
8896 }
8897
8898 return total.low > (unsigned HOST_WIDE_INT) size;
8899 }
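
/* E.g. (an illustrative sketch): for a pointer to a 16-byte struct,
   OFFSET == NULL_TREE and BITPOS == 32 give a total byte offset of 4,
   which does not exceed the size of 16, so an address like &p->x four
   bytes into the object is known not to wrap.  */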
8900
8901 /* Subroutine of fold_binary. This routine performs all of the
8902 transformations that are common to the equality/inequality
8903 operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary itself should use fold_binary rather than calling this
   function directly.  Fold a comparison with
8906 tree code CODE and type TYPE with operands OP0 and OP1. Return
8907 the folded comparison or NULL_TREE. */
8908
8909 static tree
8910 fold_comparison (location_t loc, enum tree_code code, tree type,
8911 tree op0, tree op1)
8912 {
8913 tree arg0, arg1, tem;
8914
8915 arg0 = op0;
8916 arg1 = op1;
8917
8918 STRIP_SIGN_NOPS (arg0);
8919 STRIP_SIGN_NOPS (arg1);
8920
8921 tem = fold_relational_const (code, type, arg0, arg1);
8922 if (tem != NULL_TREE)
8923 return tem;
8924
8925 /* If one arg is a real or integer constant, put it last. */
8926 if (tree_swap_operands_p (arg0, arg1, true))
8927 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8928
8929 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8930 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8931 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8932 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8933 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8934 && (TREE_CODE (arg1) == INTEGER_CST
8935 && !TREE_OVERFLOW (arg1)))
8936 {
8937 tree const1 = TREE_OPERAND (arg0, 1);
8938 tree const2 = arg1;
8939 tree variable = TREE_OPERAND (arg0, 0);
8940 tree lhs;
8941 int lhs_add;
8942 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
8943
8944 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
8945 TREE_TYPE (arg1), const2, const1);
8946
8947 /* If the constant operation overflowed this can be
8948 simplified as a comparison against INT_MAX/INT_MIN. */
8949 if (TREE_CODE (lhs) == INTEGER_CST
8950 && TREE_OVERFLOW (lhs))
8951 {
8952 int const1_sgn = tree_int_cst_sgn (const1);
8953 enum tree_code code2 = code;
8954
8955 /* Get the sign of the constant on the lhs if the
8956 operation were VARIABLE + CONST1. */
8957 if (TREE_CODE (arg0) == MINUS_EXPR)
8958 const1_sgn = -const1_sgn;
8959
8960 /* The sign of the constant determines if we overflowed
8961 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8962 Canonicalize to the INT_MIN overflow by swapping the comparison
8963 if necessary. */
8964 if (const1_sgn == -1)
8965 code2 = swap_tree_comparison (code);
8966
8967 /* We now can look at the canonicalized case
8968 VARIABLE + 1 CODE2 INT_MIN
8969 and decide on the result. */
8970 if (code2 == LT_EXPR
8971 || code2 == LE_EXPR
8972 || code2 == EQ_EXPR)
8973 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
8974 else if (code2 == NE_EXPR
8975 || code2 == GE_EXPR
8976 || code2 == GT_EXPR)
8977 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
8978 }
8979
8980 if (TREE_CODE (lhs) == TREE_CODE (arg1)
8981 && (TREE_CODE (lhs) != INTEGER_CST
8982 || !TREE_OVERFLOW (lhs)))
8983 {
8984 if (code != EQ_EXPR && code != NE_EXPR)
8985 fold_overflow_warning ("assuming signed overflow does not occur "
8986 "when changing X +- C1 cmp C2 to "
8987 "X cmp C1 +- C2",
8988 WARN_STRICT_OVERFLOW_COMPARISON);
8989 return fold_build2_loc (loc, code, type, variable, lhs);
8990 }
8991 }
8992
8993 /* For comparisons of pointers we can decompose it to a compile time
8994 comparison of the base objects and the offsets into the object.
8995 This requires at least one operand being an ADDR_EXPR or a
8996 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8997 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8998 && (TREE_CODE (arg0) == ADDR_EXPR
8999 || TREE_CODE (arg1) == ADDR_EXPR
9000 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9001 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9002 {
9003 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9004 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9005 enum machine_mode mode;
9006 int volatilep, unsignedp;
9007 bool indirect_base0 = false, indirect_base1 = false;
9008
9009 /* Get base and offset for the access. Strip ADDR_EXPR for
9010 get_inner_reference, but put it back by stripping INDIRECT_REF
9011 off the base object if possible. indirect_baseN will be true
9012 if baseN is not an address but refers to the object itself. */
9013 base0 = arg0;
9014 if (TREE_CODE (arg0) == ADDR_EXPR)
9015 {
9016 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9017 &bitsize, &bitpos0, &offset0, &mode,
9018 &unsignedp, &volatilep, false);
9019 if (TREE_CODE (base0) == INDIRECT_REF)
9020 base0 = TREE_OPERAND (base0, 0);
9021 else
9022 indirect_base0 = true;
9023 }
9024 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9025 {
9026 base0 = TREE_OPERAND (arg0, 0);
9027 STRIP_SIGN_NOPS (base0);
9028 if (TREE_CODE (base0) == ADDR_EXPR)
9029 {
9030 base0 = TREE_OPERAND (base0, 0);
9031 indirect_base0 = true;
9032 }
9033 offset0 = TREE_OPERAND (arg0, 1);
9034 if (host_integerp (offset0, 0))
9035 {
9036 HOST_WIDE_INT off = size_low_cst (offset0);
9037 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9038 * BITS_PER_UNIT)
9039 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9040 {
9041 bitpos0 = off * BITS_PER_UNIT;
9042 offset0 = NULL_TREE;
9043 }
9044 }
9045 }
9046
9047 base1 = arg1;
9048 if (TREE_CODE (arg1) == ADDR_EXPR)
9049 {
9050 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9051 &bitsize, &bitpos1, &offset1, &mode,
9052 &unsignedp, &volatilep, false);
9053 if (TREE_CODE (base1) == INDIRECT_REF)
9054 base1 = TREE_OPERAND (base1, 0);
9055 else
9056 indirect_base1 = true;
9057 }
9058 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9059 {
9060 base1 = TREE_OPERAND (arg1, 0);
9061 STRIP_SIGN_NOPS (base1);
9062 if (TREE_CODE (base1) == ADDR_EXPR)
9063 {
9064 base1 = TREE_OPERAND (base1, 0);
9065 indirect_base1 = true;
9066 }
9067 offset1 = TREE_OPERAND (arg1, 1);
9068 if (host_integerp (offset1, 0))
9069 {
9070 HOST_WIDE_INT off = size_low_cst (offset1);
9071 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9072 * BITS_PER_UNIT)
9073 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9074 {
9075 bitpos1 = off * BITS_PER_UNIT;
9076 offset1 = NULL_TREE;
9077 }
9078 }
9079 }
9080
9081 /* A local variable can never be pointed to by
9082 the default SSA name of an incoming parameter. */
9083 if ((TREE_CODE (arg0) == ADDR_EXPR
9084 && indirect_base0
9085 && TREE_CODE (base0) == VAR_DECL
9086 && auto_var_in_fn_p (base0, current_function_decl)
9087 && !indirect_base1
9088 && TREE_CODE (base1) == SSA_NAME
9089 && SSA_NAME_IS_DEFAULT_DEF (base1)
9090 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9091 || (TREE_CODE (arg1) == ADDR_EXPR
9092 && indirect_base1
9093 && TREE_CODE (base1) == VAR_DECL
9094 && auto_var_in_fn_p (base1, current_function_decl)
9095 && !indirect_base0
9096 && TREE_CODE (base0) == SSA_NAME
9097 && SSA_NAME_IS_DEFAULT_DEF (base0)
9098 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9099 {
9100 if (code == NE_EXPR)
9101 return constant_boolean_node (1, type);
9102 else if (code == EQ_EXPR)
9103 return constant_boolean_node (0, type);
9104 }
9105 /* If we have equivalent bases we might be able to simplify. */
9106 else if (indirect_base0 == indirect_base1
9107 && operand_equal_p (base0, base1, 0))
9108 {
9109 /* We can fold this expression to a constant if the non-constant
9110 offset parts are equal. */
9111 if ((offset0 == offset1
9112 || (offset0 && offset1
9113 && operand_equal_p (offset0, offset1, 0)))
9114 && (code == EQ_EXPR
9115 || code == NE_EXPR
9116 || (indirect_base0 && DECL_P (base0))
		  || POINTER_TYPE_OVERFLOW_UNDEFINED))
	    {
9120 if (code != EQ_EXPR
9121 && code != NE_EXPR
9122 && bitpos0 != bitpos1
9123 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9124 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9125 fold_overflow_warning (("assuming pointer wraparound does not "
9126 "occur when comparing P +- C1 with "
9127 "P +- C2"),
9128 WARN_STRICT_OVERFLOW_CONDITIONAL);
9129
9130 switch (code)
9131 {
9132 case EQ_EXPR:
9133 return constant_boolean_node (bitpos0 == bitpos1, type);
9134 case NE_EXPR:
9135 return constant_boolean_node (bitpos0 != bitpos1, type);
9136 case LT_EXPR:
9137 return constant_boolean_node (bitpos0 < bitpos1, type);
9138 case LE_EXPR:
9139 return constant_boolean_node (bitpos0 <= bitpos1, type);
9140 case GE_EXPR:
9141 return constant_boolean_node (bitpos0 >= bitpos1, type);
9142 case GT_EXPR:
9143 return constant_boolean_node (bitpos0 > bitpos1, type);
9144 default:;
9145 }
9146 }
9147 /* We can simplify the comparison to a comparison of the variable
9148 offset parts if the constant offset parts are equal.
9149 Be careful to use signed sizetype here because otherwise we
9150 mess with array offsets in the wrong way. This is possible
	     because pointer arithmetic is restricted to remain within an
9152 object and overflow on pointer differences is undefined as of
9153 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9154 else if (bitpos0 == bitpos1
9155 && ((code == EQ_EXPR || code == NE_EXPR)
9156 || (indirect_base0 && DECL_P (base0))
9157 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9158 {
9159 /* By converting to signed sizetype we cover middle-end pointer
9160 arithmetic which operates on unsigned pointer types of size
9161 type size and ARRAY_REF offsets which are properly sign or
9162 zero extended from their type in case it is narrower than
9163 sizetype. */
9164 if (offset0 == NULL_TREE)
9165 offset0 = build_int_cst (ssizetype, 0);
9166 else
9167 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9168 if (offset1 == NULL_TREE)
9169 offset1 = build_int_cst (ssizetype, 0);
9170 else
9171 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9172
9173 if (code != EQ_EXPR
9174 && code != NE_EXPR
9175 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9176 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9177 fold_overflow_warning (("assuming pointer wraparound does not "
9178 "occur when comparing P +- C1 with "
9179 "P +- C2"),
9180 WARN_STRICT_OVERFLOW_COMPARISON);
9181
9182 return fold_build2_loc (loc, code, type, offset0, offset1);
9183 }
9184 }
9185 /* For non-equal bases we can simplify if they are addresses
9186 of local binding decls or constants. */
9187 else if (indirect_base0 && indirect_base1
9188 /* We know that !operand_equal_p (base0, base1, 0)
9189 because the if condition was false. But make
9190 sure two decls are not the same. */
9191 && base0 != base1
9192 && TREE_CODE (arg0) == ADDR_EXPR
9193 && TREE_CODE (arg1) == ADDR_EXPR
9194 && (((TREE_CODE (base0) == VAR_DECL
9195 || TREE_CODE (base0) == PARM_DECL)
9196 && (targetm.binds_local_p (base0)
9197 || CONSTANT_CLASS_P (base1)))
9198 || CONSTANT_CLASS_P (base0))
9199 && (((TREE_CODE (base1) == VAR_DECL
9200 || TREE_CODE (base1) == PARM_DECL)
9201 && (targetm.binds_local_p (base1)
9202 || CONSTANT_CLASS_P (base0)))
9203 || CONSTANT_CLASS_P (base1)))
9204 {
9205 if (code == EQ_EXPR)
9206 return omit_two_operands_loc (loc, type, boolean_false_node,
9207 arg0, arg1);
9208 else if (code == NE_EXPR)
9209 return omit_two_operands_loc (loc, type, boolean_true_node,
9210 arg0, arg1);
9211 }
9212 /* For equal offsets we can simplify to a comparison of the
9213 base addresses. */
9214 else if (bitpos0 == bitpos1
9215 && (indirect_base0
9216 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9217 && (indirect_base1
9218 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9219 && ((offset0 == offset1)
9220 || (offset0 && offset1
9221 && operand_equal_p (offset0, offset1, 0))))
9222 {
9223 if (indirect_base0)
9224 base0 = build_fold_addr_expr_loc (loc, base0);
9225 if (indirect_base1)
9226 base1 = build_fold_addr_expr_loc (loc, base1);
9227 return fold_build2_loc (loc, code, type, base0, base1);
9228 }
9229 }
9230
9231 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9232 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9233 the resulting offset is smaller in absolute value than the
9234 original one. */
9235 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9236 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9237 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9238 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9239 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9240 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9241 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9242 {
9243 tree const1 = TREE_OPERAND (arg0, 1);
9244 tree const2 = TREE_OPERAND (arg1, 1);
9245 tree variable1 = TREE_OPERAND (arg0, 0);
9246 tree variable2 = TREE_OPERAND (arg1, 0);
9247 tree cst;
9248 const char * const warnmsg = G_("assuming signed overflow does not "
9249 "occur when combining constants around "
9250 "a comparison");
9251
9252 /* Put the constant on the side where it doesn't overflow and is
9253 of lower absolute value than before. */
9254 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9255 ? MINUS_EXPR : PLUS_EXPR,
9256 const2, const1);
9257 if (!TREE_OVERFLOW (cst)
9258 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9259 {
9260 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9261 return fold_build2_loc (loc, code, type,
9262 variable1,
9263 fold_build2_loc (loc,
9264 TREE_CODE (arg1), TREE_TYPE (arg1),
9265 variable2, cst));
9266 }
9267
9268 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9269 ? MINUS_EXPR : PLUS_EXPR,
9270 const1, const2);
9271 if (!TREE_OVERFLOW (cst)
9272 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9273 {
9274 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9275 return fold_build2_loc (loc, code, type,
9276 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9277 variable1, cst),
9278 variable2);
9279 }
9280 }
9281
9282 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9283 signed arithmetic case. That form is created by the compiler
9284 often enough for folding it to be of value. One example is in
9285 computing loop trip counts after Operator Strength Reduction. */
9286 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9287 && TREE_CODE (arg0) == MULT_EXPR
9288 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9289 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9290 && integer_zerop (arg1))
9291 {
9292 tree const1 = TREE_OPERAND (arg0, 1);
9293 tree const2 = arg1; /* zero */
9294 tree variable1 = TREE_OPERAND (arg0, 0);
9295 enum tree_code cmp_code = code;
9296
9297 /* Handle unfolded multiplication by zero. */
9298 if (integer_zerop (const1))
9299 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9300
9301 fold_overflow_warning (("assuming signed overflow does not occur when "
9302 "eliminating multiplication in comparison "
9303 "with zero"),
9304 WARN_STRICT_OVERFLOW_COMPARISON);
9305
9306 /* If const1 is negative we swap the sense of the comparison. */
9307 if (tree_int_cst_sgn (const1) < 0)
9308 cmp_code = swap_tree_comparison (cmp_code);
9309
9310 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9311 }
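
  /* E.g. (an illustrative note, not in the original sources): with
     undefined signed overflow, the block above rewrites  x * 4 > 0
     to  x > 0,  and  x * -4 > 0  to  x < 0,  since the sign of the
     nonzero constant factor determines the comparison direction.  */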
9312
9313 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9314 if (tem)
9315 return tem;
9316
9317 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9318 {
9319 tree targ0 = strip_float_extensions (arg0);
9320 tree targ1 = strip_float_extensions (arg1);
9321 tree newtype = TREE_TYPE (targ0);
9322
9323 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9324 newtype = TREE_TYPE (targ1);
9325
9326 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9327 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9328 return fold_build2_loc (loc, code, type,
9329 fold_convert_loc (loc, newtype, targ0),
9330 fold_convert_loc (loc, newtype, targ1));
9331
9332 /* (-a) CMP (-b) -> b CMP a */
9333 if (TREE_CODE (arg0) == NEGATE_EXPR
9334 && TREE_CODE (arg1) == NEGATE_EXPR)
9335 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9336 TREE_OPERAND (arg0, 0));
9337
9338 if (TREE_CODE (arg1) == REAL_CST)
9339 {
9340 REAL_VALUE_TYPE cst;
9341 cst = TREE_REAL_CST (arg1);
9342
9343 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9344 if (TREE_CODE (arg0) == NEGATE_EXPR)
9345 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9346 TREE_OPERAND (arg0, 0),
9347 build_real (TREE_TYPE (arg1),
9348 real_value_negate (&cst)));
9349
9350 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9351 /* a CMP (-0) -> a CMP 0 */
9352 if (REAL_VALUE_MINUS_ZERO (cst))
9353 return fold_build2_loc (loc, code, type, arg0,
9354 build_real (TREE_TYPE (arg1), dconst0));
9355
9356 /* x != NaN is always true, other ops are always false. */
9357 if (REAL_VALUE_ISNAN (cst)
9358 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9359 {
9360 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9361 return omit_one_operand_loc (loc, type, tem, arg0);
9362 }
9363
9364 /* Fold comparisons against infinity. */
9365 if (REAL_VALUE_ISINF (cst)
9366 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9367 {
9368 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9369 if (tem != NULL_TREE)
9370 return tem;
9371 }
9372 }
9373
9374 /* If this is a comparison of a real constant with a PLUS_EXPR
9375 or a MINUS_EXPR of a real constant, we can convert it into a
9376 comparison with a revised real constant as long as no overflow
9377 occurs when unsafe_math_optimizations are enabled. */
9378 if (flag_unsafe_math_optimizations
9379 && TREE_CODE (arg1) == REAL_CST
9380 && (TREE_CODE (arg0) == PLUS_EXPR
9381 || TREE_CODE (arg0) == MINUS_EXPR)
9382 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9383 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9384 ? MINUS_EXPR : PLUS_EXPR,
9385 arg1, TREE_OPERAND (arg0, 1)))
9386 && !TREE_OVERFLOW (tem))
9387 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9388
9389 /* Likewise, we can simplify a comparison of a real constant with
9390 a MINUS_EXPR whose first operand is also a real constant, i.e.
9391 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9392 floating-point types only if -fassociative-math is set. */
9393 if (flag_associative_math
9394 && TREE_CODE (arg1) == REAL_CST
9395 && TREE_CODE (arg0) == MINUS_EXPR
9396 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9397 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9398 arg1))
9399 && !TREE_OVERFLOW (tem))
9400 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9401 TREE_OPERAND (arg0, 1), tem);
9402
9403 /* Fold comparisons against built-in math functions. */
9404 if (TREE_CODE (arg1) == REAL_CST
9405 && flag_unsafe_math_optimizations
9406 && ! flag_errno_math)
9407 {
9408 enum built_in_function fcode = builtin_mathfn_code (arg0);
9409
9410 if (fcode != END_BUILTINS)
9411 {
9412 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9413 if (tem != NULL_TREE)
9414 return tem;
9415 }
9416 }
9417 }
9418
9419 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9420 && CONVERT_EXPR_P (arg0))
9421 {
9422 /* If we are widening one operand of an integer comparison,
9423 see if the other operand is similarly being widened. Perhaps we
9424 can do the comparison in the narrower type. */
9425 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9426 if (tem)
9427 return tem;
9428
9429 /* Or if we are changing signedness. */
9430 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9431 if (tem)
9432 return tem;
9433 }
9434
9435 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9436 constant, we can simplify it. */
9437 if (TREE_CODE (arg1) == INTEGER_CST
9438 && (TREE_CODE (arg0) == MIN_EXPR
9439 || TREE_CODE (arg0) == MAX_EXPR)
9440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9441 {
9442 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9443 if (tem)
9444 return tem;
9445 }
9446
9447 /* Simplify comparison of something with itself. (For IEEE
9448 floating-point, we can only do some of these simplifications.) */
9449 if (operand_equal_p (arg0, arg1, 0))
9450 {
9451 switch (code)
9452 {
9453 case EQ_EXPR:
9454 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9455 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9456 return constant_boolean_node (1, type);
9457 break;
9458
9459 case GE_EXPR:
9460 case LE_EXPR:
9461 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9462 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9463 return constant_boolean_node (1, type);
9464 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9465
9466 case NE_EXPR:
9467 /* For NE, we can only do this simplification if integer
9468 or we don't honor IEEE floating point NaNs. */
9469 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9470 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9471 break;
9472 /* ... fall through ... */
9473 case GT_EXPR:
9474 case LT_EXPR:
9475 return constant_boolean_node (0, type);
9476 default:
9477 gcc_unreachable ();
9478 }
9479 }
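
  /* E.g. (illustrative): the switch above folds x < x to false and,
     for integers, x == x to true; for IEEE floats that honor NaNs,
     x <= x is reduced to x == x, which is false exactly when x is
     a NaN.  */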
9480
9481 /* If we are comparing an expression that just has comparisons
9482 of two integer values, arithmetic expressions of those comparisons,
9483 and constants, we can simplify it. There are only three cases
9484 to check: the two values can either be equal, the first can be
9485 greater, or the second can be greater. Fold the expression for
9486 those three values. Since each value must be 0 or 1, we have
9487 eight possibilities, each of which corresponds to the constant 0
9488 or 1 or one of the six possible comparisons.
9489
9490 This handles common cases like (a > b) == 0 but also handles
9491 expressions like ((x > y) - (y > x)) > 0, which supposedly
9492 occur in macroized code. */
9493
9494 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9495 {
9496 tree cval1 = 0, cval2 = 0;
9497 int save_p = 0;
9498
9499 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9500 /* Don't handle degenerate cases here; they should already
9501 have been handled anyway. */
9502 && cval1 != 0 && cval2 != 0
9503 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9504 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9505 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9506 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9507 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9508 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9509 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9510 {
9511 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9512 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9513
9514 /* We can't just pass T to eval_subst in case cval1 or cval2
9515 was the same as ARG1. */
9516
9517 tree high_result
9518 = fold_build2_loc (loc, code, type,
9519 eval_subst (loc, arg0, cval1, maxval,
9520 cval2, minval),
9521 arg1);
9522 tree equal_result
9523 = fold_build2_loc (loc, code, type,
9524 eval_subst (loc, arg0, cval1, maxval,
9525 cval2, maxval),
9526 arg1);
9527 tree low_result
9528 = fold_build2_loc (loc, code, type,
9529 eval_subst (loc, arg0, cval1, minval,
9530 cval2, maxval),
9531 arg1);
9532
9533 /* All three of these results should be 0 or 1. Confirm they are.
9534 Then use those values to select the proper code to use. */
9535
9536 if (TREE_CODE (high_result) == INTEGER_CST
9537 && TREE_CODE (equal_result) == INTEGER_CST
9538 && TREE_CODE (low_result) == INTEGER_CST)
9539 {
9540 /* Make a 3-bit mask with the high-order bit being the
9541 value for `>', the next for '=', and the low for '<'. */
9542 switch ((integer_onep (high_result) * 4)
9543 + (integer_onep (equal_result) * 2)
9544 + integer_onep (low_result))
9545 {
9546 case 0:
9547 /* Always false. */
9548 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9549 case 1:
9550 code = LT_EXPR;
9551 break;
9552 case 2:
9553 code = EQ_EXPR;
9554 break;
9555 case 3:
9556 code = LE_EXPR;
9557 break;
9558 case 4:
9559 code = GT_EXPR;
9560 break;
9561 case 5:
9562 code = NE_EXPR;
9563 break;
9564 case 6:
9565 code = GE_EXPR;
9566 break;
9567 case 7:
9568 /* Always true. */
9569 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9570 }
9571
9572 if (save_p)
9573 {
9574 tem = save_expr (build2 (code, type, cval1, cval2));
9575 SET_EXPR_LOCATION (tem, loc);
9576 return tem;
9577 }
9578 return fold_build2_loc (loc, code, type, cval1, cval2);
9579 }
9580 }
9581 }
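
  /* A worked instance of the above (illustrative, not in the original
     sources): for (a > b) == 0 the three substitutions evaluate to
     0, 1 and 1, giving the mask value 3 and thus LE_EXPR, so the
     expression folds to a <= b.  */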
9582
9583 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9584 into a single range test. */
9585 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9586 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9587 && TREE_CODE (arg1) == INTEGER_CST
9588 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9589 && !integer_zerop (TREE_OPERAND (arg0, 1))
9590 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9591 && !TREE_OVERFLOW (arg1))
9592 {
9593 tem = fold_div_compare (loc, code, type, arg0, arg1);
9594 if (tem != NULL_TREE)
9595 return tem;
9596 }
9597
9598 /* Fold ~X op ~Y as Y op X. */
9599 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9600 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9601 {
9602 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9603 return fold_build2_loc (loc, code, type,
9604 fold_convert_loc (loc, cmp_type,
9605 TREE_OPERAND (arg1, 0)),
9606 TREE_OPERAND (arg0, 0));
9607 }
9608
9609 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9610 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9611 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9612 {
9613 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9614 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9615 TREE_OPERAND (arg0, 0),
9616 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9617 fold_convert_loc (loc, cmp_type, arg1)));
9618 }
9619
9620 return NULL_TREE;
9621 }
9622
9623
9624 /* Subroutine of fold_binary. Optimize complex multiplications of the
9625 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9626 argument EXPR represents the expression "z" of type TYPE. */
9627
9628 static tree
9629 fold_mult_zconjz (location_t loc, tree type, tree expr)
9630 {
9631 tree itype = TREE_TYPE (type);
9632 tree rpart, ipart, tem;
9633
9634 if (TREE_CODE (expr) == COMPLEX_EXPR)
9635 {
9636 rpart = TREE_OPERAND (expr, 0);
9637 ipart = TREE_OPERAND (expr, 1);
9638 }
9639 else if (TREE_CODE (expr) == COMPLEX_CST)
9640 {
9641 rpart = TREE_REALPART (expr);
9642 ipart = TREE_IMAGPART (expr);
9643 }
9644 else
9645 {
9646 expr = save_expr (expr);
9647 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9648 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9649 }
9650
9651 rpart = save_expr (rpart);
9652 ipart = save_expr (ipart);
9653 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9654 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9655 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9656 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9657 build_zero_cst (itype));
9658 }
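
/* E.g. (illustrative): for z = 3 + 4i, fold_mult_zconjz rewrites
   z * conj (z) as COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. 25 + 0i,
   matching the identity (a + bi)(a - bi) = a*a + b*b.  */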
9659
9660
9661 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9662 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9663 guarantees that P and N have the same least significant log2(M) bits.
9664 N is not otherwise constrained. In particular, N is not normalized to
9665 0 <= N < M as is common. In general, the precise value of P is unknown.
9666 M is chosen as large as possible such that constant N can be determined.
9667
9668 Returns M and sets *RESIDUE to N.
9669
9670 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9671 account. This is not always possible due to PR 35705.
9672 */
9673
9674 static unsigned HOST_WIDE_INT
9675 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9676 bool allow_func_align)
9677 {
9678 enum tree_code code;
9679
9680 *residue = 0;
9681
9682 code = TREE_CODE (expr);
9683 if (code == ADDR_EXPR)
9684 {
9685 unsigned int bitalign;
9686 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9687 *residue /= BITS_PER_UNIT;
9688 return bitalign / BITS_PER_UNIT;
9689 }
9690 else if (code == POINTER_PLUS_EXPR)
9691 {
9692 tree op0, op1;
9693 unsigned HOST_WIDE_INT modulus;
9694 enum tree_code inner_code;
9695
9696 op0 = TREE_OPERAND (expr, 0);
9697 STRIP_NOPS (op0);
9698 modulus = get_pointer_modulus_and_residue (op0, residue,
9699 allow_func_align);
9700
9701 op1 = TREE_OPERAND (expr, 1);
9702 STRIP_NOPS (op1);
9703 inner_code = TREE_CODE (op1);
9704 if (inner_code == INTEGER_CST)
9705 {
9706 *residue += TREE_INT_CST_LOW (op1);
9707 return modulus;
9708 }
9709 else if (inner_code == MULT_EXPR)
9710 {
9711 op1 = TREE_OPERAND (op1, 1);
9712 if (TREE_CODE (op1) == INTEGER_CST)
9713 {
9714 unsigned HOST_WIDE_INT align;
9715
9716 /* Compute the greatest power-of-2 divisor of op1. */
9717 align = TREE_INT_CST_LOW (op1);
9718 align &= -align;
9719
		  /* If align is non-zero and less than *modulus, replace
		     *modulus with align.  If align is 0, then either op1 is 0
9722 or the greatest power-of-2 divisor of op1 doesn't fit in an
9723 unsigned HOST_WIDE_INT. In either case, no additional
9724 constraint is imposed. */
9725 if (align)
9726 modulus = MIN (modulus, align);
9727
9728 return modulus;
9729 }
9730 }
9731 }
9732
9733 /* If we get here, we were unable to determine anything useful about the
9734 expression. */
9735 return 1;
9736 }
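
/* For example (illustrative, not in the original sources): if P is
   known to be 8-byte aligned, then for  p + 6  this returns M == 8
   with *RESIDUE == 6, while for  p + i * 12  it returns M == 4 (the
   greatest power-of-2 divisor of 12, capped by P's modulus of 8)
   with *RESIDUE == 0, i.e. the low two bits of the pointer are known
   to be zero.  */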
9737
9738 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9739 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9740
9741 static bool
9742 vec_cst_ctor_to_array (tree arg, tree *elts)
9743 {
9744 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9745
9746 if (TREE_CODE (arg) == VECTOR_CST)
9747 {
9748 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9749 elts[i] = VECTOR_CST_ELT (arg, i);
9750 }
9751 else if (TREE_CODE (arg) == CONSTRUCTOR)
9752 {
9753 constructor_elt *elt;
9754
9755 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9756 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9757 return false;
9758 else
9759 elts[i] = elt->value;
9760 }
9761 else
9762 return false;
9763 for (; i < nelts; i++)
9764 elts[i]
9765 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9766 return true;
9767 }
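
/* E.g. (illustrative): for a 4-element integer vector type, the
   CONSTRUCTOR {1, 2} fills ELTS with {1, 2, 0, 0}, as trailing
   elements default to zero.  */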
9768
9769 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9770 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9771 NULL_TREE otherwise. */
9772
9773 static tree
9774 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9775 {
9776 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9777 tree *elts;
9778 bool need_ctor = false;
9779
9780 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9781 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9782 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9783 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9784 return NULL_TREE;
9785
9786 elts = XALLOCAVEC (tree, nelts * 3);
9787 if (!vec_cst_ctor_to_array (arg0, elts)
9788 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9789 return NULL_TREE;
9790
9791 for (i = 0; i < nelts; i++)
9792 {
9793 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9794 need_ctor = true;
9795 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9796 }
9797
9798 if (need_ctor)
9799 {
9800 vec<constructor_elt, va_gc> *v;
9801 vec_alloc (v, nelts);
9802 for (i = 0; i < nelts; i++)
9803 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9804 return build_constructor (type, v);
9805 }
9806 else
9807 return build_vector (type, &elts[2 * nelts]);
9808 }
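
/* E.g. (illustrative): with ARG0 = {1, 2, 3, 4}, ARG1 = {5, 6, 7, 8}
   and SEL = {0, 5, 2, 7}, the inputs act as one concatenated
   8-element vector (indices 0-3 from ARG0, 4-7 from ARG1), yielding
   the VECTOR_CST {1, 6, 3, 8}.  */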
9809
/* Try to fold a pointer difference of type TYPE between two address
   expressions of array references AREF0 and AREF1 using location LOC.
   Return a simplified expression for the difference or NULL_TREE.  */
9813
9814 static tree
9815 fold_addr_of_array_ref_difference (location_t loc, tree type,
9816 tree aref0, tree aref1)
9817 {
9818 tree base0 = TREE_OPERAND (aref0, 0);
9819 tree base1 = TREE_OPERAND (aref1, 0);
9820 tree base_offset = build_int_cst (type, 0);
9821
9822 /* If the bases are array references as well, recurse. If the bases
9823 are pointer indirections compute the difference of the pointers.
9824 If the bases are equal, we are set. */
9825 if ((TREE_CODE (base0) == ARRAY_REF
9826 && TREE_CODE (base1) == ARRAY_REF
9827 && (base_offset
9828 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9829 || (INDIRECT_REF_P (base0)
9830 && INDIRECT_REF_P (base1)
9831 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9832 TREE_OPERAND (base0, 0),
9833 TREE_OPERAND (base1, 0))))
9834 || operand_equal_p (base0, base1, 0))
9835 {
9836 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9837 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9838 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9839 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9840 return fold_build2_loc (loc, PLUS_EXPR, type,
9841 base_offset,
9842 fold_build2_loc (loc, MULT_EXPR, type,
9843 diff, esz));
9844 }
9845 return NULL_TREE;
9846 }
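
/* E.g. (illustrative): for int a[10], the difference &a[i] - &a[j]
   folds to (i - j) * 4, the index difference times the element size
   in bytes, plus the recursively folded difference of the bases when
   those are themselves array references.  */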
9847
9848 /* If the real or vector real constant CST of type TYPE has an exact
9849 inverse, return it, else return NULL. */
9850
9851 static tree
9852 exact_inverse (tree type, tree cst)
9853 {
9854 REAL_VALUE_TYPE r;
9855 tree unit_type, *elts;
9856 enum machine_mode mode;
9857 unsigned vec_nelts, i;
9858
9859 switch (TREE_CODE (cst))
9860 {
9861 case REAL_CST:
9862 r = TREE_REAL_CST (cst);
9863
9864 if (exact_real_inverse (TYPE_MODE (type), &r))
9865 return build_real (type, r);
9866
9867 return NULL_TREE;
9868
9869 case VECTOR_CST:
9870 vec_nelts = VECTOR_CST_NELTS (cst);
9871 elts = XALLOCAVEC (tree, vec_nelts);
9872 unit_type = TREE_TYPE (type);
9873 mode = TYPE_MODE (unit_type);
9874
9875 for (i = 0; i < vec_nelts; i++)
9876 {
9877 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9878 if (!exact_real_inverse (mode, &r))
9879 return NULL_TREE;
9880 elts[i] = build_real (unit_type, r);
9881 }
9882
9883 return build_vector (type, elts);
9884
9885 default:
9886 return NULL_TREE;
9887 }
9888 }
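
/* E.g. (illustrative): exact_inverse returns 0.25 for the constant
   4.0, since 1/4 is exactly representable in binary floating point,
   but NULL_TREE for 3.0, whose inverse 1/3 is not.  */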
9889
9890 /* Fold a binary expression of code CODE and type TYPE with operands
9891 OP0 and OP1. LOC is the location of the resulting expression.
9892 Return the folded expression if folding is successful. Otherwise,
9893 return NULL_TREE. */
9894
9895 tree
9896 fold_binary_loc (location_t loc,
9897 enum tree_code code, tree type, tree op0, tree op1)
9898 {
9899 enum tree_code_class kind = TREE_CODE_CLASS (code);
9900 tree arg0, arg1, tem;
9901 tree t1 = NULL_TREE;
9902 bool strict_overflow_p;
9903 unsigned int prec;
9904
9905 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9906 && TREE_CODE_LENGTH (code) == 2
9907 && op0 != NULL_TREE
9908 && op1 != NULL_TREE);
9909
9910 arg0 = op0;
9911 arg1 = op1;
9912
9913 /* Strip any conversions that don't change the mode. This is
9914 safe for every expression, except for a comparison expression
9915 because its signedness is derived from its operands. So, in
9916 the latter case, only strip conversions that don't change the
9917 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9918 preserved.
9919
9920 Note that this is done as an internal manipulation within the
9921 constant folder, in order to find the simplest representation
     of the arguments so that their form can be studied.  In any
     case, the appropriate type conversions should be put back in
9924 the tree that will get out of the constant folder. */
9925
9926 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9927 {
9928 STRIP_SIGN_NOPS (arg0);
9929 STRIP_SIGN_NOPS (arg1);
9930 }
9931 else
9932 {
9933 STRIP_NOPS (arg0);
9934 STRIP_NOPS (arg1);
9935 }
9936
9937 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9938 constant but we can't do arithmetic on them. */
9939 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
9940 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
9941 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
9942 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
9943 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
9944 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
9945 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
9946 {
9947 if (kind == tcc_binary)
9948 {
9949 /* Make sure type and arg0 have the same saturating flag. */
9950 gcc_assert (TYPE_SATURATING (type)
9951 == TYPE_SATURATING (TREE_TYPE (arg0)));
9952 tem = const_binop (code, arg0, arg1);
9953 }
9954 else if (kind == tcc_comparison)
9955 tem = fold_relational_const (code, type, arg0, arg1);
9956 else
9957 tem = NULL_TREE;
9958
9959 if (tem != NULL_TREE)
9960 {
9961 if (TREE_TYPE (tem) != type)
9962 tem = fold_convert_loc (loc, type, tem);
9963 return tem;
9964 }
9965 }
9966
9967 /* If this is a commutative operation, and ARG0 is a constant, move it
9968 to ARG1 to reduce the number of tests below. */
9969 if (commutative_tree_code (code)
9970 && tree_swap_operands_p (arg0, arg1, true))
9971 return fold_build2_loc (loc, code, type, op1, op0);
9972
9973 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9974
9975 First check for cases where an arithmetic operation is applied to a
9976 compound, conditional, or comparison operation. Push the arithmetic
9977 operation inside the compound or conditional to see if any folding
9978 can then be done. Convert comparison to conditional for this purpose.
     This also optimizes non-constant cases that used to be done in
     expand_expr.
9981
     Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
     where one operand is a comparison and the other is a comparison, a
     BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9985 code below would make the expression more complex. Change it to a
9986 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9987 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9988
9989 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9990 || code == EQ_EXPR || code == NE_EXPR)
9991 && TREE_CODE (type) != VECTOR_TYPE
9992 && ((truth_value_p (TREE_CODE (arg0))
9993 && (truth_value_p (TREE_CODE (arg1))
9994 || (TREE_CODE (arg1) == BIT_AND_EXPR
9995 && integer_onep (TREE_OPERAND (arg1, 1)))))
9996 || (truth_value_p (TREE_CODE (arg1))
9997 && (truth_value_p (TREE_CODE (arg0))
9998 || (TREE_CODE (arg0) == BIT_AND_EXPR
9999 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10000 {
10001 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10002 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10003 : TRUTH_XOR_EXPR,
10004 boolean_type_node,
10005 fold_convert_loc (loc, boolean_type_node, arg0),
10006 fold_convert_loc (loc, boolean_type_node, arg1));
10007
10008 if (code == EQ_EXPR)
10009 tem = invert_truthvalue_loc (loc, tem);
10010
10011 return fold_convert_loc (loc, type, tem);
10012 }
10013
10014 if (TREE_CODE_CLASS (code) == tcc_binary
10015 || TREE_CODE_CLASS (code) == tcc_comparison)
10016 {
10017 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10018 {
10019 tem = fold_build2_loc (loc, code, type,
10020 fold_convert_loc (loc, TREE_TYPE (op0),
10021 TREE_OPERAND (arg0, 1)), op1);
10022 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10023 tem);
10024 }
10025 if (TREE_CODE (arg1) == COMPOUND_EXPR
10026 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10027 {
10028 tem = fold_build2_loc (loc, code, type, op0,
10029 fold_convert_loc (loc, TREE_TYPE (op1),
10030 TREE_OPERAND (arg1, 1)));
10031 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10032 tem);
10033 }
10034
10035 if (TREE_CODE (arg0) == COND_EXPR
10036 || TREE_CODE (arg0) == VEC_COND_EXPR
10037 || COMPARISON_CLASS_P (arg0))
10038 {
10039 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10040 arg0, arg1,
10041 /*cond_first_p=*/1);
10042 if (tem != NULL_TREE)
10043 return tem;
10044 }
10045
10046 if (TREE_CODE (arg1) == COND_EXPR
10047 || TREE_CODE (arg1) == VEC_COND_EXPR
10048 || COMPARISON_CLASS_P (arg1))
10049 {
10050 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10051 arg1, arg0,
10052 /*cond_first_p=*/0);
10053 if (tem != NULL_TREE)
10054 return tem;
10055 }
10056 }
10057
10058 switch (code)
10059 {
10060 case MEM_REF:
10061 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10062 if (TREE_CODE (arg0) == ADDR_EXPR
10063 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10064 {
10065 tree iref = TREE_OPERAND (arg0, 0);
10066 return fold_build2 (MEM_REF, type,
10067 TREE_OPERAND (iref, 0),
10068 int_const_binop (PLUS_EXPR, arg1,
10069 TREE_OPERAND (iref, 1)));
10070 }
10071
10072 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10073 if (TREE_CODE (arg0) == ADDR_EXPR
10074 && handled_component_p (TREE_OPERAND (arg0, 0)))
10075 {
10076 tree base;
10077 HOST_WIDE_INT coffset;
10078 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10079 &coffset);
10080 if (!base)
10081 return NULL_TREE;
10082 return fold_build2 (MEM_REF, type,
10083 build_fold_addr_expr (base),
10084 int_const_binop (PLUS_EXPR, arg1,
10085 size_int (coffset)));
10086 }
10087
10088 return NULL_TREE;
10089
10090 case POINTER_PLUS_EXPR:
10091 /* 0 +p index -> (type)index */
10092 if (integer_zerop (arg0))
10093 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10094
10095 /* PTR +p 0 -> PTR */
10096 if (integer_zerop (arg1))
10097 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10098
10099 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10100 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10101 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10102 return fold_convert_loc (loc, type,
10103 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10104 fold_convert_loc (loc, sizetype,
10105 arg1),
10106 fold_convert_loc (loc, sizetype,
10107 arg0)));
10108
10109 /* (PTR +p B) +p A -> PTR +p (B + A) */
10110 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10111 {
10112 tree inner;
10113 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10114 tree arg00 = TREE_OPERAND (arg0, 0);
10115 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10116 arg01, fold_convert_loc (loc, sizetype, arg1));
10117 return fold_convert_loc (loc, type,
10118 fold_build_pointer_plus_loc (loc,
10119 arg00, inner));
10120 }
10121
      /* PTR_CST +p CST -> CST1, i.e. fold the sum to a new constant.  */
10123 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10124 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10125 fold_convert_loc (loc, type, arg1));
10126
      /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
	 of the array.  The loop optimizer sometimes produces this type of
	 expression.  */
10130 if (TREE_CODE (arg0) == ADDR_EXPR)
10131 {
10132 tem = try_move_mult_to_index (loc, arg0,
10133 fold_convert_loc (loc,
10134 ssizetype, arg1));
10135 if (tem)
10136 return fold_convert_loc (loc, type, tem);
10137 }
10138
10139 return NULL_TREE;
10140
10141 case PLUS_EXPR:
10142 /* A + (-B) -> A - B */
10143 if (TREE_CODE (arg1) == NEGATE_EXPR)
10144 return fold_build2_loc (loc, MINUS_EXPR, type,
10145 fold_convert_loc (loc, type, arg0),
10146 fold_convert_loc (loc, type,
10147 TREE_OPERAND (arg1, 0)));
10148 /* (-A) + B -> B - A */
10149 if (TREE_CODE (arg0) == NEGATE_EXPR
10150 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10151 return fold_build2_loc (loc, MINUS_EXPR, type,
10152 fold_convert_loc (loc, type, arg1),
10153 fold_convert_loc (loc, type,
10154 TREE_OPERAND (arg0, 0)));
10155
10156 if (INTEGRAL_TYPE_P (type))
10157 {
10158 /* Convert ~A + 1 to -A. */
10159 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10160 && integer_onep (arg1))
10161 return fold_build1_loc (loc, NEGATE_EXPR, type,
10162 fold_convert_loc (loc, type,
10163 TREE_OPERAND (arg0, 0)));
10164
10165 /* ~X + X is -1. */
10166 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10167 && !TYPE_OVERFLOW_TRAPS (type))
10168 {
10169 tree tem = TREE_OPERAND (arg0, 0);
10170
10171 STRIP_NOPS (tem);
10172 if (operand_equal_p (tem, arg1, 0))
10173 {
10174 t1 = build_minus_one_cst (type);
10175 return omit_one_operand_loc (loc, type, t1, arg1);
10176 }
10177 }
10178
10179 /* X + ~X is -1. */
10180 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10181 && !TYPE_OVERFLOW_TRAPS (type))
10182 {
10183 tree tem = TREE_OPERAND (arg1, 0);
10184
10185 STRIP_NOPS (tem);
10186 if (operand_equal_p (arg0, tem, 0))
10187 {
10188 t1 = build_minus_one_cst (type);
10189 return omit_one_operand_loc (loc, type, t1, arg0);
10190 }
10191 }
10192
10193 /* X + (X / CST) * -CST is X % CST. */
10194 if (TREE_CODE (arg1) == MULT_EXPR
10195 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10196 && operand_equal_p (arg0,
10197 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10198 {
10199 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10200 tree cst1 = TREE_OPERAND (arg1, 1);
10201 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10202 cst1, cst0);
10203 if (sum && integer_zerop (sum))
10204 return fold_convert_loc (loc, type,
10205 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10206 TREE_TYPE (arg0), arg0,
10207 cst0));
10208 }
10209 }
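
      /* E.g. (an illustrative note): with X == 27 and CST == 8 the
	 X % CST folding above yields 27 % 8, and indeed
	 27 + (27 / 8) * -8 == 27 - 24 == 3 == 27 % 8.  */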
10210
10211 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10212 one. Make sure the type is not saturating and has the signedness of
10213 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10214 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10215 if ((TREE_CODE (arg0) == MULT_EXPR
10216 || TREE_CODE (arg1) == MULT_EXPR)
10217 && !TYPE_SATURATING (type)
10218 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10219 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10220 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10221 {
10222 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10223 if (tem)
10224 return tem;
10225 }
10226
10227 if (! FLOAT_TYPE_P (type))
10228 {
10229 if (integer_zerop (arg1))
10230 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10231
10232 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10233 with a constant, and the two constants have no bits in common,
10234 we should treat this as a BIT_IOR_EXPR since this may produce more
10235 simplifications. */
10236 if (TREE_CODE (arg0) == BIT_AND_EXPR
10237 && TREE_CODE (arg1) == BIT_AND_EXPR
10238 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10239 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10240 && integer_zerop (const_binop (BIT_AND_EXPR,
10241 TREE_OPERAND (arg0, 1),
10242 TREE_OPERAND (arg1, 1))))
10243 {
10244 code = BIT_IOR_EXPR;
10245 goto bit_ior;
10246 }
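/* Worked example with illustrative masks: 0xF0 and 0x0F share no
bits, so no bit position can carry and
(x & 0xF0) + (y & 0x0F) == (x & 0xF0) | (y & 0x0F);
the IOR form exposes further bitwise simplifications. */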
10247
10248 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10249 (plus (plus (mult) (mult)) (foo)) so that we can
10250 take advantage of the factoring cases below. */
10251 if (TYPE_OVERFLOW_WRAPS (type)
10252 && (((TREE_CODE (arg0) == PLUS_EXPR
10253 || TREE_CODE (arg0) == MINUS_EXPR)
10254 && TREE_CODE (arg1) == MULT_EXPR)
10255 || ((TREE_CODE (arg1) == PLUS_EXPR
10256 || TREE_CODE (arg1) == MINUS_EXPR)
10257 && TREE_CODE (arg0) == MULT_EXPR)))
10258 {
10259 tree parg0, parg1, parg, marg;
10260 enum tree_code pcode;
10261
10262 if (TREE_CODE (arg1) == MULT_EXPR)
10263 parg = arg0, marg = arg1;
10264 else
10265 parg = arg1, marg = arg0;
10266 pcode = TREE_CODE (parg);
10267 parg0 = TREE_OPERAND (parg, 0);
10268 parg1 = TREE_OPERAND (parg, 1);
10269 STRIP_NOPS (parg0);
10270 STRIP_NOPS (parg1);
10271
10272 if (TREE_CODE (parg0) == MULT_EXPR
10273 && TREE_CODE (parg1) != MULT_EXPR)
10274 return fold_build2_loc (loc, pcode, type,
10275 fold_build2_loc (loc, PLUS_EXPR, type,
10276 fold_convert_loc (loc, type,
10277 parg0),
10278 fold_convert_loc (loc, type,
10279 marg)),
10280 fold_convert_loc (loc, type, parg1));
10281 if (TREE_CODE (parg0) != MULT_EXPR
10282 && TREE_CODE (parg1) == MULT_EXPR)
10283 return
10284 fold_build2_loc (loc, PLUS_EXPR, type,
10285 fold_convert_loc (loc, type, parg0),
10286 fold_build2_loc (loc, pcode, type,
10287 fold_convert_loc (loc, type, marg),
10288 fold_convert_loc (loc, type,
10289 parg1)));
10290 }
10291 }
10292 else
10293 {
10294 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10295 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10296 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10297
10298 /* Likewise if the operands are reversed. */
10299 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10300 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10301
10302 /* Convert X + -C into X - C. */
10303 if (TREE_CODE (arg1) == REAL_CST
10304 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10305 {
10306 tem = fold_negate_const (arg1, type);
10307 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10308 return fold_build2_loc (loc, MINUS_EXPR, type,
10309 fold_convert_loc (loc, type, arg0),
10310 fold_convert_loc (loc, type, tem));
10311 }
10312
10313 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10314 to __complex__ ( x, y ). This is not the same for SNaNs or
10315 if signed zeros are involved. */
10316 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10317 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10318 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10319 {
10320 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10321 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10322 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10323 bool arg0rz = false, arg0iz = false;
10324 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10325 || (arg0i && (arg0iz = real_zerop (arg0i))))
10326 {
10327 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10328 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10329 if (arg0rz && arg1i && real_zerop (arg1i))
10330 {
10331 tree rp = arg1r ? arg1r
10332 : build1 (REALPART_EXPR, rtype, arg1);
10333 tree ip = arg0i ? arg0i
10334 : build1 (IMAGPART_EXPR, rtype, arg0);
10335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10336 }
10337 else if (arg0iz && arg1r && real_zerop (arg1r))
10338 {
10339 tree rp = arg0r ? arg0r
10340 : build1 (REALPART_EXPR, rtype, arg0);
10341 tree ip = arg1i ? arg1i
10342 : build1 (IMAGPART_EXPR, rtype, arg1);
10343 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10344 }
10345 }
10346 }
10347
10348 if (flag_unsafe_math_optimizations
10349 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10350 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10351 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10352 return tem;
10353
10354 /* Convert x+x into x*2.0. */
10355 if (operand_equal_p (arg0, arg1, 0)
10356 && SCALAR_FLOAT_TYPE_P (type))
10357 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10358 build_real (type, dconst2));
10359
10360 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10361 We associate floats only if the user has specified
10362 -fassociative-math. */
10363 if (flag_associative_math
10364 && TREE_CODE (arg1) == PLUS_EXPR
10365 && TREE_CODE (arg0) != MULT_EXPR)
10366 {
10367 tree tree10 = TREE_OPERAND (arg1, 0);
10368 tree tree11 = TREE_OPERAND (arg1, 1);
10369 if (TREE_CODE (tree11) == MULT_EXPR
10370 && TREE_CODE (tree10) == MULT_EXPR)
10371 {
10372 tree tree0;
10373 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10374 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10375 }
10376 }
10377 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10378 We associate floats only if the user has specified
10379 -fassociative-math. */
10380 if (flag_associative_math
10381 && TREE_CODE (arg0) == PLUS_EXPR
10382 && TREE_CODE (arg1) != MULT_EXPR)
10383 {
10384 tree tree00 = TREE_OPERAND (arg0, 0);
10385 tree tree01 = TREE_OPERAND (arg0, 1);
10386 if (TREE_CODE (tree01) == MULT_EXPR
10387 && TREE_CODE (tree00) == MULT_EXPR)
10388 {
10389 tree tree0;
10390 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10391 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10392 }
10393 }
10394 }
10395
10396 bit_rotate:
10397 /* (A << C1) + (A >> C2) is a rotate of A by C1 bits if A is unsigned
10398 and C1 + C2 is the size of A. */
10399 /* (A << B) + (A >> (Z - B)) is a rotate of A by B bits if A is unsigned
10400 and Z is the size of A. */
10401 {
10402 enum tree_code code0, code1;
10403 tree rtype;
10404 code0 = TREE_CODE (arg0);
10405 code1 = TREE_CODE (arg1);
10406 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10407 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10408 && operand_equal_p (TREE_OPERAND (arg0, 0),
10409 TREE_OPERAND (arg1, 0), 0)
10410 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10411 TYPE_UNSIGNED (rtype))
10412 /* Only create rotates in complete modes. Other cases are not
10413 expanded properly. */
10414 && (element_precision (rtype)
10415 == element_precision (TYPE_MODE (rtype))))
10416 {
10417 tree tree01, tree11;
10418 enum tree_code code01, code11;
10419
10420 tree01 = TREE_OPERAND (arg0, 1);
10421 tree11 = TREE_OPERAND (arg1, 1);
10422 STRIP_NOPS (tree01);
10423 STRIP_NOPS (tree11);
10424 code01 = TREE_CODE (tree01);
10425 code11 = TREE_CODE (tree11);
10426 if (code01 == INTEGER_CST
10427 && code11 == INTEGER_CST
10428 && TREE_INT_CST_HIGH (tree01) == 0
10429 && TREE_INT_CST_HIGH (tree11) == 0
10430 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10431 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10432 {
10433 tem = build2_loc (loc, LROTATE_EXPR,
10434 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10435 TREE_OPERAND (arg0, 0),
10436 code0 == LSHIFT_EXPR ? tree01 : tree11);
10437 return fold_convert_loc (loc, type, tem);
10438 }
10439 else if (code11 == MINUS_EXPR)
10440 {
10441 tree tree110, tree111;
10442 tree110 = TREE_OPERAND (tree11, 0);
10443 tree111 = TREE_OPERAND (tree11, 1);
10444 STRIP_NOPS (tree110);
10445 STRIP_NOPS (tree111);
10446 if (TREE_CODE (tree110) == INTEGER_CST
10447 && 0 == compare_tree_int (tree110,
10448 element_precision
10449 (TREE_TYPE (TREE_OPERAND
10450 (arg0, 0))))
10451 && operand_equal_p (tree01, tree111, 0))
10452 return
10453 fold_convert_loc (loc, type,
10454 build2 ((code0 == LSHIFT_EXPR
10455 ? LROTATE_EXPR
10456 : RROTATE_EXPR),
10457 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10458 TREE_OPERAND (arg0, 0), tree01));
10459 }
10460 else if (code01 == MINUS_EXPR)
10461 {
10462 tree tree010, tree011;
10463 tree010 = TREE_OPERAND (tree01, 0);
10464 tree011 = TREE_OPERAND (tree01, 1);
10465 STRIP_NOPS (tree010);
10466 STRIP_NOPS (tree011);
10467 if (TREE_CODE (tree010) == INTEGER_CST
10468 && 0 == compare_tree_int (tree010,
10469 element_precision
10470 (TREE_TYPE (TREE_OPERAND
10471 (arg0, 0))))
10472 && operand_equal_p (tree11, tree011, 0))
10473 return fold_convert_loc
10474 (loc, type,
10475 build2 ((code0 != LSHIFT_EXPR
10476 ? LROTATE_EXPR
10477 : RROTATE_EXPR),
10478 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10479 TREE_OPERAND (arg0, 0), tree11));
10480 }
10481 }
10482 }
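/* Sketch of the rotate patterns above for a 32-bit unsigned x (the
width is illustrative): both
(x << 5) + (x >> 27)
(x << b) + (x >> (32 - b))
are recognized as left-rotates of x (by 5 and by b bits); the two
shifted halves cannot overlap, so + behaves like |. */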
10483
10484 associate:
10485 /* In most languages, we can't associate operations on floats through
10486 parentheses. Rather than remembering where the parentheses were, we
10487 don't associate floats at all, unless the user has specified
10488 -fassociative-math.
10489 And we need to make sure the type is not saturating. */
10490
10491 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10492 && !TYPE_SATURATING (type))
10493 {
10494 tree var0, con0, lit0, minus_lit0;
10495 tree var1, con1, lit1, minus_lit1;
10496 tree atype = type;
10497 bool ok = true;
10498
10499 /* Split both trees into variables, constants, and literals. Then
10500 associate each group together, the constants with literals,
10501 then the result with variables. This increases the chances of
10502 literals being recombined later and of generating relocatable
10503 expressions for the sum of a constant and literal. */
10504 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10505 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10506 code == MINUS_EXPR);
10507
10508 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10509 if (code == MINUS_EXPR)
10510 code = PLUS_EXPR;
10511
10512 /* With undefined overflow prefer doing association in a type
10513 which wraps on overflow, if that is one of the operand types. */
10514 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10515 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10516 {
10517 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10518 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10519 atype = TREE_TYPE (arg0);
10520 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10521 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10522 atype = TREE_TYPE (arg1);
10523 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10524 }
10525
10526 /* With undefined overflow we can only associate constants with one
10527 variable, and constants whose association doesn't overflow. */
10528 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10529 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10530 {
10531 if (var0 && var1)
10532 {
10533 tree tmp0 = var0;
10534 tree tmp1 = var1;
10535
10536 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10537 tmp0 = TREE_OPERAND (tmp0, 0);
10538 if (CONVERT_EXPR_P (tmp0)
10539 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10540 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10541 <= TYPE_PRECISION (atype)))
10542 tmp0 = TREE_OPERAND (tmp0, 0);
10543 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10544 tmp1 = TREE_OPERAND (tmp1, 0);
10545 if (CONVERT_EXPR_P (tmp1)
10546 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10547 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10548 <= TYPE_PRECISION (atype)))
10549 tmp1 = TREE_OPERAND (tmp1, 0);
10550 /* The only case we can still associate with two variables
10551 is if they are the same, modulo negation and bit-pattern
10552 preserving conversions. */
10553 if (!operand_equal_p (tmp0, tmp1, 0))
10554 ok = false;
10555 }
10556 }
10557
10558 /* Only do something if we found more than two objects. Otherwise,
10559 nothing has changed and we risk infinite recursion. */
10560 if (ok
10561 && (2 < ((var0 != 0) + (var1 != 0)
10562 + (con0 != 0) + (con1 != 0)
10563 + (lit0 != 0) + (lit1 != 0)
10564 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10565 {
10566 bool any_overflows = false;
10567 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10568 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10569 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10570 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10571 var0 = associate_trees (loc, var0, var1, code, atype);
10572 con0 = associate_trees (loc, con0, con1, code, atype);
10573 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10574 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10575 code, atype);
10576
10577 /* Preserve the MINUS_EXPR if the negative part of the literal is
10578 greater than the positive part. Otherwise, the multiplicative
10579 folding code (i.e. extract_muldiv) may be fooled when unsigned
10580 constants are subtracted, as in the following
10581 example: ((X*2 + 4) - 8U)/2. */
10582 if (minus_lit0 && lit0)
10583 {
10584 if (TREE_CODE (lit0) == INTEGER_CST
10585 && TREE_CODE (minus_lit0) == INTEGER_CST
10586 && tree_int_cst_lt (lit0, minus_lit0))
10587 {
10588 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10589 MINUS_EXPR, atype);
10590 lit0 = 0;
10591 }
10592 else
10593 {
10594 lit0 = associate_trees (loc, lit0, minus_lit0,
10595 MINUS_EXPR, atype);
10596 minus_lit0 = 0;
10597 }
10598 }
10599
10600 /* Don't introduce overflows through reassociation. */
10601 if (!any_overflows
10602 && ((lit0 && TREE_OVERFLOW (lit0))
10603 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10604 return NULL_TREE;
10605
10606 if (minus_lit0)
10607 {
10608 if (con0 == 0)
10609 return
10610 fold_convert_loc (loc, type,
10611 associate_trees (loc, var0, minus_lit0,
10612 MINUS_EXPR, atype));
10613 else
10614 {
10615 con0 = associate_trees (loc, con0, minus_lit0,
10616 MINUS_EXPR, atype);
10617 return
10618 fold_convert_loc (loc, type,
10619 associate_trees (loc, var0, con0,
10620 PLUS_EXPR, atype));
10621 }
10622 }
10623
10624 con0 = associate_trees (loc, con0, lit0, code, atype);
10625 return
10626 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10627 code, atype));
10628 }
10629 }
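/* Worked example of the association above (names illustrative):
for (x + 1) + (y + 2), split_tree yields variables x and y and
literals 1 and 2; associating each group gives (x + y) + 3, i.e.
the two constants are combined into a single literal. */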
10630
10631 return NULL_TREE;
10632
10633 case MINUS_EXPR:
10634 /* Pointer simplifications for subtraction, simple reassociations. */
10635 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10636 {
10637 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10638 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10639 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10640 {
10641 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10642 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10643 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10644 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10645 return fold_build2_loc (loc, PLUS_EXPR, type,
10646 fold_build2_loc (loc, MINUS_EXPR, type,
10647 arg00, arg10),
10648 fold_build2_loc (loc, MINUS_EXPR, type,
10649 arg01, arg11));
10650 }
10651 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10652 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10653 {
10654 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10655 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10656 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10657 fold_convert_loc (loc, type, arg1));
10658 if (tmp)
10659 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10660 }
10661 }
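/* Sketch with hypothetical operands: for byte offsets a and b,
(p p+ a) - (q p+ b) folds to (p - q) + (a - b), and
(p p+ a) - q folds to (p - q) + a whenever p - q itself
simplifies (e.g. p == q leaves plain a). */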
10662 /* A - (-B) -> A + B */
10663 if (TREE_CODE (arg1) == NEGATE_EXPR)
10664 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10665 fold_convert_loc (loc, type,
10666 TREE_OPERAND (arg1, 0)));
10667 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10668 if (TREE_CODE (arg0) == NEGATE_EXPR
10669 && (FLOAT_TYPE_P (type)
10670 || INTEGRAL_TYPE_P (type))
10671 && negate_expr_p (arg1)
10672 && reorder_operands_p (arg0, arg1))
10673 return fold_build2_loc (loc, MINUS_EXPR, type,
10674 fold_convert_loc (loc, type,
10675 negate_expr (arg1)),
10676 fold_convert_loc (loc, type,
10677 TREE_OPERAND (arg0, 0)));
10678 /* Convert -A - 1 to ~A. */
10679 if (INTEGRAL_TYPE_P (type)
10680 && TREE_CODE (arg0) == NEGATE_EXPR
10681 && integer_onep (arg1)
10682 && !TYPE_OVERFLOW_TRAPS (type))
10683 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10684 fold_convert_loc (loc, type,
10685 TREE_OPERAND (arg0, 0)));
10686
10687 /* Convert -1 - A to ~A. */
10688 if (INTEGRAL_TYPE_P (type)
10689 && integer_all_onesp (arg0))
10690 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10691
10692
10693 /* X - (X / CST) * CST is X % CST. */
10694 if (INTEGRAL_TYPE_P (type)
10695 && TREE_CODE (arg1) == MULT_EXPR
10696 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10697 && operand_equal_p (arg0,
10698 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10699 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10700 TREE_OPERAND (arg1, 1), 0))
10701 return
10702 fold_convert_loc (loc, type,
10703 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10704 arg0, TREE_OPERAND (arg1, 1)));
10705
10706 if (! FLOAT_TYPE_P (type))
10707 {
10708 if (integer_zerop (arg0))
10709 return negate_expr (fold_convert_loc (loc, type, arg1));
10710 if (integer_zerop (arg1))
10711 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10712
10713 /* Fold A - (A & B) into ~B & A. */
10714 if (!TREE_SIDE_EFFECTS (arg0)
10715 && TREE_CODE (arg1) == BIT_AND_EXPR)
10716 {
10717 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10718 {
10719 tree arg10 = fold_convert_loc (loc, type,
10720 TREE_OPERAND (arg1, 0));
10721 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10722 fold_build1_loc (loc, BIT_NOT_EXPR,
10723 type, arg10),
10724 fold_convert_loc (loc, type, arg0));
10725 }
10726 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10727 {
10728 tree arg11 = fold_convert_loc (loc,
10729 type, TREE_OPERAND (arg1, 1));
10730 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10731 fold_build1_loc (loc, BIT_NOT_EXPR,
10732 type, arg11),
10733 fold_convert_loc (loc, type, arg0));
10734 }
10735 }
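/* Worked example with illustrative bit patterns: for A == 0b1100
and B == 0b1010, A & B == 0b1000 and A - (A & B) == 0b0100, which
is exactly ~B & A; the subtraction clears precisely those bits of
A that are also set in B, so no borrow can occur. */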
10736
10737 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10738 any power of 2 minus 1. */
10739 if (TREE_CODE (arg0) == BIT_AND_EXPR
10740 && TREE_CODE (arg1) == BIT_AND_EXPR
10741 && operand_equal_p (TREE_OPERAND (arg0, 0),
10742 TREE_OPERAND (arg1, 0), 0))
10743 {
10744 tree mask0 = TREE_OPERAND (arg0, 1);
10745 tree mask1 = TREE_OPERAND (arg1, 1);
10746 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10747
10748 if (operand_equal_p (tem, mask1, 0))
10749 {
10750 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10751 TREE_OPERAND (arg0, 0), mask1);
10752 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10753 }
10754 }
10755 }
10756
10757 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10758 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10759 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10760
10761 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10762 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10763 (-ARG1 + ARG0) reduces to -ARG1. */
10764 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10765 return negate_expr (fold_convert_loc (loc, type, arg1));
10766
10767 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10768 __complex__ ( x, -y ). This is not the same for SNaNs or if
10769 signed zeros are involved. */
10770 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10771 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10772 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10773 {
10774 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10775 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10776 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10777 bool arg0rz = false, arg0iz = false;
10778 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10779 || (arg0i && (arg0iz = real_zerop (arg0i))))
10780 {
10781 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10782 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10783 if (arg0rz && arg1i && real_zerop (arg1i))
10784 {
10785 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10786 arg1r ? arg1r
10787 : build1 (REALPART_EXPR, rtype, arg1));
10788 tree ip = arg0i ? arg0i
10789 : build1 (IMAGPART_EXPR, rtype, arg0);
10790 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10791 }
10792 else if (arg0iz && arg1r && real_zerop (arg1r))
10793 {
10794 tree rp = arg0r ? arg0r
10795 : build1 (REALPART_EXPR, rtype, arg0);
10796 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10797 arg1i ? arg1i
10798 : build1 (IMAGPART_EXPR, rtype, arg1));
10799 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10800 }
10801 }
10802 }
10803
10804 /* Fold &x - &x. This can happen from &x.foo - &x.
10805 This is unsafe for certain floats even in non-IEEE formats.
10806 In IEEE, it is unsafe because it gives the wrong result for NaNs.
10807 Also note that operand_equal_p is always false if an operand
10808 is volatile. */
10809
10810 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10811 && operand_equal_p (arg0, arg1, 0))
10812 return build_zero_cst (type);
10813
10814 /* A - B -> A + (-B) if B is easily negatable. */
10815 if (negate_expr_p (arg1)
10816 && ((FLOAT_TYPE_P (type)
10817 /* Avoid this transformation if B is a positive REAL_CST. */
10818 && (TREE_CODE (arg1) != REAL_CST
10819 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10820 || INTEGRAL_TYPE_P (type)))
10821 return fold_build2_loc (loc, PLUS_EXPR, type,
10822 fold_convert_loc (loc, type, arg0),
10823 fold_convert_loc (loc, type,
10824 negate_expr (arg1)));
10825
10826 /* Try folding difference of addresses. */
10827 {
10828 HOST_WIDE_INT diff;
10829
10830 if ((TREE_CODE (arg0) == ADDR_EXPR
10831 || TREE_CODE (arg1) == ADDR_EXPR)
10832 && ptr_difference_const (arg0, arg1, &diff))
10833 return build_int_cst_type (type, diff);
10834 }
10835
10836 /* Fold &a[i] - &a[j] to i-j. */
10837 if (TREE_CODE (arg0) == ADDR_EXPR
10838 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10839 && TREE_CODE (arg1) == ADDR_EXPR
10840 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10841 {
10842 tree tem = fold_addr_of_array_ref_difference (loc, type,
10843 TREE_OPERAND (arg0, 0),
10844 TREE_OPERAND (arg1, 0));
10845 if (tem)
10846 return tem;
10847 }
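/* Sketch with a hypothetical array: for `int a[100]', the byte
difference &a[i] - &a[j] folds to (i - j) * sizeof (int); the
exact division by the element size that C pointer subtraction
introduces then cancels, leaving i - j. */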
10848
10849 if (FLOAT_TYPE_P (type)
10850 && flag_unsafe_math_optimizations
10851 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10852 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10853 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10854 return tem;
10855
10856 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same, or
10857 one of them being the constant 1. Make sure the type is not saturating and
10858 has the signedness of the stripped operands, as fold_plusminus_mult_expr will
10859 re-associate. ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10860 if ((TREE_CODE (arg0) == MULT_EXPR
10861 || TREE_CODE (arg1) == MULT_EXPR)
10862 && !TYPE_SATURATING (type)
10863 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10864 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10865 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10866 {
10867 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10868 if (tem)
10869 return tem;
10870 }
10871
10872 goto associate;
10873
10874 case MULT_EXPR:
10875 /* (-A) * (-B) -> A * B */
10876 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10877 return fold_build2_loc (loc, MULT_EXPR, type,
10878 fold_convert_loc (loc, type,
10879 TREE_OPERAND (arg0, 0)),
10880 fold_convert_loc (loc, type,
10881 negate_expr (arg1)));
10882 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10883 return fold_build2_loc (loc, MULT_EXPR, type,
10884 fold_convert_loc (loc, type,
10885 negate_expr (arg0)),
10886 fold_convert_loc (loc, type,
10887 TREE_OPERAND (arg1, 0)));
10888
10889 if (! FLOAT_TYPE_P (type))
10890 {
10891 if (integer_zerop (arg1))
10892 return omit_one_operand_loc (loc, type, arg1, arg0);
10893 if (integer_onep (arg1))
10894 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10895 /* Transform x * -1 into -x. Make sure to do the negation
10896 on the original operand with conversions not stripped
10897 because we can only strip non-sign-changing conversions. */
10898 if (integer_minus_onep (arg1))
10899 return fold_convert_loc (loc, type, negate_expr (op0));
10900 /* Transform x * -C into -x * C if x is easily negatable. */
10901 if (TREE_CODE (arg1) == INTEGER_CST
10902 && tree_int_cst_sgn (arg1) == -1
10903 && negate_expr_p (arg0)
10904 && (tem = negate_expr (arg1)) != arg1
10905 && !TREE_OVERFLOW (tem))
10906 return fold_build2_loc (loc, MULT_EXPR, type,
10907 fold_convert_loc (loc, type,
10908 negate_expr (arg0)),
10909 tem);
10910
10911 /* (a * (1 << b)) is (a << b) */
10912 if (TREE_CODE (arg1) == LSHIFT_EXPR
10913 && integer_onep (TREE_OPERAND (arg1, 0)))
10914 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10915 TREE_OPERAND (arg1, 1));
10916 if (TREE_CODE (arg0) == LSHIFT_EXPR
10917 && integer_onep (TREE_OPERAND (arg0, 0)))
10918 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10919 TREE_OPERAND (arg0, 1));
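/* Worked example: with a == 3 and b == 4, a * (1 << 4) == 48
== 3 << 4, so a multiplication by a power of two expressed as a
shift of 1 is rewritten as a shift of the other operand. */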
10920
10921 /* (A + A) * C -> A * 2 * C */
10922 if (TREE_CODE (arg0) == PLUS_EXPR
10923 && TREE_CODE (arg1) == INTEGER_CST
10924 && operand_equal_p (TREE_OPERAND (arg0, 0),
10925 TREE_OPERAND (arg0, 1), 0))
10926 return fold_build2_loc (loc, MULT_EXPR, type,
10927 omit_one_operand_loc (loc, type,
10928 TREE_OPERAND (arg0, 0),
10929 TREE_OPERAND (arg0, 1)),
10930 fold_build2_loc (loc, MULT_EXPR, type,
10931 build_int_cst (type, 2), arg1));
10932
10933 strict_overflow_p = false;
10934 if (TREE_CODE (arg1) == INTEGER_CST
10935 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10936 &strict_overflow_p)))
10937 {
10938 if (strict_overflow_p)
10939 fold_overflow_warning (("assuming signed overflow does not "
10940 "occur when simplifying "
10941 "multiplication"),
10942 WARN_STRICT_OVERFLOW_MISC);
10943 return fold_convert_loc (loc, type, tem);
10944 }
10945
10946 /* Optimize z * conj(z) for integer complex numbers. */
10947 if (TREE_CODE (arg0) == CONJ_EXPR
10948 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10949 return fold_mult_zconjz (loc, type, arg1);
10950 if (TREE_CODE (arg1) == CONJ_EXPR
10951 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10952 return fold_mult_zconjz (loc, type, arg0);
10953 }
10954 else
10955 {
10956 /* Maybe fold x * 0 to 0. The expressions aren't the same
10957 when x is NaN, since x * 0 is also NaN. Nor are they the
10958 same in modes with signed zeros, since multiplying a
10959 negative value by 0 gives -0, not +0. */
10960 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
10961 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10962 && real_zerop (arg1))
10963 return omit_one_operand_loc (loc, type, arg1, arg0);
10964 /* In IEEE floating point, x*1 is not equivalent to x for snans.
10965 Likewise for complex arithmetic with signed zeros. */
10966 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10967 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10968 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10969 && real_onep (arg1))
10970 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10971
10972 /* Transform x * -1.0 into -x. */
10973 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10974 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10975 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10976 && real_minus_onep (arg1))
10977 return fold_convert_loc (loc, type, negate_expr (arg0));
10978
10979 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10980 the result for floating point types due to rounding, so it is applied
10981 only if -fassociative-math was specified. */
10982 if (flag_associative_math
10983 && TREE_CODE (arg0) == RDIV_EXPR
10984 && TREE_CODE (arg1) == REAL_CST
10985 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10986 {
10987 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10988 arg1);
10989 if (tem)
10990 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10991 TREE_OPERAND (arg0, 1));
10992 }
10993
10994 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10995 if (operand_equal_p (arg0, arg1, 0))
10996 {
10997 tree tem = fold_strip_sign_ops (arg0);
10998 if (tem != NULL_TREE)
10999 {
11000 tem = fold_convert_loc (loc, type, tem);
11001 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11002 }
11003 }
11004
11005 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11006 This is not the same for NaNs or if signed zeros are
11007 involved. */
11008 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11009 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11010 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11011 && TREE_CODE (arg1) == COMPLEX_CST
11012 && real_zerop (TREE_REALPART (arg1)))
11013 {
11014 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11015 if (real_onep (TREE_IMAGPART (arg1)))
11016 return
11017 fold_build2_loc (loc, COMPLEX_EXPR, type,
11018 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11019 rtype, arg0)),
11020 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11021 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11022 return
11023 fold_build2_loc (loc, COMPLEX_EXPR, type,
11024 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11025 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11026 rtype, arg0)));
11027 }
11028
11029 /* Optimize z * conj(z) for floating point complex numbers.
11030 Guarded by flag_unsafe_math_optimizations as non-finite
11031 imaginary components don't produce scalar results. */
11032 if (flag_unsafe_math_optimizations
11033 && TREE_CODE (arg0) == CONJ_EXPR
11034 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11035 return fold_mult_zconjz (loc, type, arg1);
11036 if (flag_unsafe_math_optimizations
11037 && TREE_CODE (arg1) == CONJ_EXPR
11038 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11039 return fold_mult_zconjz (loc, type, arg0);
11040
11041 if (flag_unsafe_math_optimizations)
11042 {
11043 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11044 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11045
11046 /* Optimizations of root(...)*root(...). */
11047 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11048 {
11049 tree rootfn, arg;
11050 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11051 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11052
11053 /* Optimize sqrt(x)*sqrt(x) as x. */
11054 if (BUILTIN_SQRT_P (fcode0)
11055 && operand_equal_p (arg00, arg10, 0)
11056 && ! HONOR_SNANS (TYPE_MODE (type)))
11057 return arg00;
11058
11059 /* Optimize root(x)*root(y) as root(x*y). */
11060 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11061 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11062 return build_call_expr_loc (loc, rootfn, 1, arg);
11063 }
11064
11065 /* Optimize expN(x)*expN(y) as expN(x+y). */
11066 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11067 {
11068 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11069 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11070 CALL_EXPR_ARG (arg0, 0),
11071 CALL_EXPR_ARG (arg1, 0));
11072 return build_call_expr_loc (loc, expfn, 1, arg);
11073 }
11074
11075 /* Optimizations of pow(...)*pow(...). */
11076 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11077 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11078 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11079 {
11080 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11081 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11082 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11083 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11084
11085 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11086 if (operand_equal_p (arg01, arg11, 0))
11087 {
11088 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11089 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11090 arg00, arg10);
11091 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11092 }
11093
11094 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11095 if (operand_equal_p (arg00, arg10, 0))
11096 {
11097 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11098 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11099 arg01, arg11);
11100 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11101 }
11102 }
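/* Sketch of the two pow identities, valid only under
-funsafe-math-optimizations since rounding may differ (values
illustrative): pow (x, 2.0) * pow (z, 2.0) becomes
pow (x * z, 2.0), and pow (x, 2.0) * pow (x, 3.0) becomes
pow (x, 5.0). */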
11103
11104 /* Optimize tan(x)*cos(x) as sin(x). */
11105 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11106 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11107 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11108 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11109 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11110 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11111 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11112 CALL_EXPR_ARG (arg1, 0), 0))
11113 {
11114 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11115
11116 if (sinfn != NULL_TREE)
11117 return build_call_expr_loc (loc, sinfn, 1,
11118 CALL_EXPR_ARG (arg0, 0));
11119 }
11120
11121 /* Optimize x*pow(x,c) as pow(x,c+1). */
11122 if (fcode1 == BUILT_IN_POW
11123 || fcode1 == BUILT_IN_POWF
11124 || fcode1 == BUILT_IN_POWL)
11125 {
11126 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11127 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11128 if (TREE_CODE (arg11) == REAL_CST
11129 && !TREE_OVERFLOW (arg11)
11130 && operand_equal_p (arg0, arg10, 0))
11131 {
11132 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11133 REAL_VALUE_TYPE c;
11134 tree arg;
11135
11136 c = TREE_REAL_CST (arg11);
11137 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11138 arg = build_real (type, c);
11139 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11140 }
11141 }
11142
11143 /* Optimize pow(x,c)*x as pow(x,c+1). */
11144 if (fcode0 == BUILT_IN_POW
11145 || fcode0 == BUILT_IN_POWF
11146 || fcode0 == BUILT_IN_POWL)
11147 {
11148 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11149 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11150 if (TREE_CODE (arg01) == REAL_CST
11151 && !TREE_OVERFLOW (arg01)
11152 && operand_equal_p (arg1, arg00, 0))
11153 {
11154 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11155 REAL_VALUE_TYPE c;
11156 tree arg;
11157
11158 c = TREE_REAL_CST (arg01);
11159 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11160 arg = build_real (type, c);
11161 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11162 }
11163 }
11164
11165 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11166 if (!in_gimple_form
11167 && optimize
11168 && operand_equal_p (arg0, arg1, 0))
11169 {
11170 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11171
11172 if (powfn)
11173 {
11174 tree arg = build_real (type, dconst2);
11175 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11176 }
11177 }
11178 }
11179 }
11180 goto associate;
11181
11182 case BIT_IOR_EXPR:
11183 bit_ior:
11184 if (integer_all_onesp (arg1))
11185 return omit_one_operand_loc (loc, type, arg1, arg0);
11186 if (integer_zerop (arg1))
11187 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11188 if (operand_equal_p (arg0, arg1, 0))
11189 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11190
11191 /* ~X | X is -1. */
11192 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11193 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11194 {
11195 t1 = build_zero_cst (type);
11196 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11197 return omit_one_operand_loc (loc, type, t1, arg1);
11198 }
11199
11200 /* X | ~X is -1. */
11201 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11202 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11203 {
11204 t1 = build_zero_cst (type);
11205 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11206 return omit_one_operand_loc (loc, type, t1, arg0);
11207 }
11208
11209 /* Canonicalize (X & C1) | C2. */
11210 if (TREE_CODE (arg0) == BIT_AND_EXPR
11211 && TREE_CODE (arg1) == INTEGER_CST
11212 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11213 {
11214 double_int c1, c2, c3, msk;
11215 int width = TYPE_PRECISION (type), w;
11216 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11217 c2 = tree_to_double_int (arg1);
11218
11219 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11220 if ((c1 & c2) == c1)
11221 return omit_one_operand_loc (loc, type, arg1,
11222 TREE_OPERAND (arg0, 0));
11223
11224 msk = double_int::mask (width);
11225
11226 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11227 if (msk.and_not (c1 | c2).is_zero ())
11228 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11229 TREE_OPERAND (arg0, 0), arg1);
11230
11231 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11232 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11233 mode which allows further optimizations. */
11234 c1 &= msk;
11235 c2 &= msk;
11236 c3 = c1.and_not (c2);
11237 for (w = BITS_PER_UNIT;
11238 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11239 w <<= 1)
11240 {
11241 unsigned HOST_WIDE_INT mask
11242 = (unsigned HOST_WIDE_INT) -1 >> (HOST_BITS_PER_WIDE_INT - w);
11243 if (((c1.low | c2.low) & mask) == mask
11244 && (c1.low & ~mask) == 0 && c1.high == 0)
11245 {
11246 c3 = double_int::from_uhwi (mask);
11247 break;
11248 }
11249 }
11250 if (c3 != c1)
11251 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11252 fold_build2_loc (loc, BIT_AND_EXPR, type,
11253 TREE_OPERAND (arg0, 0),
11254 double_int_to_tree (type,
11255 c3)),
11256 arg1);
11257 }
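/* Worked 8-bit examples (widths and masks illustrative):
(x & 0x03) | 0x0F == 0x0F, as C1's bits all lie within C2;
(x & 0xF0) | 0x0F == x | 0x0F, as C1 | C2 covers every bit;
(x & 0x3C) | 0x0F == (x & 0x30) | 0x0F after minimizing C1. */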
11258
11259 /* (X & Y) | Y is (X, Y). */
11260 if (TREE_CODE (arg0) == BIT_AND_EXPR
11261 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11262 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11263 /* (X & Y) | X is (Y, X). */
11264 if (TREE_CODE (arg0) == BIT_AND_EXPR
11265 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11266 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11267 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11268 /* X | (X & Y) is (Y, X). */
11269 if (TREE_CODE (arg1) == BIT_AND_EXPR
11270 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11271 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11272 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11273 /* X | (Y & X) is (Y, X). */
11274 if (TREE_CODE (arg1) == BIT_AND_EXPR
11275 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11276 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11277 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11278
11279 /* (X & ~Y) | (~X & Y) is X ^ Y */
11280 if (TREE_CODE (arg0) == BIT_AND_EXPR
11281 && TREE_CODE (arg1) == BIT_AND_EXPR)
11282 {
11283 tree a0, a1, l0, l1, n0, n1;
11284
11285 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11286 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11287
11288 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11289 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11290
11291 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11292 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11293
11294 if ((operand_equal_p (n0, a0, 0)
11295 && operand_equal_p (n1, a1, 0))
11296 || (operand_equal_p (n0, a1, 0)
11297 && operand_equal_p (n1, a0, 0)))
11298 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11299 }
11300
11301 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11302 if (t1 != NULL_TREE)
11303 return t1;
11304
11305 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11306
11307 This results in more efficient code for machines without a NAND
11308 instruction. Combine will canonicalize to the first form,
11309 which will allow use of NAND instructions provided by the
11310 backend if they exist. */
11311 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11312 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11313 {
11314 return
11315 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11316 build2 (BIT_AND_EXPR, type,
11317 fold_convert_loc (loc, type,
11318 TREE_OPERAND (arg0, 0)),
11319 fold_convert_loc (loc, type,
11320 TREE_OPERAND (arg1, 0))));
11321 }
11322
11323 /* See if this can be simplified into a rotate first. If that
11324 is unsuccessful, continue in the association code. */
11325 goto bit_rotate;
11326
11327 case BIT_XOR_EXPR:
11328 if (integer_zerop (arg1))
11329 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11330 if (integer_all_onesp (arg1))
11331 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11332 if (operand_equal_p (arg0, arg1, 0))
11333 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11334
11335 /* ~X ^ X is -1. */
11336 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11337 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11338 {
11339 t1 = build_zero_cst (type);
11340 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11341 return omit_one_operand_loc (loc, type, t1, arg1);
11342 }
11343
11344 /* X ^ ~X is -1. */
11345 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11346 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11347 {
11348 t1 = build_zero_cst (type);
11349 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11350 return omit_one_operand_loc (loc, type, t1, arg0);
11351 }
11352
11353 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11354 with a constant, and the two constants have no bits in common,
11355 we should treat this as a BIT_IOR_EXPR since this may produce more
11356 simplifications. */
11357 if (TREE_CODE (arg0) == BIT_AND_EXPR
11358 && TREE_CODE (arg1) == BIT_AND_EXPR
11359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11360 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11361 && integer_zerop (const_binop (BIT_AND_EXPR,
11362 TREE_OPERAND (arg0, 1),
11363 TREE_OPERAND (arg1, 1))))
11364 {
11365 code = BIT_IOR_EXPR;
11366 goto bit_ior;
11367 }
11368
11369 /* (X | Y) ^ X -> Y & ~X. */
11370 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11371 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11372 {
11373 tree t2 = TREE_OPERAND (arg0, 1);
11374 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11375 arg1);
11376 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11377 fold_convert_loc (loc, type, t2),
11378 fold_convert_loc (loc, type, t1));
11379 return t1;
11380 }
11381
11382 /* (Y | X) ^ X -> Y & ~X. */
11383 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11384 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11385 {
11386 tree t2 = TREE_OPERAND (arg0, 0);
11387 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11388 arg1);
11389 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11390 fold_convert_loc (loc, type, t2),
11391 fold_convert_loc (loc, type, t1));
11392 return t1;
11393 }
11394
11395 /* X ^ (X | Y) -> Y & ~X. */
11396 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11397 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11398 {
11399 tree t2 = TREE_OPERAND (arg1, 1);
11400 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11401 arg0);
11402 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11403 fold_convert_loc (loc, type, t2),
11404 fold_convert_loc (loc, type, t1));
11405 return t1;
11406 }
11407
11408 /* X ^ (Y | X) -> Y & ~X. */
11409 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11410 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11411 {
11412 tree t2 = TREE_OPERAND (arg1, 0);
11413 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11414 arg0);
11415 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11416 fold_convert_loc (loc, type, t2),
11417 fold_convert_loc (loc, type, t1));
11418 return t1;
11419 }
11420
11421 /* Convert ~X ^ ~Y to X ^ Y. */
11422 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11423 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11424 return fold_build2_loc (loc, code, type,
11425 fold_convert_loc (loc, type,
11426 TREE_OPERAND (arg0, 0)),
11427 fold_convert_loc (loc, type,
11428 TREE_OPERAND (arg1, 0)));
11429
11430 /* Convert ~X ^ C to X ^ ~C. */
11431 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11432 && TREE_CODE (arg1) == INTEGER_CST)
11433 return fold_build2_loc (loc, code, type,
11434 fold_convert_loc (loc, type,
11435 TREE_OPERAND (arg0, 0)),
11436 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11437
11438 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11439 if (TREE_CODE (arg0) == BIT_AND_EXPR
11440 && integer_onep (TREE_OPERAND (arg0, 1))
11441 && integer_onep (arg1))
11442 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11443 build_zero_cst (TREE_TYPE (arg0)));
11444
11445 /* Fold (X & Y) ^ Y as ~X & Y. */
11446 if (TREE_CODE (arg0) == BIT_AND_EXPR
11447 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11448 {
11449 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11450 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11451 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11452 fold_convert_loc (loc, type, arg1));
11453 }
11454 /* Fold (X & Y) ^ X as ~Y & X. */
11455 if (TREE_CODE (arg0) == BIT_AND_EXPR
11456 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11457 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11458 {
11459 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11460 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11461 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11462 fold_convert_loc (loc, type, arg1));
11463 }
11464 /* Fold X ^ (X & Y) as X & ~Y. */
11465 if (TREE_CODE (arg1) == BIT_AND_EXPR
11466 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11467 {
11468 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11469 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11470 fold_convert_loc (loc, type, arg0),
11471 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11472 }
11473 /* Fold X ^ (Y & X) as ~Y & X. */
11474 if (TREE_CODE (arg1) == BIT_AND_EXPR
11475 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11476 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11477 {
11478 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11479 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11480 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11481 fold_convert_loc (loc, type, arg0));
11482 }
11483
11484 /* See if this can be simplified into a rotate first. If that
11485 is unsuccessful, continue in the association code. */
11486 goto bit_rotate;
11487
11488 case BIT_AND_EXPR:
11489 if (integer_all_onesp (arg1))
11490 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11491 if (integer_zerop (arg1))
11492 return omit_one_operand_loc (loc, type, arg1, arg0);
11493 if (operand_equal_p (arg0, arg1, 0))
11494 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11495
11496 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11497 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11498 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11499 || (TREE_CODE (arg0) == EQ_EXPR
11500 && integer_zerop (TREE_OPERAND (arg0, 1))))
11501 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11502 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11503
11504 /* X & ~X, X & (X == 0), and X & !X are always zero. */
11505 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11506 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11507 || (TREE_CODE (arg1) == EQ_EXPR
11508 && integer_zerop (TREE_OPERAND (arg1, 1))))
11509 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11510 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11511
11512 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11513 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11514 && TREE_CODE (arg1) == INTEGER_CST
11515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11516 {
11517 tree tmp1 = fold_convert_loc (loc, type, arg1);
11518 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11519 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11520 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11521 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11522 return
11523 fold_convert_loc (loc, type,
11524 fold_build2_loc (loc, BIT_IOR_EXPR,
11525 type, tmp2, tmp3));
11526 }
11527
11528 /* (X | Y) & Y is (X, Y). */
11529 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11530 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11531 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11532 /* (X | Y) & X is (Y, X). */
11533 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11534 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11535 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11536 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11537 /* X & (X | Y) is (Y, X). */
11538 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11539 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11540 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11541 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11542 /* X & (Y | X) is (Y, X). */
11543 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11544 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11545 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11546 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11547
11548 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11549 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11550 && integer_onep (TREE_OPERAND (arg0, 1))
11551 && integer_onep (arg1))
11552 {
11553 tree tem2;
11554 tem = TREE_OPERAND (arg0, 0);
11555 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11556 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11557 tem, tem2);
11558 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11559 build_zero_cst (TREE_TYPE (tem)));
11560 }
11561 /* Fold ~X & 1 as (X & 1) == 0. */
11562 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11563 && integer_onep (arg1))
11564 {
11565 tree tem2;
11566 tem = TREE_OPERAND (arg0, 0);
11567 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11568 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11569 tem, tem2);
11570 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11571 build_zero_cst (TREE_TYPE (tem)));
11572 }
11573 /* Fold !X & 1 as X == 0. */
11574 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11575 && integer_onep (arg1))
11576 {
11577 tem = TREE_OPERAND (arg0, 0);
11578 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11579 build_zero_cst (TREE_TYPE (tem)));
11580 }
11581
11582 /* Fold (X ^ Y) & Y as ~X & Y. */
11583 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11584 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11585 {
11586 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11587 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11588 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11589 fold_convert_loc (loc, type, arg1));
11590 }
11591 /* Fold (X ^ Y) & X as ~Y & X. */
11592 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11593 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11594 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11595 {
11596 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11597 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11598 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11599 fold_convert_loc (loc, type, arg1));
11600 }
11601 /* Fold X & (X ^ Y) as X & ~Y. */
11602 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11603 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11604 {
11605 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11606 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11607 fold_convert_loc (loc, type, arg0),
11608 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11609 }
11610 /* Fold X & (Y ^ X) as ~Y & X. */
11611 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11612 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11613 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11614 {
11615 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11616 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11617 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11618 fold_convert_loc (loc, type, arg0));
11619 }
11620
11621 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11622 multiple of 1 << CST. */
11623 if (TREE_CODE (arg1) == INTEGER_CST)
11624 {
11625 double_int cst1 = tree_to_double_int (arg1);
11626 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11627 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11628 if ((cst1 & ncst1) == ncst1
11629 && multiple_of_p (type, arg0,
11630 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11631 return fold_convert_loc (loc, type, arg0);
11632 }
11633
11634 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11635 bits from CST2. */
11636 if (TREE_CODE (arg1) == INTEGER_CST
11637 && TREE_CODE (arg0) == MULT_EXPR
11638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11639 {
11640 int arg1tz
11641 = tree_to_double_int (TREE_OPERAND (arg0, 1)).trailing_zeros ();
11642 if (arg1tz > 0)
11643 {
11644 double_int arg1mask, masked;
11645 arg1mask = ~double_int::mask (arg1tz);
11646 arg1mask = arg1mask.ext (TYPE_PRECISION (type),
11647 TYPE_UNSIGNED (type));
11648 masked = arg1mask & tree_to_double_int (arg1);
11649 if (masked.is_zero ())
11650 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11651 arg0, arg1);
11652 else if (masked != tree_to_double_int (arg1))
11653 return fold_build2_loc (loc, code, type, op0,
11654 double_int_to_tree (type, masked));
11655 }
11656 }
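/* Worked example: x * 4 always has its two low bits clear, so
(x * 4) & 3 folds to 0 and (x * 4) & 7 folds to (x * 4) & 4,
dropping the mask bits that are provably zero. */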
11657
11658 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11659 ((A & N) + B) & M -> (A + B) & M.
11660 Similarly, if (N & M) == 0,
11661 ((A | N) + B) & M -> (A + B) & M,
11662 and for - instead of + (or unary - instead of +)
11663 and/or ^ instead of |.
11664 If B is constant and (B & M) == 0, fold into A & M. */
11665 if (host_integerp (arg1, 1))
11666 {
11667 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11668 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11669 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11670 && (TREE_CODE (arg0) == PLUS_EXPR
11671 || TREE_CODE (arg0) == MINUS_EXPR
11672 || TREE_CODE (arg0) == NEGATE_EXPR)
11673 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11674 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11675 {
11676 tree pmop[2];
11677 int which = 0;
11678 unsigned HOST_WIDE_INT cst0;
11679
11680 /* Now we know that arg0 is (C + D) or (C - D) or
11681 -C, and that arg1 (M) equals (1LL << cst) - 1.
11682 Store C into PMOP[0] and D into PMOP[1]. */
11683 pmop[0] = TREE_OPERAND (arg0, 0);
11684 pmop[1] = NULL;
11685 if (TREE_CODE (arg0) != NEGATE_EXPR)
11686 {
11687 pmop[1] = TREE_OPERAND (arg0, 1);
11688 which = 1;
11689 }
11690
11691 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11692 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11693 & cst1) != cst1)
11694 which = -1;
11695
11696 for (; which >= 0; which--)
11697 switch (TREE_CODE (pmop[which]))
11698 {
11699 case BIT_AND_EXPR:
11700 case BIT_IOR_EXPR:
11701 case BIT_XOR_EXPR:
11702 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11703 != INTEGER_CST)
11704 break;
11705 /* tree_low_cst not used, because we don't care about
11706 the upper bits. */
11707 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11708 cst0 &= cst1;
11709 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11710 {
11711 if (cst0 != cst1)
11712 break;
11713 }
11714 else if (cst0 != 0)
11715 break;
11716 /* If C or D is of the form (A & N) where
11717 (N & M) == M, or of the form (A | N) or
11718 (A ^ N) where (N & M) == 0, replace it with A. */
11719 pmop[which] = TREE_OPERAND (pmop[which], 0);
11720 break;
11721 case INTEGER_CST:
11722 /* If C or D is a N where (N & M) == 0, it can be
11723 omitted (assumed 0). */
11724 if ((TREE_CODE (arg0) == PLUS_EXPR
11725 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11726 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11727 pmop[which] = NULL;
11728 break;
11729 default:
11730 break;
11731 }
11732
11733 /* Only build anything new if we optimized one or both arguments
11734 above. */
11735 if (pmop[0] != TREE_OPERAND (arg0, 0)
11736 || (TREE_CODE (arg0) != NEGATE_EXPR
11737 && pmop[1] != TREE_OPERAND (arg0, 1)))
11738 {
11739 tree utype = TREE_TYPE (arg0);
11740 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11741 {
11742 /* Perform the operations in a type that has defined
11743 overflow behavior. */
11744 utype = unsigned_type_for (TREE_TYPE (arg0));
11745 if (pmop[0] != NULL)
11746 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11747 if (pmop[1] != NULL)
11748 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11749 }
11750
11751 if (TREE_CODE (arg0) == NEGATE_EXPR)
11752 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11753 else if (TREE_CODE (arg0) == PLUS_EXPR)
11754 {
11755 if (pmop[0] != NULL && pmop[1] != NULL)
11756 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11757 pmop[0], pmop[1]);
11758 else if (pmop[0] != NULL)
11759 tem = pmop[0];
11760 else if (pmop[1] != NULL)
11761 tem = pmop[1];
11762 else
11763 return build_int_cst (type, 0);
11764 }
11765 else if (pmop[0] == NULL)
11766 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11767 else
11768 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11769 pmop[0], pmop[1]);
11770 /* TEM is now the new binary +, - or unary - replacement. */
11771 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11772 fold_convert_loc (loc, utype, arg1));
11773 return fold_convert_loc (loc, type, tem);
11774 }
11775 }
11776 }
11777
11778 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11779 if (t1 != NULL_TREE)
11780 return t1;
11781 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11782 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11783 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11784 {
11785 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11786
11787 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11788 && (~TREE_INT_CST_LOW (arg1)
11789 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11790 return
11791 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11792 }
11793
11794 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11795
11796 This results in more efficient code for machines without a NOR
11797 instruction. Combine will canonicalize to the first form
11798 which will allow use of NOR instructions provided by the
11799 backend if they exist. */
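/* E.g. ~A & ~B folds to ~(A | B); one of De Morgan's laws.  */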
11800 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11801 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11802 {
11803 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11804 build2 (BIT_IOR_EXPR, type,
11805 fold_convert_loc (loc, type,
11806 TREE_OPERAND (arg0, 0)),
11807 fold_convert_loc (loc, type,
11808 TREE_OPERAND (arg1, 0))));
11809 }
11810
11811 /* If arg0 is derived from the address of an object or function, we may
11812 be able to fold this expression using the object or function's
11813 alignment. */
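/* E.g. if ARG0 is the address of an object known to be 8-byte
   aligned and ARG1 is 7, the residue is 0 and the expression
   folds to the constant 0.  */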
11814 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
11815 {
11816 unsigned HOST_WIDE_INT modulus, residue;
11817 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
11818
11819 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11820 integer_onep (arg1));
11821
11822 /* This works because modulus is a power of 2. If this weren't the
11823 case, we'd have to replace it by its greatest power-of-2
11824 divisor: modulus & -modulus. */
11825 if (low < modulus)
11826 return build_int_cst (type, residue & low);
11827 }
11828
11829 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11830 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11831 if the new mask might be further optimized. */
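/* Illustrative example: for 32-bit unsigned X, (X << 8) & 0xff00
   has ZEROBITS == 0xff, so the mask widens to 0xffff, which is a
   mode mask and hence a candidate for further folding.  */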
11832 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11833 || TREE_CODE (arg0) == RSHIFT_EXPR)
11834 && host_integerp (TREE_OPERAND (arg0, 1), 1)
11835 && host_integerp (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)))
11836 && tree_low_cst (TREE_OPERAND (arg0, 1), 1)
11837 < TYPE_PRECISION (TREE_TYPE (arg0))
11838 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11839 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0)
11840 {
11841 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
11842 unsigned HOST_WIDE_INT mask
11843 = tree_low_cst (arg1, TYPE_UNSIGNED (TREE_TYPE (arg1)));
11844 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11845 tree shift_type = TREE_TYPE (arg0);
11846
11847 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11848 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11849 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11850 && TYPE_PRECISION (TREE_TYPE (arg0))
11851 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
11852 {
11853 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11854 tree arg00 = TREE_OPERAND (arg0, 0);
11855 /* See if more bits can be proven as zero because of
11856 zero extension. */
11857 if (TREE_CODE (arg00) == NOP_EXPR
11858 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11859 {
11860 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11861 if (TYPE_PRECISION (inner_type)
11862 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
11863 && TYPE_PRECISION (inner_type) < prec)
11864 {
11865 prec = TYPE_PRECISION (inner_type);
11866 /* See if we can shorten the right shift. */
11867 if (shiftc < prec)
11868 shift_type = inner_type;
11869 }
11870 }
11871 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11872 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11873 zerobits <<= prec - shiftc;
11874 /* For an arithmetic shift, if the sign bit could be set, zerobits
11875 can actually contain sign bits, so no transformation is
11876 possible, unless MASK masks them all away.  In that
11877 case the shift needs to be converted into a logical shift.  */
11878 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11879 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11880 {
11881 if ((mask & zerobits) == 0)
11882 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11883 else
11884 zerobits = 0;
11885 }
11886 }
11887
11888 /* ((X << 16) & 0xff00) is (X, 0). */
11889 if ((mask & zerobits) == mask)
11890 return omit_one_operand_loc (loc, type,
11891 build_int_cst (type, 0), arg0);
11892
11893 newmask = mask | zerobits;
11894 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11895 {
11896 /* Only do the transformation if NEWMASK is some integer
11897 mode's mask. */
11898 for (prec = BITS_PER_UNIT;
11899 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11900 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11901 break;
11902 if (prec < HOST_BITS_PER_WIDE_INT
11903 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11904 {
11905 tree newmaskt;
11906
11907 if (shift_type != TREE_TYPE (arg0))
11908 {
11909 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11910 fold_convert_loc (loc, shift_type,
11911 TREE_OPERAND (arg0, 0)),
11912 TREE_OPERAND (arg0, 1));
11913 tem = fold_convert_loc (loc, type, tem);
11914 }
11915 else
11916 tem = op0;
11917 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11918 if (!tree_int_cst_equal (newmaskt, arg1))
11919 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11920 }
11921 }
11922 }
11923
11924 goto associate;
11925
11926 case RDIV_EXPR:
11927 /* Don't touch a floating-point divide by zero unless the mode
11928 of the constant can represent infinity. */
11929 if (TREE_CODE (arg1) == REAL_CST
11930 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11931 && real_zerop (arg1))
11932 return NULL_TREE;
11933
11934 /* Optimize A / A to 1.0 if we don't care about
11935 NaNs or Infinities. Skip the transformation
11936 for non-real operands. */
11937 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
11938 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11939 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
11940 && operand_equal_p (arg0, arg1, 0))
11941 {
11942 tree r = build_real (TREE_TYPE (arg0), dconst1);
11943
11944 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11945 }
11946
11947 /* The complex version of the above A / A optimization. */
11948 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11949 && operand_equal_p (arg0, arg1, 0))
11950 {
11951 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
11952 if (! HONOR_NANS (TYPE_MODE (elem_type))
11953 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
11954 {
11955 tree r = build_real (elem_type, dconst1);
11956 /* omit_two_operands will call fold_convert for us. */
11957 return omit_two_operands_loc (loc, type, r, arg0, arg1);
11958 }
11959 }
11960
11961 /* (-A) / (-B) -> A / B */
11962 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11963 return fold_build2_loc (loc, RDIV_EXPR, type,
11964 TREE_OPERAND (arg0, 0),
11965 negate_expr (arg1));
11966 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11967 return fold_build2_loc (loc, RDIV_EXPR, type,
11968 negate_expr (arg0),
11969 TREE_OPERAND (arg1, 0));
11970
11971 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
11972 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11973 && real_onep (arg1))
11974 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11975
11976 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
11977 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11978 && real_minus_onep (arg1))
11979 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
11980 negate_expr (arg0)));
11981
11982 /* If ARG1 is a constant, we can convert this to a multiply by the
11983 reciprocal. This does not have the same rounding properties,
11984 so only do this if -freciprocal-math. We can actually
11985 always safely do it if ARG1 is a power of two, but it's hard to
11986 tell if it is or not in a portable manner. */
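/* E.g. x / 10.0 becomes x * 0.1 only under -freciprocal-math,
   whereas x / 2.0 folds to x * 0.5 whenever optimizing, because
   exact_inverse verifies that the reciprocal is exact.  */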
11987 if (optimize
11988 && (TREE_CODE (arg1) == REAL_CST
11989 || (TREE_CODE (arg1) == COMPLEX_CST
11990 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
11991 || (TREE_CODE (arg1) == VECTOR_CST
11992 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
11993 {
11994 if (flag_reciprocal_math
11995 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
11996 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
11997 /* Find the reciprocal if optimizing and the result is exact.
11998 TODO: Complex reciprocal not implemented. */
11999 if (TREE_CODE (arg1) != COMPLEX_CST)
12000 {
12001 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12002
12003 if (inverse)
12004 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12005 }
12006 }
12007 /* Convert A/B/C to A/(B*C). */
12008 if (flag_reciprocal_math
12009 && TREE_CODE (arg0) == RDIV_EXPR)
12010 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12011 fold_build2_loc (loc, MULT_EXPR, type,
12012 TREE_OPERAND (arg0, 1), arg1));
12013
12014 /* Convert A/(B/C) to (A/B)*C. */
12015 if (flag_reciprocal_math
12016 && TREE_CODE (arg1) == RDIV_EXPR)
12017 return fold_build2_loc (loc, MULT_EXPR, type,
12018 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12019 TREE_OPERAND (arg1, 0)),
12020 TREE_OPERAND (arg1, 1));
12021
12022 /* Convert C1/(X*C2) into (C1/C2)/X. */
12023 if (flag_reciprocal_math
12024 && TREE_CODE (arg1) == MULT_EXPR
12025 && TREE_CODE (arg0) == REAL_CST
12026 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12027 {
12028 tree tem = const_binop (RDIV_EXPR, arg0,
12029 TREE_OPERAND (arg1, 1));
12030 if (tem)
12031 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12032 TREE_OPERAND (arg1, 0));
12033 }
12034
12035 if (flag_unsafe_math_optimizations)
12036 {
12037 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12038 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12039
12040 /* Optimize sin(x)/cos(x) as tan(x). */
12041 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12042 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12043 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12044 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12045 CALL_EXPR_ARG (arg1, 0), 0))
12046 {
12047 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12048
12049 if (tanfn != NULL_TREE)
12050 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12051 }
12052
12053 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12054 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12055 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12056 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12057 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12058 CALL_EXPR_ARG (arg1, 0), 0))
12059 {
12060 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12061
12062 if (tanfn != NULL_TREE)
12063 {
12064 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12065 CALL_EXPR_ARG (arg0, 0));
12066 return fold_build2_loc (loc, RDIV_EXPR, type,
12067 build_real (type, dconst1), tmp);
12068 }
12069 }
12070
12071 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12072 NaNs or Infinities. */
12073 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12074 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12075 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12076 {
12077 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12078 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12079
12080 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12081 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12082 && operand_equal_p (arg00, arg01, 0))
12083 {
12084 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12085
12086 if (cosfn != NULL_TREE)
12087 return build_call_expr_loc (loc, cosfn, 1, arg00);
12088 }
12089 }
12090
12091 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12092 NaNs or Infinities. */
12093 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12094 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12095 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12096 {
12097 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12098 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12099
12100 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12101 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12102 && operand_equal_p (arg00, arg01, 0))
12103 {
12104 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12105
12106 if (cosfn != NULL_TREE)
12107 {
12108 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12109 return fold_build2_loc (loc, RDIV_EXPR, type,
12110 build_real (type, dconst1),
12111 tmp);
12112 }
12113 }
12114 }
12115
12116 /* Optimize pow(x,c)/x as pow(x,c-1). */
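/* E.g. pow (x, 4.0) / x folds to pow (x, 3.0).  */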
12117 if (fcode0 == BUILT_IN_POW
12118 || fcode0 == BUILT_IN_POWF
12119 || fcode0 == BUILT_IN_POWL)
12120 {
12121 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12122 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12123 if (TREE_CODE (arg01) == REAL_CST
12124 && !TREE_OVERFLOW (arg01)
12125 && operand_equal_p (arg1, arg00, 0))
12126 {
12127 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12128 REAL_VALUE_TYPE c;
12129 tree arg;
12130
12131 c = TREE_REAL_CST (arg01);
12132 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12133 arg = build_real (type, c);
12134 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12135 }
12136 }
12137
12138 /* Optimize a/root(b/c) into a*root(c/b). */
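/* E.g. a / sqrt (b / c) folds to a * sqrt (c / b).  */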
12139 if (BUILTIN_ROOT_P (fcode1))
12140 {
12141 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12142
12143 if (TREE_CODE (rootarg) == RDIV_EXPR)
12144 {
12145 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12146 tree b = TREE_OPERAND (rootarg, 0);
12147 tree c = TREE_OPERAND (rootarg, 1);
12148
12149 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12150
12151 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12152 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12153 }
12154 }
12155
12156 /* Optimize x/expN(y) into x*expN(-y). */
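/* E.g. x / exp (y) folds to x * exp (-y).  */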
12157 if (BUILTIN_EXPONENT_P (fcode1))
12158 {
12159 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12160 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12161 arg1 = build_call_expr_loc (loc,
12162 expfn, 1,
12163 fold_convert_loc (loc, type, arg));
12164 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12165 }
12166
12167 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12168 if (fcode1 == BUILT_IN_POW
12169 || fcode1 == BUILT_IN_POWF
12170 || fcode1 == BUILT_IN_POWL)
12171 {
12172 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12173 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12174 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12175 tree neg11 = fold_convert_loc (loc, type,
12176 negate_expr (arg11));
12177 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12178 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12179 }
12180 }
12181 return NULL_TREE;
12182
12183 case TRUNC_DIV_EXPR:
12184 /* Optimize (X & (-A)) / A where A is a power of 2,
12185 to X >> log2(A).  */
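/* Illustrative example: for signed X, (X & -16) / 16 folds to
   X >> 4, since -16 is the negation of the power of two 16.  */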
12186 if (TREE_CODE (arg0) == BIT_AND_EXPR
12187 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12188 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12189 {
12190 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12191 arg1, TREE_OPERAND (arg0, 1));
12192 if (sum && integer_zerop (sum))
{
12193 unsigned long pow2;
12194
12195 if (TREE_INT_CST_LOW (arg1))
12196 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12197 else
12198 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12199 + HOST_BITS_PER_WIDE_INT;
12200
12201 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12202 TREE_OPERAND (arg0, 0),
12203 build_int_cst (integer_type_node, pow2));
12204 }
12205 }
12206
12207 /* Fall through */
12208
12209 case FLOOR_DIV_EXPR:
12210 /* Simplify A / (B << N) where A and B are positive and B is
12211 a power of 2, to A >> (N + log2(B)). */
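/* E.g. for unsigned A, A / (4 << N) folds to A >> (N + 2).  */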
12212 strict_overflow_p = false;
12213 if (TREE_CODE (arg1) == LSHIFT_EXPR
12214 && (TYPE_UNSIGNED (type)
12215 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12216 {
12217 tree sval = TREE_OPERAND (arg1, 0);
12218 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12219 {
12220 tree sh_cnt = TREE_OPERAND (arg1, 1);
12221 unsigned long pow2;
12222
12223 if (TREE_INT_CST_LOW (sval))
12224 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12225 else
12226 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12227 + HOST_BITS_PER_WIDE_INT;
12228
12229 if (strict_overflow_p)
12230 fold_overflow_warning (("assuming signed overflow does not "
12231 "occur when simplifying A / (B << N)"),
12232 WARN_STRICT_OVERFLOW_MISC);
12233
12234 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12235 sh_cnt,
12236 build_int_cst (TREE_TYPE (sh_cnt),
12237 pow2));
12238 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12239 fold_convert_loc (loc, type, arg0), sh_cnt);
12240 }
12241 }
12242
12243 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12244 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12245 if (INTEGRAL_TYPE_P (type)
12246 && TYPE_UNSIGNED (type)
12247 && code == FLOOR_DIV_EXPR)
12248 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12249
12250 /* Fall through */
12251
12252 case ROUND_DIV_EXPR:
12253 case CEIL_DIV_EXPR:
12254 case EXACT_DIV_EXPR:
12255 if (integer_onep (arg1))
12256 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12257 if (integer_zerop (arg1))
12258 return NULL_TREE;
12259 /* X / -1 is -X. */
12260 if (!TYPE_UNSIGNED (type)
12261 && TREE_CODE (arg1) == INTEGER_CST
12262 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12263 && TREE_INT_CST_HIGH (arg1) == -1)
12264 return fold_convert_loc (loc, type, negate_expr (arg0));
12265
12266 /* Convert -A / -B to A / B when the type is signed and overflow is
12267 undefined. */
12268 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12269 && TREE_CODE (arg0) == NEGATE_EXPR
12270 && negate_expr_p (arg1))
12271 {
12272 if (INTEGRAL_TYPE_P (type))
12273 fold_overflow_warning (("assuming signed overflow does not occur "
12274 "when distributing negation across "
12275 "division"),
12276 WARN_STRICT_OVERFLOW_MISC);
12277 return fold_build2_loc (loc, code, type,
12278 fold_convert_loc (loc, type,
12279 TREE_OPERAND (arg0, 0)),
12280 fold_convert_loc (loc, type,
12281 negate_expr (arg1)));
12282 }
12283 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12284 && TREE_CODE (arg1) == NEGATE_EXPR
12285 && negate_expr_p (arg0))
12286 {
12287 if (INTEGRAL_TYPE_P (type))
12288 fold_overflow_warning (("assuming signed overflow does not occur "
12289 "when distributing negation across "
12290 "division"),
12291 WARN_STRICT_OVERFLOW_MISC);
12292 return fold_build2_loc (loc, code, type,
12293 fold_convert_loc (loc, type,
12294 negate_expr (arg0)),
12295 fold_convert_loc (loc, type,
12296 TREE_OPERAND (arg1, 0)));
12297 }
12298
12299 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12300 operation, EXACT_DIV_EXPR.
12301
12302 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12303 At one time others generated faster code; it's not clear whether they do
12304 after the last round of changes to the DIV code in expmed.c.  */
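/* E.g. CEIL_DIV_EXPR of 8 * N by 4 becomes EXACT_DIV_EXPR,
   because multiple_of_p can prove the division is exact.  */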
12305 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12306 && multiple_of_p (type, arg0, arg1))
12307 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12308
12309 strict_overflow_p = false;
12310 if (TREE_CODE (arg1) == INTEGER_CST
12311 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12312 &strict_overflow_p)))
12313 {
12314 if (strict_overflow_p)
12315 fold_overflow_warning (("assuming signed overflow does not occur "
12316 "when simplifying division"),
12317 WARN_STRICT_OVERFLOW_MISC);
12318 return fold_convert_loc (loc, type, tem);
12319 }
12320
12321 return NULL_TREE;
12322
12323 case CEIL_MOD_EXPR:
12324 case FLOOR_MOD_EXPR:
12325 case ROUND_MOD_EXPR:
12326 case TRUNC_MOD_EXPR:
12327 /* X % 1 is always zero, but be sure to preserve any side
12328 effects in X. */
12329 if (integer_onep (arg1))
12330 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12331
12332 /* For X % 0, return X % 0 unchanged so that we can get the
12333 proper warnings and errors. */
12334 if (integer_zerop (arg1))
12335 return NULL_TREE;
12336
12337 /* 0 % X is always zero, but be sure to preserve any side
12338 effects in X. Place this after checking for X == 0. */
12339 if (integer_zerop (arg0))
12340 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12341
12342 /* X % -1 is zero. */
12343 if (!TYPE_UNSIGNED (type)
12344 && TREE_CODE (arg1) == INTEGER_CST
12345 && TREE_INT_CST_LOW (arg1) == (unsigned HOST_WIDE_INT) -1
12346 && TREE_INT_CST_HIGH (arg1) == -1)
12347 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12348
12349 /* X % -C is the same as X % C. */
12350 if (code == TRUNC_MOD_EXPR
12351 && !TYPE_UNSIGNED (type)
12352 && TREE_CODE (arg1) == INTEGER_CST
12353 && !TREE_OVERFLOW (arg1)
12354 && TREE_INT_CST_HIGH (arg1) < 0
12355 && !TYPE_OVERFLOW_TRAPS (type)
12356 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12357 && !sign_bit_p (arg1, arg1))
12358 return fold_build2_loc (loc, code, type,
12359 fold_convert_loc (loc, type, arg0),
12360 fold_convert_loc (loc, type,
12361 negate_expr (arg1)));
12362
12363 /* X % -Y is the same as X % Y. */
12364 if (code == TRUNC_MOD_EXPR
12365 && !TYPE_UNSIGNED (type)
12366 && TREE_CODE (arg1) == NEGATE_EXPR
12367 && !TYPE_OVERFLOW_TRAPS (type))
12368 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12369 fold_convert_loc (loc, type,
12370 TREE_OPERAND (arg1, 0)));
12371
12372 strict_overflow_p = false;
12373 if (TREE_CODE (arg1) == INTEGER_CST
12374 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12375 &strict_overflow_p)))
12376 {
12377 if (strict_overflow_p)
12378 fold_overflow_warning (("assuming signed overflow does not occur "
12379 "when simplifying modulus"),
12380 WARN_STRICT_OVERFLOW_MISC);
12381 return fold_convert_loc (loc, type, tem);
12382 }
12383
12384 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12385 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
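/* E.g. for unsigned X, X % 8 folds to X & 7, and
   X % (2 << N) folds to X & ((2 << N) - 1).  */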
12386 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12387 && (TYPE_UNSIGNED (type)
12388 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12389 {
12390 tree c = arg1;
12391 /* Also optimize A % (C << N) where C is a power of 2,
12392 to A & ((C << N) - 1). */
12393 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12394 c = TREE_OPERAND (arg1, 0);
12395
12396 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12397 {
12398 tree mask
12399 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12400 build_int_cst (TREE_TYPE (arg1), 1));
12401 if (strict_overflow_p)
12402 fold_overflow_warning (("assuming signed overflow does not "
12403 "occur when simplifying "
12404 "X % (power of two)"),
12405 WARN_STRICT_OVERFLOW_MISC);
12406 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12407 fold_convert_loc (loc, type, arg0),
12408 fold_convert_loc (loc, type, mask));
12409 }
12410 }
12411
12412 return NULL_TREE;
12413
12414 case LROTATE_EXPR:
12415 case RROTATE_EXPR:
12416 if (integer_all_onesp (arg0))
12417 return omit_one_operand_loc (loc, type, arg0, arg1);
12418 goto shift;
12419
12420 case RSHIFT_EXPR:
12421 /* Optimize -1 >> x for arithmetic right shifts. */
12422 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12423 && tree_expr_nonnegative_p (arg1))
12424 return omit_one_operand_loc (loc, type, arg0, arg1);
12425 /* ... fall through ... */
12426
12427 case LSHIFT_EXPR:
12428 shift:
12429 if (integer_zerop (arg1))
12430 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12431 if (integer_zerop (arg0))
12432 return omit_one_operand_loc (loc, type, arg0, arg1);
12433
12434 /* Prefer vector1 << scalar to vector1 << vector2
12435 if vector2 is uniform. */
12436 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12437 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12438 return fold_build2_loc (loc, code, type, op0, tem);
12439
12440 /* Since a negative shift count is not well-defined,
12441 don't try to compute it in the compiler. */
12442 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12443 return NULL_TREE;
12444
12445 prec = element_precision (type);
12446
12447 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
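/* E.g. (X << 3) << 5 folds to X << 8 as long as 8 is less than
   the precision; rotate counts are instead reduced modulo the
   precision.  */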
12448 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12449 && TREE_INT_CST_LOW (arg1) < prec
12450 && host_integerp (TREE_OPERAND (arg0, 1), true)
12451 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12452 {
12453 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12454 + TREE_INT_CST_LOW (arg1));
12455
12456 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12457 being well defined. */
12458 if (low >= prec)
12459 {
12460 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12461 low = low % prec;
12462 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12463 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12464 TREE_OPERAND (arg0, 0));
12465 else
12466 low = prec - 1;
12467 }
12468
12469 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12470 build_int_cst (TREE_TYPE (arg1), low));
12471 }
12472
12473 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12474 into x & ((unsigned)-1 >> c) for unsigned types. */
12475 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12476 || (TYPE_UNSIGNED (type)
12477 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12478 && host_integerp (arg1, false)
12479 && TREE_INT_CST_LOW (arg1) < prec
12480 && host_integerp (TREE_OPERAND (arg0, 1), false)
12481 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12482 {
12483 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12484 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12485 tree lshift;
12486 tree arg00;
12487
12488 if (low0 == low1)
12489 {
12490 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12491
12492 lshift = build_minus_one_cst (type);
12493 lshift = const_binop (code, lshift, arg1);
12494
12495 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12496 }
12497 }
12498
12499 /* Rewrite an LROTATE_EXPR by a constant into an
12500 RROTATE_EXPR by a new constant. */
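/* E.g. with 32-bit operands, a rotate left by 8 becomes a
   rotate right by 24.  */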
12501 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12502 {
12503 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12504 tem = const_binop (MINUS_EXPR, tem, arg1);
12505 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12506 }
12507
12508 /* If we have a rotate of a bit operation with the rotate count and
12509 the second operand of the bit operation both constant,
12510 permute the two operations. */
12511 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12512 && (TREE_CODE (arg0) == BIT_AND_EXPR
12513 || TREE_CODE (arg0) == BIT_IOR_EXPR
12514 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12515 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12516 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12517 fold_build2_loc (loc, code, type,
12518 TREE_OPERAND (arg0, 0), arg1),
12519 fold_build2_loc (loc, code, type,
12520 TREE_OPERAND (arg0, 1), arg1));
12521
12522 /* Two consecutive rotates adding up to the precision of the
12523 type can be ignored. */
12524 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12525 && TREE_CODE (arg0) == RROTATE_EXPR
12526 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12527 && TREE_INT_CST_HIGH (arg1) == 0
12528 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12529 && ((TREE_INT_CST_LOW (arg1)
12530 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12531 == prec))
12532 return TREE_OPERAND (arg0, 0);
12533
12534 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12535 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12536 if the latter can be further optimized. */
12537 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12538 && TREE_CODE (arg0) == BIT_AND_EXPR
12539 && TREE_CODE (arg1) == INTEGER_CST
12540 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12541 {
12542 tree mask = fold_build2_loc (loc, code, type,
12543 fold_convert_loc (loc, type,
12544 TREE_OPERAND (arg0, 1)),
12545 arg1);
12546 tree shift = fold_build2_loc (loc, code, type,
12547 fold_convert_loc (loc, type,
12548 TREE_OPERAND (arg0, 0)),
12549 arg1);
12550 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12551 if (tem)
12552 return tem;
12553 }
12554
12555 return NULL_TREE;
12556
12557 case MIN_EXPR:
12558 if (operand_equal_p (arg0, arg1, 0))
12559 return omit_one_operand_loc (loc, type, arg0, arg1);
12560 if (INTEGRAL_TYPE_P (type)
12561 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12562 return omit_one_operand_loc (loc, type, arg1, arg0);
12563 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12564 if (tem)
12565 return tem;
12566 goto associate;
12567
12568 case MAX_EXPR:
12569 if (operand_equal_p (arg0, arg1, 0))
12570 return omit_one_operand_loc (loc, type, arg0, arg1);
12571 if (INTEGRAL_TYPE_P (type)
12572 && TYPE_MAX_VALUE (type)
12573 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12574 return omit_one_operand_loc (loc, type, arg1, arg0);
12575 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12576 if (tem)
12577 return tem;
12578 goto associate;
12579
12580 case TRUTH_ANDIF_EXPR:
12581 /* Note that the operands of this must be ints
12582 and their values must be 0 or 1.
12583 ("true" is a fixed value perhaps depending on the language.) */
12584 /* If first arg is constant zero, return it. */
12585 if (integer_zerop (arg0))
12586 return fold_convert_loc (loc, type, arg0);
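/* ... fall through ... */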
12587 case TRUTH_AND_EXPR:
12588 /* If either arg is constant true, drop it. */
12589 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12590 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12591 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12592 /* Preserve sequence points. */
12593 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12594 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12595 /* If second arg is constant zero, result is zero, but first arg
12596 must be evaluated. */
12597 if (integer_zerop (arg1))
12598 return omit_one_operand_loc (loc, type, arg1, arg0);
12599 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12600 case will be handled here. */
12601 if (integer_zerop (arg0))
12602 return omit_one_operand_loc (loc, type, arg0, arg1);
12603
12604 /* !X && X is always false. */
12605 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12607 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12608 /* X && !X is always false. */
12609 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12610 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12611 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12612
12613 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12614 means A >= Y && A != MAX, but in this case we know that
12615 A < X <= MAX. */
12616
12617 if (!TREE_SIDE_EFFECTS (arg0)
12618 && !TREE_SIDE_EFFECTS (arg1))
12619 {
12620 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12621 if (tem && !operand_equal_p (tem, arg0, 0))
12622 return fold_build2_loc (loc, code, type, tem, arg1);
12623
12624 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12625 if (tem && !operand_equal_p (tem, arg1, 0))
12626 return fold_build2_loc (loc, code, type, arg0, tem);
12627 }
12628
12629 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12630 != NULL_TREE)
12631 return tem;
12632
12633 return NULL_TREE;
12634
12635 case TRUTH_ORIF_EXPR:
12636 /* Note that the operands of this must be ints
12637 and their values must be 0 or true.
12638 ("true" is a fixed value perhaps depending on the language.) */
12639 /* If first arg is constant true, return it. */
12640 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12641 return fold_convert_loc (loc, type, arg0);
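/* ... fall through ... */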
12642 case TRUTH_OR_EXPR:
12643 /* If either arg is constant zero, drop it. */
12644 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12645 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12646 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12647 /* Preserve sequence points. */
12648 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12649 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12650 /* If second arg is constant true, result is true, but we must
12651 evaluate first arg. */
12652 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12653 return omit_one_operand_loc (loc, type, arg1, arg0);
12654 /* Likewise for first arg, but note this only occurs here for
12655 TRUTH_OR_EXPR. */
12656 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12657 return omit_one_operand_loc (loc, type, arg0, arg1);
12658
12659 /* !X || X is always true. */
12660 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12661 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12662 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12663 /* X || !X is always true. */
12664 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12665 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12666 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12667
12668 /* (X && !Y) || (!X && Y) is X ^ Y */
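/* Both operand orders are matched below, so (!X && Y) || (X && !Y)
   folds to X ^ Y as well.  */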
12669 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12670 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12671 {
12672 tree a0, a1, l0, l1, n0, n1;
12673
12674 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12675 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12676
12677 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12678 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12679
12680 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12681 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12682
12683 if ((operand_equal_p (n0, a0, 0)
12684 && operand_equal_p (n1, a1, 0))
12685 || (operand_equal_p (n0, a1, 0)
12686 && operand_equal_p (n1, a0, 0)))
12687 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12688 }
12689
12690 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12691 != NULL_TREE)
12692 return tem;
12693
12694 return NULL_TREE;
12695
12696 case TRUTH_XOR_EXPR:
12697 /* If the second arg is constant zero, drop it. */
12698 if (integer_zerop (arg1))
12699 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12700 /* If the second arg is constant true, this is a logical inversion. */
12701 if (integer_onep (arg1))
12702 {
12703 tem = invert_truthvalue_loc (loc, arg0);
12704 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12705 }
12706 /* Identical arguments cancel to zero. */
12707 if (operand_equal_p (arg0, arg1, 0))
12708 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12709
12710 /* !X ^ X is always true. */
12711 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12712 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12713 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12714
12715 /* X ^ !X is always true. */
12716 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12718 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12719
12720 return NULL_TREE;
12721
12722 case EQ_EXPR:
12723 case NE_EXPR:
12724 STRIP_NOPS (arg0);
12725 STRIP_NOPS (arg1);
12726
12727 tem = fold_comparison (loc, code, type, op0, op1);
12728 if (tem != NULL_TREE)
12729 return tem;
12730
12731 /* bool_var != 0 becomes bool_var. */
12732 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12733 && code == NE_EXPR)
12734 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12735
12736 /* bool_var == 1 becomes bool_var. */
12737 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12738 && code == EQ_EXPR)
12739 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12740
12741 /* bool_var != 1 becomes !bool_var. */
12742 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12743 && code == NE_EXPR)
12744 return fold_convert_loc (loc, type,
12745 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12746 TREE_TYPE (arg0), arg0));
12747
12748 /* bool_var == 0 becomes !bool_var. */
12749 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12750 && code == EQ_EXPR)
12751 return fold_convert_loc (loc, type,
12752 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12753 TREE_TYPE (arg0), arg0));
12754
12755 /* !exp != 0 becomes !exp */
12756 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12757 && code == NE_EXPR)
12758 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12759
12760 /* If this is an equality comparison of the address of two non-weak,
12761 unaliased symbols neither of which are extern (since we do not
12762 have access to attributes for externs), then we know the result. */
12763 if (TREE_CODE (arg0) == ADDR_EXPR
12764 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12765 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12766 && ! lookup_attribute ("alias",
12767 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12768 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12769 && TREE_CODE (arg1) == ADDR_EXPR
12770 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12771 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12772 && ! lookup_attribute ("alias",
12773 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12774 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12775 {
12776 /* We know that we're looking at the address of two
12777 non-weak, unaliased, static _DECL nodes.
12778
12779 It is both wasteful and incorrect to call operand_equal_p
12780 to compare the two ADDR_EXPR nodes. It is wasteful in that
12781 all we need to do is test pointer equality for the arguments
12782 to the two ADDR_EXPR nodes. It is incorrect to use
12783 operand_equal_p as that function is NOT equivalent to a
12784 C equality test. It can in fact return false for two
12785 objects which would test as equal using the C equality
12786 operator. */
12787 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12788 return constant_boolean_node (equal
12789 ? code == EQ_EXPR : code != EQ_EXPR,
12790 type);
12791 }
12792
12793 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12794 a MINUS_EXPR of a constant, we can convert it into a comparison with
12795 a revised constant as long as no overflow occurs. */
12796 if (TREE_CODE (arg1) == INTEGER_CST
12797 && (TREE_CODE (arg0) == PLUS_EXPR
12798 || TREE_CODE (arg0) == MINUS_EXPR)
12799 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12800 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
12801 ? MINUS_EXPR : PLUS_EXPR,
12802 fold_convert_loc (loc, TREE_TYPE (arg0),
12803 arg1),
12804 TREE_OPERAND (arg0, 1)))
12805 && !TREE_OVERFLOW (tem))
12806 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12807
12808 /* Similarly for a NEGATE_EXPR. */
12809 if (TREE_CODE (arg0) == NEGATE_EXPR
12810 && TREE_CODE (arg1) == INTEGER_CST
12811 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12812 arg1)))
12813 && TREE_CODE (tem) == INTEGER_CST
12814 && !TREE_OVERFLOW (tem))
12815 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12816
12817 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12818 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12819 && TREE_CODE (arg1) == INTEGER_CST
12820 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12821 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12822 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12823 fold_convert_loc (loc,
12824 TREE_TYPE (arg0),
12825 arg1),
12826 TREE_OPERAND (arg0, 1)));
12827
12828 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
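/* E.g. X + Y == X folds to Y == 0, with X still evaluated for
   its side effects; for EQ/NE this is safe even when overflow
   wraps.  */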
12829 if ((TREE_CODE (arg0) == PLUS_EXPR
12830 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12831 || TREE_CODE (arg0) == MINUS_EXPR)
12832 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12833 0)),
12834 arg1, 0)
12835 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12836 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12837 {
12838 tree val = TREE_OPERAND (arg0, 1);
12839 return omit_two_operands_loc (loc, type,
12840 fold_build2_loc (loc, code, type,
12841 val,
12842 build_int_cst (TREE_TYPE (val),
12843 0)),
12844 TREE_OPERAND (arg0, 0), arg1);
12845 }
12846
12847 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
12848 if (TREE_CODE (arg0) == MINUS_EXPR
12849 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12850 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12851 1)),
12852 arg1, 0)
12853 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
12854 {
12855 return omit_two_operands_loc (loc, type,
12856 code == NE_EXPR
12857 ? boolean_true_node : boolean_false_node,
12858 TREE_OPERAND (arg0, 1), arg1);
12859 }
12860
12861 /* If we have X - Y == 0, we can convert that to X == Y and similarly
12862 for !=. Don't do this for ordered comparisons due to overflow. */
12863 if (TREE_CODE (arg0) == MINUS_EXPR
12864 && integer_zerop (arg1))
12865 return fold_build2_loc (loc, code, type,
12866 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
12867
12868 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12869 if (TREE_CODE (arg0) == ABS_EXPR
12870 && (integer_zerop (arg1) || real_zerop (arg1)))
12871 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12872
12873 /* If this is an EQ or NE comparison with zero and ARG0 is
12874 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12875 two operations, but the latter can be done in one less insn
12876 on machines that have only two-operand insns or on which a
12877 constant cannot be the first operand. */
12878 if (TREE_CODE (arg0) == BIT_AND_EXPR
12879 && integer_zerop (arg1))
12880 {
12881 tree arg00 = TREE_OPERAND (arg0, 0);
12882 tree arg01 = TREE_OPERAND (arg0, 1);
12883 if (TREE_CODE (arg00) == LSHIFT_EXPR
12884 && integer_onep (TREE_OPERAND (arg00, 0)))
12885 {
12886 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12887 arg01, TREE_OPERAND (arg00, 1));
12888 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12889 build_int_cst (TREE_TYPE (arg0), 1));
12890 return fold_build2_loc (loc, code, type,
12891 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12892 arg1);
12893 }
12894 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12895 && integer_onep (TREE_OPERAND (arg01, 0)))
12896 {
12897 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12898 arg00, TREE_OPERAND (arg01, 1));
12899 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12900 build_int_cst (TREE_TYPE (arg0), 1));
12901 return fold_build2_loc (loc, code, type,
12902 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12903 arg1);
12904 }
12905 }
12906
12907 /* If this is an NE or EQ comparison of zero against the result of a
12908 signed MOD operation whose second operand is a power of 2, make
12909 the MOD operation unsigned since it is simpler and equivalent. */
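/* E.g. X % 4 == 0 for signed X becomes (unsigned) X % 4 == 0;
   because 4 divides 2**precision, the remainder is zero in one
   form exactly when it is zero in the other.  */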
12910 if (integer_zerop (arg1)
12911 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12912 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12913 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12914 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12915 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12916 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12917 {
12918 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12919 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12920 fold_convert_loc (loc, newtype,
12921 TREE_OPERAND (arg0, 0)),
12922 fold_convert_loc (loc, newtype,
12923 TREE_OPERAND (arg0, 1)));
12924
12925 return fold_build2_loc (loc, code, type, newmod,
12926 fold_convert_loc (loc, newtype, arg1));
12927 }
12928
12929 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12930 C1 is a valid shift constant, and C2 is a power of two, i.e.
12931 a single bit. */
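/* Illustrative examples: ((X >> 3) & 4) != 0 becomes (X & 32) != 0,
   since 4 << 3 does not overflow; for 32-bit signed X,
   ((X >> 31) & 2) != 0 becomes X < 0 instead.  */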
12932 if (TREE_CODE (arg0) == BIT_AND_EXPR
12933 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12934 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12935 == INTEGER_CST
12936 && integer_pow2p (TREE_OPERAND (arg0, 1))
12937 && integer_zerop (arg1))
12938 {
12939 tree itype = TREE_TYPE (arg0);
12940 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12941 prec = TYPE_PRECISION (itype);
12942
12943 /* Check for a valid shift count. */
12944 if (TREE_INT_CST_HIGH (arg001) == 0
12945 && TREE_INT_CST_LOW (arg001) < prec)
12946 {
12947 tree arg01 = TREE_OPERAND (arg0, 1);
12948 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12949 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12950 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12951 can be rewritten as (X & (C2 << C1)) != 0. */
12952 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12953 {
12954 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12955 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12956 return fold_build2_loc (loc, code, type, tem,
12957 fold_convert_loc (loc, itype, arg1));
12958 }
12959 /* Otherwise, for signed (arithmetic) shifts,
12960 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12961 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12962 else if (!TYPE_UNSIGNED (itype))
12963 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12964 arg000, build_int_cst (itype, 0));
12965 /* Otherwise, for unsigned (logical) shifts,
12966 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12967 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12968 else
12969 return omit_one_operand_loc (loc, type,
12970 code == EQ_EXPR ? integer_one_node
12971 : integer_zero_node,
12972 arg000);
12973 }
12974 }
12975
12976 /* If we have (A & C) == C where C is a power of 2, convert this into
12977 (A & C) != 0. Similarly for NE_EXPR. */
12978 if (TREE_CODE (arg0) == BIT_AND_EXPR
12979 && integer_pow2p (TREE_OPERAND (arg0, 1))
12980 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12981 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12982 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12983 integer_zero_node));
12984
12985 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12986 bit, then fold the expression into A < 0 or A >= 0. */
12987 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12988 if (tem)
12989 return tem;
12990
12991 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12992 Similarly for NE_EXPR. */
12993 if (TREE_CODE (arg0) == BIT_AND_EXPR
12994 && TREE_CODE (arg1) == INTEGER_CST
12995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12996 {
12997 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12998 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12999 TREE_OPERAND (arg0, 1));
13000 tree dandnotc
13001 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13002 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13003 notc);
13004 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13005 if (integer_nonzerop (dandnotc))
13006 return omit_one_operand_loc (loc, type, rslt, arg0);
13007 }
13008
13009 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13010 Similarly for NE_EXPR. */
13011 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13012 && TREE_CODE (arg1) == INTEGER_CST
13013 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13014 {
13015 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13016 tree candnotd
13017 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13018 TREE_OPERAND (arg0, 1),
13019 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13020 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13021 if (integer_nonzerop (candnotd))
13022 return omit_one_operand_loc (loc, type, rslt, arg0);
13023 }
13024
13025 /* If this is a comparison of a field, we may be able to simplify it. */
13026 if ((TREE_CODE (arg0) == COMPONENT_REF
13027 || TREE_CODE (arg0) == BIT_FIELD_REF)
13028 /* Handle the constant case even without -O
13029 to make sure the warnings are given. */
13030 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13031 {
13032 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13033 if (t1)
13034 return t1;
13035 }
13036
13037 /* Optimize comparisons of strlen vs zero to a compare of the
13038 first character of the string vs zero. To wit,
13039 strlen(ptr) == 0 => *ptr == 0
13040 strlen(ptr) != 0 => *ptr != 0
13041 Other cases should reduce to one of these two (or a constant)
13042 due to the return value of strlen being unsigned. */
13043 if (TREE_CODE (arg0) == CALL_EXPR
13044 && integer_zerop (arg1))
13045 {
13046 tree fndecl = get_callee_fndecl (arg0);
13047
13048 if (fndecl
13049 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13050 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13051 && call_expr_nargs (arg0) == 1
13052 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13053 {
13054 tree iref = build_fold_indirect_ref_loc (loc,
13055 CALL_EXPR_ARG (arg0, 0));
13056 return fold_build2_loc (loc, code, type, iref,
13057 build_int_cst (TREE_TYPE (iref), 0));
13058 }
13059 }
13060
13061 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13062 of X. Similarly fold (X >> C) == 0 into X >= 0. */
13063 if (TREE_CODE (arg0) == RSHIFT_EXPR
13064 && integer_zerop (arg1)
13065 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13066 {
13067 tree arg00 = TREE_OPERAND (arg0, 0);
13068 tree arg01 = TREE_OPERAND (arg0, 1);
13069 tree itype = TREE_TYPE (arg00);
13070 if (TREE_INT_CST_HIGH (arg01) == 0
13071 && TREE_INT_CST_LOW (arg01)
13072 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13073 {
13074 if (TYPE_UNSIGNED (itype))
13075 {
13076 itype = signed_type_for (itype);
13077 arg00 = fold_convert_loc (loc, itype, arg00);
13078 }
13079 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13080 type, arg00, build_zero_cst (itype));
13081 }
13082 }
13083
13084 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13085 if (integer_zerop (arg1)
13086 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13087 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13088 TREE_OPERAND (arg0, 1));
13089
13090 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13091 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13092 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13093 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13094 build_zero_cst (TREE_TYPE (arg0)));
13095 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13096 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13097 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13098 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13099 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13100 build_zero_cst (TREE_TYPE (arg0)));
13101
13102 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
13103 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13104 && TREE_CODE (arg1) == INTEGER_CST
13105 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13106 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13107 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13108 TREE_OPERAND (arg0, 1), arg1));
13109
13110 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13111 (X & C) == 0 when C is a single bit. */
13112 if (TREE_CODE (arg0) == BIT_AND_EXPR
13113 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13114 && integer_zerop (arg1)
13115 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13116 {
13117 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13118 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13119 TREE_OPERAND (arg0, 1));
13120 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13121 type, tem,
13122 fold_convert_loc (loc, TREE_TYPE (arg0),
13123 arg1));
13124 }
13125
13126 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13127 constant C is a power of two, i.e. a single bit. */
13128 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13129 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13130 && integer_zerop (arg1)
13131 && integer_pow2p (TREE_OPERAND (arg0, 1))
13132 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13133 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13134 {
13135 tree arg00 = TREE_OPERAND (arg0, 0);
13136 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13137 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13138 }
13139
13140 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13141 when C is a power of two, i.e. a single bit.  */
13142 if (TREE_CODE (arg0) == BIT_AND_EXPR
13143 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13144 && integer_zerop (arg1)
13145 && integer_pow2p (TREE_OPERAND (arg0, 1))
13146 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13147 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13148 {
13149 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13150 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13151 arg000, TREE_OPERAND (arg0, 1));
13152 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13153 tem, build_int_cst (TREE_TYPE (tem), 0));
13154 }
13155
13156 if (integer_zerop (arg1)
13157 && tree_expr_nonzero_p (arg0))
13158 {
13159 tree res = constant_boolean_node (code == NE_EXPR, type);
13160 return omit_one_operand_loc (loc, type, res, arg0);
13161 }
13162
13163 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13164 if (TREE_CODE (arg0) == NEGATE_EXPR
13165 && TREE_CODE (arg1) == NEGATE_EXPR)
13166 return fold_build2_loc (loc, code, type,
13167 TREE_OPERAND (arg0, 0),
13168 fold_convert_loc (loc, TREE_TYPE (arg0),
13169 TREE_OPERAND (arg1, 0)));
13170
13171 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries.  */
13172 if (TREE_CODE (arg0) == BIT_AND_EXPR
13173 && TREE_CODE (arg1) == BIT_AND_EXPR)
13174 {
13175 tree arg00 = TREE_OPERAND (arg0, 0);
13176 tree arg01 = TREE_OPERAND (arg0, 1);
13177 tree arg10 = TREE_OPERAND (arg1, 0);
13178 tree arg11 = TREE_OPERAND (arg1, 1);
13179 tree itype = TREE_TYPE (arg0);
13180
13181 if (operand_equal_p (arg01, arg11, 0))
13182 return fold_build2_loc (loc, code, type,
13183 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13184 fold_build2_loc (loc,
13185 BIT_XOR_EXPR, itype,
13186 arg00, arg10),
13187 arg01),
13188 build_zero_cst (itype));
13189
13190 if (operand_equal_p (arg01, arg10, 0))
13191 return fold_build2_loc (loc, code, type,
13192 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13193 fold_build2_loc (loc,
13194 BIT_XOR_EXPR, itype,
13195 arg00, arg11),
13196 arg01),
13197 build_zero_cst (itype));
13198
13199 if (operand_equal_p (arg00, arg11, 0))
13200 return fold_build2_loc (loc, code, type,
13201 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13202 fold_build2_loc (loc,
13203 BIT_XOR_EXPR, itype,
13204 arg01, arg10),
13205 arg00),
13206 build_zero_cst (itype));
13207
13208 if (operand_equal_p (arg00, arg10, 0))
13209 return fold_build2_loc (loc, code, type,
13210 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13211 fold_build2_loc (loc,
13212 BIT_XOR_EXPR, itype,
13213 arg01, arg11),
13214 arg00),
13215 build_zero_cst (itype));
13216 }
13217
13218 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13219 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13220 {
13221 tree arg00 = TREE_OPERAND (arg0, 0);
13222 tree arg01 = TREE_OPERAND (arg0, 1);
13223 tree arg10 = TREE_OPERAND (arg1, 0);
13224 tree arg11 = TREE_OPERAND (arg1, 1);
13225 tree itype = TREE_TYPE (arg0);
13226
13227 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13228 operand_equal_p guarantees no side-effects so we don't need
13229 to use omit_one_operand on Z. */
13230 if (operand_equal_p (arg01, arg11, 0))
13231 return fold_build2_loc (loc, code, type, arg00,
13232 fold_convert_loc (loc, TREE_TYPE (arg00),
13233 arg10));
13234 if (operand_equal_p (arg01, arg10, 0))
13235 return fold_build2_loc (loc, code, type, arg00,
13236 fold_convert_loc (loc, TREE_TYPE (arg00),
13237 arg11));
13238 if (operand_equal_p (arg00, arg11, 0))
13239 return fold_build2_loc (loc, code, type, arg01,
13240 fold_convert_loc (loc, TREE_TYPE (arg01),
13241 arg10));
13242 if (operand_equal_p (arg00, arg10, 0))
13243 return fold_build2_loc (loc, code, type, arg01,
13244 fold_convert_loc (loc, TREE_TYPE (arg01),
13245 arg11));
13246
13247 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
13248 if (TREE_CODE (arg01) == INTEGER_CST
13249 && TREE_CODE (arg11) == INTEGER_CST)
13250 {
13251 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13252 fold_convert_loc (loc, itype, arg11));
13253 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13254 return fold_build2_loc (loc, code, type, tem,
13255 fold_convert_loc (loc, itype, arg10));
13256 }
13257 }
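/* E.g., with C1 == 1 and C2 == 3, "(x ^ 1) == (y ^ 3)" becomes
   "(x ^ 2) == y", since 1 ^ 3 == 2; one BIT_XOR_EXPR disappears.  */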
13258
13259 /* Attempt to simplify equality/inequality comparisons of complex
13260 values. Only lower the comparison if the result is known or
13261 can be simplified to a single scalar comparison. */
13262 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13263 || TREE_CODE (arg0) == COMPLEX_CST)
13264 && (TREE_CODE (arg1) == COMPLEX_EXPR
13265 || TREE_CODE (arg1) == COMPLEX_CST))
13266 {
13267 tree real0, imag0, real1, imag1;
13268 tree rcond, icond;
13269
13270 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13271 {
13272 real0 = TREE_OPERAND (arg0, 0);
13273 imag0 = TREE_OPERAND (arg0, 1);
13274 }
13275 else
13276 {
13277 real0 = TREE_REALPART (arg0);
13278 imag0 = TREE_IMAGPART (arg0);
13279 }
13280
13281 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13282 {
13283 real1 = TREE_OPERAND (arg1, 0);
13284 imag1 = TREE_OPERAND (arg1, 1);
13285 }
13286 else
13287 {
13288 real1 = TREE_REALPART (arg1);
13289 imag1 = TREE_IMAGPART (arg1);
13290 }
13291
13292 rcond = fold_binary_loc (loc, code, type, real0, real1);
13293 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13294 {
13295 if (integer_zerop (rcond))
13296 {
13297 if (code == EQ_EXPR)
13298 return omit_two_operands_loc (loc, type, boolean_false_node,
13299 imag0, imag1);
13300 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13301 }
13302 else
13303 {
13304 if (code == NE_EXPR)
13305 return omit_two_operands_loc (loc, type, boolean_true_node,
13306 imag0, imag1);
13307 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13308 }
13309 }
13310
13311 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13312 if (icond && TREE_CODE (icond) == INTEGER_CST)
13313 {
13314 if (integer_zerop (icond))
13315 {
13316 if (code == EQ_EXPR)
13317 return omit_two_operands_loc (loc, type, boolean_false_node,
13318 real0, real1);
13319 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13320 }
13321 else
13322 {
13323 if (code == NE_EXPR)
13324 return omit_two_operands_loc (loc, type, boolean_true_node,
13325 real0, real1);
13326 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13327 }
13328 }
13329 }
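/* E.g., when comparing COMPLEX_EXPR <a, b> with COMPLEX_EXPR <c, d>:
   if "a == c" folds to constant 0 the whole "==" is false, and if it
   folds to constant 1 only "b == d" remains to be evaluated.  */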
13330
13331 return NULL_TREE;
13332
13333 case LT_EXPR:
13334 case GT_EXPR:
13335 case LE_EXPR:
13336 case GE_EXPR:
13337 tem = fold_comparison (loc, code, type, op0, op1);
13338 if (tem != NULL_TREE)
13339 return tem;
13340
13341 /* Transform comparisons of the form X +- C CMP X. */
13342 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13343 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13344 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13345 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13346 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13347 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13348 {
13349 tree arg01 = TREE_OPERAND (arg0, 1);
13350 enum tree_code code0 = TREE_CODE (arg0);
13351 int is_positive;
13352
13353 if (TREE_CODE (arg01) == REAL_CST)
13354 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13355 else
13356 is_positive = tree_int_cst_sgn (arg01);
13357
13358 /* (X - c) > X becomes false. */
13359 if (code == GT_EXPR
13360 && ((code0 == MINUS_EXPR && is_positive >= 0)
13361 || (code0 == PLUS_EXPR && is_positive <= 0)))
13362 {
13363 if (TREE_CODE (arg01) == INTEGER_CST
13364 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13365 fold_overflow_warning (("assuming signed overflow does not "
13366 "occur when assuming that (X - c) > X "
13367 "is always false"),
13368 WARN_STRICT_OVERFLOW_ALL);
13369 return constant_boolean_node (0, type);
13370 }
13371
13372 /* Likewise (X + c) < X becomes false. */
13373 if (code == LT_EXPR
13374 && ((code0 == PLUS_EXPR && is_positive >= 0)
13375 || (code0 == MINUS_EXPR && is_positive <= 0)))
13376 {
13377 if (TREE_CODE (arg01) == INTEGER_CST
13378 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13379 fold_overflow_warning (("assuming signed overflow does not "
13380 "occur when assuming that "
13381 "(X + c) < X is always false"),
13382 WARN_STRICT_OVERFLOW_ALL);
13383 return constant_boolean_node (0, type);
13384 }
13385
13386 /* Convert (X - c) <= X to true. */
13387 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13388 && code == LE_EXPR
13389 && ((code0 == MINUS_EXPR && is_positive >= 0)
13390 || (code0 == PLUS_EXPR && is_positive <= 0)))
13391 {
13392 if (TREE_CODE (arg01) == INTEGER_CST
13393 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13394 fold_overflow_warning (("assuming signed overflow does not "
13395 "occur when assuming that "
13396 "(X - c) <= X is always true"),
13397 WARN_STRICT_OVERFLOW_ALL);
13398 return constant_boolean_node (1, type);
13399 }
13400
13401 /* Convert (X + c) >= X to true. */
13402 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13403 && code == GE_EXPR
13404 && ((code0 == PLUS_EXPR && is_positive >= 0)
13405 || (code0 == MINUS_EXPR && is_positive <= 0)))
13406 {
13407 if (TREE_CODE (arg01) == INTEGER_CST
13408 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13409 fold_overflow_warning (("assuming signed overflow does not "
13410 "occur when assuming that "
13411 "(X + c) >= X is always true"),
13412 WARN_STRICT_OVERFLOW_ALL);
13413 return constant_boolean_node (1, type);
13414 }
13415
13416 if (TREE_CODE (arg01) == INTEGER_CST)
13417 {
13418 /* Convert X + c > X and X - c < X to true for integers. */
13419 if (code == GT_EXPR
13420 && ((code0 == PLUS_EXPR && is_positive > 0)
13421 || (code0 == MINUS_EXPR && is_positive < 0)))
13422 {
13423 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13424 fold_overflow_warning (("assuming signed overflow does "
13425 "not occur when assuming that "
13426 "(X + c) > X is always true"),
13427 WARN_STRICT_OVERFLOW_ALL);
13428 return constant_boolean_node (1, type);
13429 }
13430
13431 if (code == LT_EXPR
13432 && ((code0 == MINUS_EXPR && is_positive > 0)
13433 || (code0 == PLUS_EXPR && is_positive < 0)))
13434 {
13435 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13436 fold_overflow_warning (("assuming signed overflow does "
13437 "not occur when assuming that "
13438 "(X - c) < X is always true"),
13439 WARN_STRICT_OVERFLOW_ALL);
13440 return constant_boolean_node (1, type);
13441 }
13442
13443 /* Convert X + c <= X and X - c >= X to false for integers. */
13444 if (code == LE_EXPR
13445 && ((code0 == PLUS_EXPR && is_positive > 0)
13446 || (code0 == MINUS_EXPR && is_positive < 0)))
13447 {
13448 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13449 fold_overflow_warning (("assuming signed overflow does "
13450 "not occur when assuming that "
13451 "(X + c) <= X is always false"),
13452 WARN_STRICT_OVERFLOW_ALL);
13453 return constant_boolean_node (0, type);
13454 }
13455
13456 if (code == GE_EXPR
13457 && ((code0 == MINUS_EXPR && is_positive > 0)
13458 || (code0 == PLUS_EXPR && is_positive < 0)))
13459 {
13460 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13461 fold_overflow_warning (("assuming signed overflow does "
13462 "not occur when assuming that "
13463 "(X - c) >= X is always false"),
13464 WARN_STRICT_OVERFLOW_ALL);
13465 return constant_boolean_node (0, type);
13466 }
13467 }
13468 }
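/* E.g., for signed int x, where signed overflow is undefined,
   "x + 1 > x" folds to 1 and "x + 1 <= x" folds to 0, with the
   corresponding -Wstrict-overflow diagnostics issued above.  */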
13469
13470 /* Comparisons with the highest or lowest possible integer of
13471 the specified precision will have known values. */
13472 {
13473 tree arg1_type = TREE_TYPE (arg1);
13474 unsigned int width = TYPE_PRECISION (arg1_type);
13475
13476 if (TREE_CODE (arg1) == INTEGER_CST
13477 && width <= HOST_BITS_PER_DOUBLE_INT
13478 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13479 {
13480 HOST_WIDE_INT signed_max_hi;
13481 unsigned HOST_WIDE_INT signed_max_lo;
13482 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13483
13484 if (width <= HOST_BITS_PER_WIDE_INT)
13485 {
13486 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13487 - 1;
13488 signed_max_hi = 0;
13489 max_hi = 0;
13490
13491 if (TYPE_UNSIGNED (arg1_type))
13492 {
13493 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13494 min_lo = 0;
13495 min_hi = 0;
13496 }
13497 else
13498 {
13499 max_lo = signed_max_lo;
13500 min_lo = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13501 min_hi = -1;
13502 }
13503 }
13504 else
13505 {
13506 width -= HOST_BITS_PER_WIDE_INT;
13507 signed_max_lo = -1;
13508 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13509 - 1;
13510 max_lo = -1;
13511 min_lo = 0;
13512
13513 if (TYPE_UNSIGNED (arg1_type))
13514 {
13515 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13516 min_hi = 0;
13517 }
13518 else
13519 {
13520 max_hi = signed_max_hi;
13521 min_hi = ((unsigned HOST_WIDE_INT) -1 << (width - 1));
13522 }
13523 }
13524
13525 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13526 && TREE_INT_CST_LOW (arg1) == max_lo)
13527 switch (code)
13528 {
13529 case GT_EXPR:
13530 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13531
13532 case GE_EXPR:
13533 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13534
13535 case LE_EXPR:
13536 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13537
13538 case LT_EXPR:
13539 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13540
13541 /* The GE_EXPR and LT_EXPR cases above are not normally
13542 reached because of previous transformations. */
13543
13544 default:
13545 break;
13546 }
13547 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13548 == max_hi
13549 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13550 switch (code)
13551 {
13552 case GT_EXPR:
13553 arg1 = const_binop (PLUS_EXPR, arg1,
13554 build_int_cst (TREE_TYPE (arg1), 1));
13555 return fold_build2_loc (loc, EQ_EXPR, type,
13556 fold_convert_loc (loc,
13557 TREE_TYPE (arg1), arg0),
13558 arg1);
13559 case LE_EXPR:
13560 arg1 = const_binop (PLUS_EXPR, arg1,
13561 build_int_cst (TREE_TYPE (arg1), 1));
13562 return fold_build2_loc (loc, NE_EXPR, type,
13563 fold_convert_loc (loc, TREE_TYPE (arg1),
13564 arg0),
13565 arg1);
13566 default:
13567 break;
13568 }
13569 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13570 == min_hi
13571 && TREE_INT_CST_LOW (arg1) == min_lo)
13572 switch (code)
13573 {
13574 case LT_EXPR:
13575 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13576
13577 case LE_EXPR:
13578 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13579
13580 case GE_EXPR:
13581 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13582
13583 case GT_EXPR:
13584 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13585
13586 default:
13587 break;
13588 }
13589 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13590 == min_hi
13591 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13592 switch (code)
13593 {
13594 case GE_EXPR:
13595 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13596 return fold_build2_loc (loc, NE_EXPR, type,
13597 fold_convert_loc (loc,
13598 TREE_TYPE (arg1), arg0),
13599 arg1);
13600 case LT_EXPR:
13601 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13602 return fold_build2_loc (loc, EQ_EXPR, type,
13603 fold_convert_loc (loc, TREE_TYPE (arg1),
13604 arg0),
13605 arg1);
13606 default:
13607 break;
13608 }
13609
13610 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13611 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13612 && TYPE_UNSIGNED (arg1_type)
13613 /* We will flip the signedness of the comparison operator
13614 associated with the mode of arg1, so the sign bit is
13615 specified by this mode. Check that arg1 is the signed
13616 max associated with this sign bit. */
13617 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13618 /* signed_type does not work on pointer types. */
13619 && INTEGRAL_TYPE_P (arg1_type))
13620 {
13621 /* The following case also applies to X < signed_max+1
13622 and X >= signed_max+1 because of previous transformations. */
13623 if (code == LE_EXPR || code == GT_EXPR)
13624 {
13625 tree st;
13626 st = signed_type_for (TREE_TYPE (arg1));
13627 return fold_build2_loc (loc,
13628 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13629 type, fold_convert_loc (loc, st, arg0),
13630 build_int_cst (st, 0));
13631 }
13632 }
13633 }
13634 }
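/* E.g., assuming an 8-bit unsigned char x: "x > 255" folds to 0,
   "x <= 255" folds to 1, "x > 254" becomes "x == 255", and
   "x > 127" becomes "(signed char) x < 0" via the signedness flip
   above.  */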
13635
13636 /* If we are comparing an ABS_EXPR with a constant, we can
13637 convert all the cases into explicit comparisons, but they may
13638 well not be faster than doing the ABS and one comparison.
13639 But ABS (X) <= C is a range comparison, which becomes a subtraction
13640 and a comparison, and is probably faster. */
13641 if (code == LE_EXPR
13642 && TREE_CODE (arg1) == INTEGER_CST
13643 && TREE_CODE (arg0) == ABS_EXPR
13644 && ! TREE_SIDE_EFFECTS (arg0)
13645 && (0 != (tem = negate_expr (arg1)))
13646 && TREE_CODE (tem) == INTEGER_CST
13647 && !TREE_OVERFLOW (tem))
13648 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13649 build2 (GE_EXPR, type,
13650 TREE_OPERAND (arg0, 0), tem),
13651 build2 (LE_EXPR, type,
13652 TREE_OPERAND (arg0, 0), arg1));
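/* E.g., "ABS_EXPR <x> <= 5" becomes "x >= -5 && x <= 5", a range
   check that avoids materializing the absolute value.  */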
13653
13654 /* Convert ABS_EXPR<x> >= 0 to true. */
13655 strict_overflow_p = false;
13656 if (code == GE_EXPR
13657 && (integer_zerop (arg1)
13658 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13659 && real_zerop (arg1)))
13660 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13661 {
13662 if (strict_overflow_p)
13663 fold_overflow_warning (("assuming signed overflow does not occur "
13664 "when simplifying comparison of "
13665 "absolute value and zero"),
13666 WARN_STRICT_OVERFLOW_CONDITIONAL);
13667 return omit_one_operand_loc (loc, type,
13668 constant_boolean_node (true, type),
13669 arg0);
13670 }
13671
13672 /* Convert ABS_EXPR<x> < 0 to false. */
13673 strict_overflow_p = false;
13674 if (code == LT_EXPR
13675 && (integer_zerop (arg1) || real_zerop (arg1))
13676 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13677 {
13678 if (strict_overflow_p)
13679 fold_overflow_warning (("assuming signed overflow does not occur "
13680 "when simplifying comparison of "
13681 "absolute value and zero"),
13682 WARN_STRICT_OVERFLOW_CONDITIONAL);
13683 return omit_one_operand_loc (loc, type,
13684 constant_boolean_node (false, type),
13685 arg0);
13686 }
13687
13688 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13689 and similarly for >= into !=. */
13690 if ((code == LT_EXPR || code == GE_EXPR)
13691 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13692 && TREE_CODE (arg1) == LSHIFT_EXPR
13693 && integer_onep (TREE_OPERAND (arg1, 0)))
13694 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13695 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13696 TREE_OPERAND (arg1, 1)),
13697 build_zero_cst (TREE_TYPE (arg0)));
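/* E.g., for unsigned x, "x < (1 << y)" becomes "(x >> y) == 0" and
   "x >= (1 << y)" becomes "(x >> y) != 0".  */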
13698
13699 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13700 otherwise Y might be >= # of bits in X's type and thus e.g.
13701 (unsigned char) (1 << Y) for Y == 15 might be 0.
13702 If the cast is widening, then 1 << Y should have an unsigned type,
13703 since otherwise, if Y is the number of bits in the signed shift type
13704 minus 1, we can't optimize this. E.g. (unsigned long long) (1 << Y)
13705 for Y == 31 might be 0xffffffff80000000. */
13706 if ((code == LT_EXPR || code == GE_EXPR)
13707 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13708 && CONVERT_EXPR_P (arg1)
13709 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13710 && (TYPE_PRECISION (TREE_TYPE (arg1))
13711 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13712 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13713 || (TYPE_PRECISION (TREE_TYPE (arg1))
13714 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13715 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13716 {
13717 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13718 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13719 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13720 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13721 build_zero_cst (TREE_TYPE (arg0)));
13722 }
13723
13724 return NULL_TREE;
13725
13726 case UNORDERED_EXPR:
13727 case ORDERED_EXPR:
13728 case UNLT_EXPR:
13729 case UNLE_EXPR:
13730 case UNGT_EXPR:
13731 case UNGE_EXPR:
13732 case UNEQ_EXPR:
13733 case LTGT_EXPR:
13734 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13735 {
13736 t1 = fold_relational_const (code, type, arg0, arg1);
13737 if (t1 != NULL_TREE)
13738 return t1;
13739 }
13740
13741 /* If the first operand is NaN, the result is constant. */
13742 if (TREE_CODE (arg0) == REAL_CST
13743 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13744 && (code != LTGT_EXPR || ! flag_trapping_math))
13745 {
13746 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13747 ? integer_zero_node
13748 : integer_one_node;
13749 return omit_one_operand_loc (loc, type, t1, arg1);
13750 }
13751
13752 /* If the second operand is NaN, the result is constant. */
13753 if (TREE_CODE (arg1) == REAL_CST
13754 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13755 && (code != LTGT_EXPR || ! flag_trapping_math))
13756 {
13757 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13758 ? integer_zero_node
13759 : integer_one_node;
13760 return omit_one_operand_loc (loc, type, t1, arg0);
13761 }
13762
13763 /* Simplify unordered comparison of something with itself. */
13764 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13765 && operand_equal_p (arg0, arg1, 0))
13766 return constant_boolean_node (1, type);
13767
13768 if (code == LTGT_EXPR
13769 && !flag_trapping_math
13770 && operand_equal_p (arg0, arg1, 0))
13771 return constant_boolean_node (0, type);
13772
13773 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13774 {
13775 tree targ0 = strip_float_extensions (arg0);
13776 tree targ1 = strip_float_extensions (arg1);
13777 tree newtype = TREE_TYPE (targ0);
13778
13779 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13780 newtype = TREE_TYPE (targ1);
13781
13782 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13783 return fold_build2_loc (loc, code, type,
13784 fold_convert_loc (loc, newtype, targ0),
13785 fold_convert_loc (loc, newtype, targ1));
13786 }
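/* E.g., with float f1, f2, "(double) f1 < (double) f2" folds to
   "f1 < f2": extending both operands to a wider format cannot change
   the outcome of the comparison.  */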
13787
13788 return NULL_TREE;
13789
13790 case COMPOUND_EXPR:
13791 /* When pedantic, a compound expression can be neither an lvalue
13792 nor an integer constant expression. */
13793 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13794 return NULL_TREE;
13795 /* Don't let (0, 0) be a null pointer constant. */
13796 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13797 : fold_convert_loc (loc, type, arg1);
13798 return pedantic_non_lvalue_loc (loc, tem);
13799
13800 case COMPLEX_EXPR:
13801 if ((TREE_CODE (arg0) == REAL_CST
13802 && TREE_CODE (arg1) == REAL_CST)
13803 || (TREE_CODE (arg0) == INTEGER_CST
13804 && TREE_CODE (arg1) == INTEGER_CST))
13805 return build_complex (type, arg0, arg1);
13806 if (TREE_CODE (arg0) == REALPART_EXPR
13807 && TREE_CODE (arg1) == IMAGPART_EXPR
13808 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
13809 && operand_equal_p (TREE_OPERAND (arg0, 0),
13810 TREE_OPERAND (arg1, 0), 0))
13811 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
13812 TREE_OPERAND (arg1, 0));
13813 return NULL_TREE;
13814
13815 case ASSERT_EXPR:
13816 /* An ASSERT_EXPR should never be passed to fold_binary. */
13817 gcc_unreachable ();
13818
13819 case VEC_PACK_TRUNC_EXPR:
13820 case VEC_PACK_FIX_TRUNC_EXPR:
13821 {
13822 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13823 tree *elts;
13824
13825 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
13826 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
13827 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13828 return NULL_TREE;
13829
13830 elts = XALLOCAVEC (tree, nelts);
13831 if (!vec_cst_ctor_to_array (arg0, elts)
13832 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
13833 return NULL_TREE;
13834
13835 for (i = 0; i < nelts; i++)
13836 {
13837 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
13838 ? NOP_EXPR : FIX_TRUNC_EXPR,
13839 TREE_TYPE (type), elts[i]);
13840 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
13841 return NULL_TREE;
13842 }
13843
13844 return build_vector (type, elts);
13845 }
13846
13847 case VEC_WIDEN_MULT_LO_EXPR:
13848 case VEC_WIDEN_MULT_HI_EXPR:
13849 case VEC_WIDEN_MULT_EVEN_EXPR:
13850 case VEC_WIDEN_MULT_ODD_EXPR:
13851 {
13852 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
13853 unsigned int out, ofs, scale;
13854 tree *elts;
13855
13856 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
13857 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
13858 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
13859 return NULL_TREE;
13860
13861 elts = XALLOCAVEC (tree, nelts * 4);
13862 if (!vec_cst_ctor_to_array (arg0, elts)
13863 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
13864 return NULL_TREE;
13865
13866 if (code == VEC_WIDEN_MULT_LO_EXPR)
13867 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
13868 else if (code == VEC_WIDEN_MULT_HI_EXPR)
13869 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
13870 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
13871 scale = 1, ofs = 0;
13872 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
13873 scale = 1, ofs = 1;
13874
13875 for (out = 0; out < nelts; out++)
13876 {
13877 unsigned int in1 = (out << scale) + ofs;
13878 unsigned int in2 = in1 + nelts * 2;
13879 tree t1, t2;
13880
13881 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
13882 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
13883
13884 if (t1 == NULL_TREE || t2 == NULL_TREE)
13885 return NULL_TREE;
13886 elts[out] = const_binop (MULT_EXPR, t1, t2);
13887 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
13888 return NULL_TREE;
13889 }
13890
13891 return build_vector (type, elts);
13892 }
13893
13894 default:
13895 return NULL_TREE;
13896 } /* switch (code) */
13897 }
13898
13899 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13900 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13901 of GOTO_EXPR. */
13902
13903 static tree
13904 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13905 {
13906 switch (TREE_CODE (*tp))
13907 {
13908 case LABEL_EXPR:
13909 return *tp;
13910
13911 case GOTO_EXPR:
13912 *walk_subtrees = 0;
13913
13914 /* ... fall through ... */
13915
13916 default:
13917 return NULL_TREE;
13918 }
13919 }
13920
13921 /* Return whether the sub-tree ST contains a label which is accessible from
13922 outside the sub-tree. */
13923
13924 static bool
13925 contains_label_p (tree st)
13926 {
13927 return
13928 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13929 }
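/* E.g., contains_label_p is true for a statement tree containing
   "lab:;" but false for one containing only "goto lab;", because the
   operands of a GOTO_EXPR are not walked.  */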
13930
13931 /* Fold a ternary expression of code CODE and type TYPE with operands
13932 OP0, OP1, and OP2. Return the folded expression if folding is
13933 successful. Otherwise, return NULL_TREE. */
13934
13935 tree
13936 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13937 tree op0, tree op1, tree op2)
13938 {
13939 tree tem;
13940 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13941 enum tree_code_class kind = TREE_CODE_CLASS (code);
13942
13943 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13944 && TREE_CODE_LENGTH (code) == 3);
13945
13946 /* Strip any conversions that don't change the mode. This is safe
13947 for every expression, except for a comparison expression because
13948 its signedness is derived from its operands. So, in the latter
13949 case, only strip conversions that don't change the signedness.
13950
13951 Note that this is done as an internal manipulation within the
13952 constant folder, in order to find the simplest representation of
13953 the arguments so that their form can be studied. In any case,
13954 the appropriate type conversions should be put back in the tree
13955 that will get out of the constant folder. */
13956 if (op0)
13957 {
13958 arg0 = op0;
13959 STRIP_NOPS (arg0);
13960 }
13961
13962 if (op1)
13963 {
13964 arg1 = op1;
13965 STRIP_NOPS (arg1);
13966 }
13967
13968 if (op2)
13969 {
13970 arg2 = op2;
13971 STRIP_NOPS (arg2);
13972 }
13973
13974 switch (code)
13975 {
13976 case COMPONENT_REF:
13977 if (TREE_CODE (arg0) == CONSTRUCTOR
13978 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13979 {
13980 unsigned HOST_WIDE_INT idx;
13981 tree field, value;
13982 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13983 if (field == arg1)
13984 return value;
13985 }
13986 return NULL_TREE;
13987
13988 case COND_EXPR:
13989 case VEC_COND_EXPR:
13990 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13991 so all simple results must be passed through pedantic_non_lvalue. */
13992 if (TREE_CODE (arg0) == INTEGER_CST)
13993 {
13994 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13995 tem = integer_zerop (arg0) ? op2 : op1;
13996 /* Only optimize constant conditions when the selected branch
13997 has the same type as the COND_EXPR. This avoids optimizing
13998 away "c ? x : throw", where the throw has a void type.
13999 Avoid throwing away the operand that contains a label. */
14000 if ((!TREE_SIDE_EFFECTS (unused_op)
14001 || !contains_label_p (unused_op))
14002 && (! VOID_TYPE_P (TREE_TYPE (tem))
14003 || VOID_TYPE_P (type)))
14004 return pedantic_non_lvalue_loc (loc, tem);
14005 return NULL_TREE;
14006 }
14007 else if (TREE_CODE (arg0) == VECTOR_CST)
14008 {
14009 if (integer_all_onesp (arg0))
14010 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14011 if (integer_zerop (arg0))
14012 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14013
14014 if ((TREE_CODE (arg1) == VECTOR_CST
14015 || TREE_CODE (arg1) == CONSTRUCTOR)
14016 && (TREE_CODE (arg2) == VECTOR_CST
14017 || TREE_CODE (arg2) == CONSTRUCTOR))
14018 {
14019 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14020 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14021 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14022 for (i = 0; i < nelts; i++)
14023 {
14024 tree val = VECTOR_CST_ELT (arg0, i);
14025 if (integer_all_onesp (val))
14026 sel[i] = i;
14027 else if (integer_zerop (val))
14028 sel[i] = nelts + i;
14029 else /* Currently unreachable. */
14030 return NULL_TREE;
14031 }
14032 tree t = fold_vec_perm (type, arg1, arg2, sel);
14033 if (t != NULL_TREE)
14034 return t;
14035 }
14036 }
14037
14038 if (operand_equal_p (arg1, op2, 0))
14039 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14040
14041 /* If we have A op B ? A : C, we may be able to convert this to a
14042 simpler expression, depending on the operation and the values
14043 of B and C. Signed zeros prevent all of these transformations,
14044 for reasons given above each one.
14045
14046 Also try swapping the arguments and inverting the conditional. */
14047 if (COMPARISON_CLASS_P (arg0)
14048 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14049 arg1, TREE_OPERAND (arg0, 1))
14050 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14051 {
14052 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14053 if (tem)
14054 return tem;
14055 }
14056
14057 if (COMPARISON_CLASS_P (arg0)
14058 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14059 op2,
14060 TREE_OPERAND (arg0, 1))
14061 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14062 {
14063 location_t loc0 = expr_location_or (arg0, loc);
14064 tem = fold_invert_truthvalue (loc0, arg0);
14065 if (tem && COMPARISON_CLASS_P (tem))
14066 {
14067 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14068 if (tem)
14069 return tem;
14070 }
14071 }
14072
14073 /* If the second operand is simpler than the third, swap them
14074 since that produces better jump optimization results. */
14075 if (truth_value_p (TREE_CODE (arg0))
14076 && tree_swap_operands_p (op1, op2, false))
14077 {
14078 location_t loc0 = expr_location_or (arg0, loc);
14079 /* See if this can be inverted. If it can't, possibly because
14080 it was a floating-point inequality comparison, don't do
14081 anything. */
14082 tem = fold_invert_truthvalue (loc0, arg0);
14083 if (tem)
14084 return fold_build3_loc (loc, code, type, tem, op2, op1);
14085 }
14086
14087 /* Convert A ? 1 : 0 to simply A. */
14088 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14089 : (integer_onep (op1)
14090 && !VECTOR_TYPE_P (type)))
14091 && integer_zerop (op2)
14092 /* If we try to convert OP0 to our type, the
14093 call to fold will try to move the conversion inside
14094 a COND, which will recurse. In that case, the COND_EXPR
14095 is probably the best choice, so leave it alone. */
14096 && type == TREE_TYPE (arg0))
14097 return pedantic_non_lvalue_loc (loc, arg0);
14098
14099 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14100 over COND_EXPR in cases such as floating point comparisons. */
14101 if (integer_zerop (op1)
14102 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14103 : (integer_onep (op2)
14104 && !VECTOR_TYPE_P (type)))
14105 && truth_value_p (TREE_CODE (arg0)))
14106 return pedantic_non_lvalue_loc (loc,
14107 fold_convert_loc (loc, type,
14108 invert_truthvalue_loc (loc,
14109 arg0)));
14110
14111 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
14112 if (TREE_CODE (arg0) == LT_EXPR
14113 && integer_zerop (TREE_OPERAND (arg0, 1))
14114 && integer_zerop (op2)
14115 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14116 {
14117 /* sign_bit_p only checks ARG1 bits within A's precision.
14118 If <sign bit of A> has wider type than A, bits outside
14119 of A's precision in <sign bit of A> need to be checked.
14120 If they are all 0, this optimization needs to be done
14121 in unsigned A's type; if they are all 1, in signed A's type;
14122 otherwise this can't be done. */
14123 if (TYPE_PRECISION (TREE_TYPE (tem))
14124 < TYPE_PRECISION (TREE_TYPE (arg1))
14125 && TYPE_PRECISION (TREE_TYPE (tem))
14126 < TYPE_PRECISION (type))
14127 {
14128 unsigned HOST_WIDE_INT mask_lo;
14129 HOST_WIDE_INT mask_hi;
14130 int inner_width, outer_width;
14131 tree tem_type;
14132
14133 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14134 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14135 if (outer_width > TYPE_PRECISION (type))
14136 outer_width = TYPE_PRECISION (type);
14137
14138 if (outer_width > HOST_BITS_PER_WIDE_INT)
14139 {
14140 mask_hi = ((unsigned HOST_WIDE_INT) -1
14141 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14142 mask_lo = -1;
14143 }
14144 else
14145 {
14146 mask_hi = 0;
14147 mask_lo = ((unsigned HOST_WIDE_INT) -1
14148 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14149 }
14150 if (inner_width > HOST_BITS_PER_WIDE_INT)
14151 {
14152 mask_hi &= ~((unsigned HOST_WIDE_INT) -1
14153 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14154 mask_lo = 0;
14155 }
14156 else
14157 mask_lo &= ~((unsigned HOST_WIDE_INT) -1
14158 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14159
14160 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14161 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14162 {
14163 tem_type = signed_type_for (TREE_TYPE (tem));
14164 tem = fold_convert_loc (loc, tem_type, tem);
14165 }
14166 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14167 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14168 {
14169 tem_type = unsigned_type_for (TREE_TYPE (tem));
14170 tem = fold_convert_loc (loc, tem_type, tem);
14171 }
14172 else
14173 tem = NULL;
14174 }
14175
14176 if (tem)
14177 return
14178 fold_convert_loc (loc, type,
14179 fold_build2_loc (loc, BIT_AND_EXPR,
14180 TREE_TYPE (tem), tem,
14181 fold_convert_loc (loc,
14182 TREE_TYPE (tem),
14183 arg1)));
14184 }
14185
14186 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14187 already handled above. */
14188 if (TREE_CODE (arg0) == BIT_AND_EXPR
14189 && integer_onep (TREE_OPERAND (arg0, 1))
14190 && integer_zerop (op2)
14191 && integer_pow2p (arg1))
14192 {
14193 tree tem = TREE_OPERAND (arg0, 0);
14194 STRIP_NOPS (tem);
14195 if (TREE_CODE (tem) == RSHIFT_EXPR
14196 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14197 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
14198 == TREE_INT_CST_LOW (TREE_OPERAND (tem, 1))))
14199 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14200 TREE_OPERAND (tem, 0), arg1);
14201 }
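/* E.g., "(a >> 3) & 1 ? 8 : 0" folds to "a & 8": bit 3 is tested and
   produced in place, with no shift left in the result.  */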
14202
14203 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14204 is probably obsolete because the first operand should be a
14205 truth value (that's why we have the two cases above), but let's
14206 leave it in until we can confirm this for all front-ends. */
14207 if (integer_zerop (op2)
14208 && TREE_CODE (arg0) == NE_EXPR
14209 && integer_zerop (TREE_OPERAND (arg0, 1))
14210 && integer_pow2p (arg1)
14211 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14212 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14213 arg1, OEP_ONLY_CONST))
14214 return pedantic_non_lvalue_loc (loc,
14215 fold_convert_loc (loc, type,
14216 TREE_OPERAND (arg0, 0)));
14217
14218 /* Disable the transformations below for vectors, since
14219 fold_binary_op_with_conditional_arg may undo them immediately,
14220 yielding an infinite loop. */
14221 if (code == VEC_COND_EXPR)
14222 return NULL_TREE;
14223
14224 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14225 if (integer_zerop (op2)
14226 && truth_value_p (TREE_CODE (arg0))
14227 && truth_value_p (TREE_CODE (arg1))
14228 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14229 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14230 : TRUTH_ANDIF_EXPR,
14231 type, fold_convert_loc (loc, type, arg0), arg1);
14232
14233 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14234 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
14235 && truth_value_p (TREE_CODE (arg0))
14236 && truth_value_p (TREE_CODE (arg1))
14237 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14238 {
14239 location_t loc0 = expr_location_or (arg0, loc);
14240 /* Only perform transformation if ARG0 is easily inverted. */
14241 tem = fold_invert_truthvalue (loc0, arg0);
14242 if (tem)
14243 return fold_build2_loc (loc, code == VEC_COND_EXPR
14244 ? BIT_IOR_EXPR
14245 : TRUTH_ORIF_EXPR,
14246 type, fold_convert_loc (loc, type, tem),
14247 arg1);
14248 }
14249
14250 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14251 if (integer_zerop (arg1)
14252 && truth_value_p (TREE_CODE (arg0))
14253 && truth_value_p (TREE_CODE (op2))
14254 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14255 {
14256 location_t loc0 = expr_location_or (arg0, loc);
14257 /* Only perform transformation if ARG0 is easily inverted. */
14258 tem = fold_invert_truthvalue (loc0, arg0);
14259 if (tem)
14260 return fold_build2_loc (loc, code == VEC_COND_EXPR
14261 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14262 type, fold_convert_loc (loc, type, tem),
14263 op2);
14264 }
14265
14266 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14267 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
14268 && truth_value_p (TREE_CODE (arg0))
14269 && truth_value_p (TREE_CODE (op2))
14270 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14271 return fold_build2_loc (loc, code == VEC_COND_EXPR
14272 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14273 type, fold_convert_loc (loc, type, arg0), op2);
14274
14275 return NULL_TREE;
14276
14277 case CALL_EXPR:
14278 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14279 of fold_ternary on them. */
14280 gcc_unreachable ();
14281
14282 case BIT_FIELD_REF:
14283 if ((TREE_CODE (arg0) == VECTOR_CST
14284 || (TREE_CODE (arg0) == CONSTRUCTOR
14285 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14286 && (type == TREE_TYPE (TREE_TYPE (arg0))
14287 || (TREE_CODE (type) == VECTOR_TYPE
14288 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14289 {
14290 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14291 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14292 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14293 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14294
14295 if (n != 0
14296 && (idx % width) == 0
14297 && (n % width) == 0
14298 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14299 {
14300 idx = idx / width;
14301 n = n / width;
14302
14303 if (TREE_CODE (arg0) == VECTOR_CST)
14304 {
14305 if (n == 1)
14306 return VECTOR_CST_ELT (arg0, idx);
14307
14308 tree *vals = XALLOCAVEC (tree, n);
14309 for (unsigned i = 0; i < n; ++i)
14310 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14311 return build_vector (type, vals);
14312 }
14313
14314 /* Constructor elements can be subvectors. */
14315 unsigned HOST_WIDE_INT k = 1;
14316 if (CONSTRUCTOR_NELTS (arg0) != 0)
14317 {
14318 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14319 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14320 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14321 }
14322
14323 /* We keep an exact subset of the constructor elements. */
14324 if ((idx % k) == 0 && (n % k) == 0)
14325 {
14326 if (CONSTRUCTOR_NELTS (arg0) == 0)
14327 return build_constructor (type, NULL);
14328 idx /= k;
14329 n /= k;
14330 if (n == 1)
14331 {
14332 if (idx < CONSTRUCTOR_NELTS (arg0))
14333 return CONSTRUCTOR_ELT (arg0, idx)->value;
14334 return build_zero_cst (type);
14335 }
14336
14337 vec<constructor_elt, va_gc> *vals;
14338 vec_alloc (vals, n);
14339 for (unsigned i = 0;
14340 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14341 ++i)
14342 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14343 CONSTRUCTOR_ELT
14344 (arg0, idx + i)->value);
14345 return build_constructor (type, vals);
14346 }
14347 /* The bitfield references a single constructor element. */
14348 else if (idx + n <= (idx / k + 1) * k)
14349 {
14350 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14351 return build_zero_cst (type);
14352 else if (n == k)
14353 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14354 else
14355 return fold_build3_loc (loc, code, type,
14356 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14357 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14358 }
14359 }
14360 }
14361
14362 /* A bit-field-ref that references the full argument can be stripped. */
14363 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14364 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14365 && integer_zerop (op2))
14366 return fold_convert_loc (loc, type, arg0);
14367
14368 /* On constants we can use native encode/interpret to constant
14369 fold (nearly) all BIT_FIELD_REFs. */
14370 if (CONSTANT_CLASS_P (arg0)
14371 && can_native_interpret_type_p (type)
14372 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14373 /* This limitation should not be necessary, we just need to
14374 round this up to mode size. */
14375 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14376 /* Need bit-shifting of the buffer to relax the following. */
14377 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14378 {
14379 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14380 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14381 unsigned HOST_WIDE_INT clen;
14382 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14383 /* ??? We cannot tell native_encode_expr to start at
14384 some random byte only. So limit us to a reasonable amount
14385 of work. */
14386 if (clen <= 4096)
14387 {
14388 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14389 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14390 if (len > 0
14391 && len * BITS_PER_UNIT >= bitpos + bitsize)
14392 {
14393 tree v = native_interpret_expr (type,
14394 b + bitpos / BITS_PER_UNIT,
14395 bitsize / BITS_PER_UNIT);
14396 if (v)
14397 return v;
14398 }
14399 }
14400 }
14401
14402 return NULL_TREE;
14403
14404 case FMA_EXPR:
14405 /* For integers we can decompose the FMA if possible. */
14406 if (TREE_CODE (arg0) == INTEGER_CST
14407 && TREE_CODE (arg1) == INTEGER_CST)
14408 return fold_build2_loc (loc, PLUS_EXPR, type,
14409 const_binop (MULT_EXPR, arg0, arg1), arg2);
14410 if (integer_zerop (arg2))
14411 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14412
14413 return fold_fma (loc, type, arg0, arg1, arg2);
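/* Examples of the integer decompositions above: FMA_EXPR <4, 5, c>
   folds to "20 + c", and FMA_EXPR <a, b, 0> folds to "a * b".  */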
14414
14415 case VEC_PERM_EXPR:
14416 if (TREE_CODE (arg2) == VECTOR_CST)
14417 {
14418 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14419 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14420 tree t;
14421 bool need_mask_canon = false;
14422 bool all_in_vec0 = true;
14423 bool all_in_vec1 = true;
14424 bool maybe_identity = true;
14425 bool single_arg = (op0 == op1);
14426 bool changed = false;
14427
14428 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14429 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14430 for (i = 0; i < nelts; i++)
14431 {
14432 tree val = VECTOR_CST_ELT (arg2, i);
14433 if (TREE_CODE (val) != INTEGER_CST)
14434 return NULL_TREE;
14435
14436 sel[i] = TREE_INT_CST_LOW (val) & mask;
14437 if (TREE_INT_CST_HIGH (val)
14438 || ((unsigned HOST_WIDE_INT)
14439 TREE_INT_CST_LOW (val) != sel[i]))
14440 need_mask_canon = true;
14441
14442 if (sel[i] < nelts)
14443 all_in_vec1 = false;
14444 else
14445 all_in_vec0 = false;
14446
14447 if ((sel[i] & (nelts-1)) != i)
14448 maybe_identity = false;
14449 }
14450
14451 if (maybe_identity)
14452 {
14453 if (all_in_vec0)
14454 return op0;
14455 if (all_in_vec1)
14456 return op1;
14457 }
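/* E.g., with four-element vectors the selector {0, 1, 2, 3} yields
   op0 unchanged and {4, 5, 6, 7} yields op1 unchanged.  */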
14458
14459 if (all_in_vec0)
14460 op1 = op0;
14461 else if (all_in_vec1)
14462 {
14463 op0 = op1;
14464 for (i = 0; i < nelts; i++)
14465 sel[i] -= nelts;
14466 need_mask_canon = true;
14467 }
14468
14469 if ((TREE_CODE (op0) == VECTOR_CST
14470 || TREE_CODE (op0) == CONSTRUCTOR)
14471 && (TREE_CODE (op1) == VECTOR_CST
14472 || TREE_CODE (op1) == CONSTRUCTOR))
14473 {
14474 t = fold_vec_perm (type, op0, op1, sel);
14475 if (t != NULL_TREE)
14476 return t;
14477 }
14478
14479 if (op0 == op1 && !single_arg)
14480 changed = true;
14481
14482 if (need_mask_canon && arg2 == op2)
14483 {
14484 tree *tsel = XALLOCAVEC (tree, nelts);
14485 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14486 for (i = 0; i < nelts; i++)
14487 tsel[i] = build_int_cst (eltype, sel[i]);
14488 op2 = build_vector (TREE_TYPE (arg2), tsel);
14489 changed = true;
14490 }
14491
14492 if (changed)
14493 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14494 }
14495 return NULL_TREE;
14496
14497 default:
14498 return NULL_TREE;
14499 } /* switch (code) */
14500 }
14501
14502 /* Perform constant folding and related simplification of EXPR.
14503 The related simplifications include x*1 => x, x*0 => 0, etc.,
14504 and application of the associative law.
14505 NOP_EXPR conversions may be removed freely (as long as we
14506 are careful not to change the type of the overall expression).
14507 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14508 but we can constant-fold them if they have constant operands. */
14509
14510 #ifdef ENABLE_FOLD_CHECKING
14511 # define fold(x) fold_1 (x)
14512 static tree fold_1 (tree);
14513 static
14514 #endif
14515 tree
14516 fold (tree expr)
14517 {
14518 const tree t = expr;
14519 enum tree_code code = TREE_CODE (t);
14520 enum tree_code_class kind = TREE_CODE_CLASS (code);
14521 tree tem;
14522 location_t loc = EXPR_LOCATION (expr);
14523
14524 /* Return right away if a constant. */
14525 if (kind == tcc_constant)
14526 return t;
14527
14528 /* CALL_EXPR-like objects with variable numbers of operands are
14529 treated specially. */
14530 if (kind == tcc_vl_exp)
14531 {
14532 if (code == CALL_EXPR)
14533 {
14534 tem = fold_call_expr (loc, expr, false);
14535 return tem ? tem : expr;
14536 }
14537 return expr;
14538 }
14539
14540 if (IS_EXPR_CODE_CLASS (kind))
14541 {
14542 tree type = TREE_TYPE (t);
14543 tree op0, op1, op2;
14544
14545 switch (TREE_CODE_LENGTH (code))
14546 {
14547 case 1:
14548 op0 = TREE_OPERAND (t, 0);
14549 tem = fold_unary_loc (loc, code, type, op0);
14550 return tem ? tem : expr;
14551 case 2:
14552 op0 = TREE_OPERAND (t, 0);
14553 op1 = TREE_OPERAND (t, 1);
14554 tem = fold_binary_loc (loc, code, type, op0, op1);
14555 return tem ? tem : expr;
14556 case 3:
14557 op0 = TREE_OPERAND (t, 0);
14558 op1 = TREE_OPERAND (t, 1);
14559 op2 = TREE_OPERAND (t, 2);
14560 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14561 return tem ? tem : expr;
14562 default:
14563 break;
14564 }
14565 }
14566
14567 switch (code)
14568 {
14569 case ARRAY_REF:
14570 {
14571 tree op0 = TREE_OPERAND (t, 0);
14572 tree op1 = TREE_OPERAND (t, 1);
14573
14574 if (TREE_CODE (op1) == INTEGER_CST
14575 && TREE_CODE (op0) == CONSTRUCTOR
14576 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14577 {
14578 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14579 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14580 unsigned HOST_WIDE_INT begin = 0;
14581
14582 /* Find a matching index by means of a binary search. */
14583 while (begin != end)
14584 {
14585 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14586 tree index = (*elts)[middle].index;
14587
14588 if (TREE_CODE (index) == INTEGER_CST
14589 && tree_int_cst_lt (index, op1))
14590 begin = middle + 1;
14591 else if (TREE_CODE (index) == INTEGER_CST
14592 && tree_int_cst_lt (op1, index))
14593 end = middle;
14594 else if (TREE_CODE (index) == RANGE_EXPR
14595 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14596 begin = middle + 1;
14597 else if (TREE_CODE (index) == RANGE_EXPR
14598 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14599 end = middle;
14600 else
14601 return (*elts)[middle].value;
14602 }
14603 }
14604
14605 return t;
14606 }
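/* E.g., folding an ARRAY_REF of the CONSTRUCTOR {1, 2, 3} at constant
   index 1 yields 2 by the binary search above; a RANGE_EXPR index
   matches every position within its bounds.  */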
14607
14608 /* Return a VECTOR_CST if possible. */
14609 case CONSTRUCTOR:
14610 {
14611 tree type = TREE_TYPE (t);
14612 if (TREE_CODE (type) != VECTOR_TYPE)
14613 return t;
14614
14615 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14616 unsigned HOST_WIDE_INT idx, pos = 0;
14617 tree value;
14618
14619 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14620 {
14621 if (!CONSTANT_CLASS_P (value))
14622 return t;
14623 if (TREE_CODE (value) == VECTOR_CST)
14624 {
14625 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14626 vec[pos++] = VECTOR_CST_ELT (value, i);
14627 }
14628 else
14629 vec[pos++] = value;
14630 }
14631 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14632 vec[pos] = build_zero_cst (TREE_TYPE (type));
14633
14634 return build_vector (type, vec);
14635 }
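/* E.g., a CONSTRUCTOR {1, 2} for a four-element integer vector type
   folds to the VECTOR_CST {1, 2, 0, 0}, with the missing trailing
   elements zero-filled.  */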
14636
14637 case CONST_DECL:
14638 return fold (DECL_INITIAL (t));
14639
14640 default:
14641 return t;
14642 } /* switch (code) */
14643 }
14644
14645 #ifdef ENABLE_FOLD_CHECKING
14646 #undef fold
14647
14648 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14649 hash_table <pointer_hash <tree_node> >);
14650 static void fold_check_failed (const_tree, const_tree);
14651 void print_fold_checksum (const_tree);
14652
14653 /* When --enable-checking=fold, compute a digest of EXPR before
14654 and after the actual fold call, to verify that fold did not
14655 accidentally change the original expr. */
14656
14657 tree
14658 fold (tree expr)
14659 {
14660 tree ret;
14661 struct md5_ctx ctx;
14662 unsigned char checksum_before[16], checksum_after[16];
14663 hash_table <pointer_hash <tree_node> > ht;
14664
14665 ht.create (32);
14666 md5_init_ctx (&ctx);
14667 fold_checksum_tree (expr, &ctx, ht);
14668 md5_finish_ctx (&ctx, checksum_before);
14669 ht.empty ();
14670
14671 ret = fold_1 (expr);
14672
14673 md5_init_ctx (&ctx);
14674 fold_checksum_tree (expr, &ctx, ht);
14675 md5_finish_ctx (&ctx, checksum_after);
14676 ht.dispose ();
14677
14678 if (memcmp (checksum_before, checksum_after, 16))
14679 fold_check_failed (expr, ret);
14680
14681 return ret;
14682 }
14683
14684 void
14685 print_fold_checksum (const_tree expr)
14686 {
14687 struct md5_ctx ctx;
14688 unsigned char checksum[16], cnt;
14689 hash_table <pointer_hash <tree_node> > ht;
14690
14691 ht.create (32);
14692 md5_init_ctx (&ctx);
14693 fold_checksum_tree (expr, &ctx, ht);
14694 md5_finish_ctx (&ctx, checksum);
14695 ht.dispose ();
14696 for (cnt = 0; cnt < 16; ++cnt)
14697 fprintf (stderr, "%02x", checksum[cnt]);
14698 putc ('\n', stderr);
14699 }
14700
14701 static void
14702 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14703 {
14704 internal_error ("fold check: original tree changed by fold");
14705 }
14706
14707 static void
14708 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14709 hash_table <pointer_hash <tree_node> > ht)
14710 {
14711 tree_node **slot;
14712 enum tree_code code;
14713 union tree_node buf;
14714 int i, len;
14715
14716 recursive_label:
14717 if (expr == NULL)
14718 return;
14719 slot = ht.find_slot (expr, INSERT);
14720 if (*slot != NULL)
14721 return;
14722 *slot = CONST_CAST_TREE (expr);
14723 code = TREE_CODE (expr);
14724 if (TREE_CODE_CLASS (code) == tcc_declaration
14725 && DECL_ASSEMBLER_NAME_SET_P (expr))
14726 {
14727 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14728 memcpy ((char *) &buf, expr, tree_size (expr));
14729 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14730 expr = (tree) &buf;
14731 }
14732 else if (TREE_CODE_CLASS (code) == tcc_type
14733 && (TYPE_POINTER_TO (expr)
14734 || TYPE_REFERENCE_TO (expr)
14735 || TYPE_CACHED_VALUES_P (expr)
14736 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14737 || TYPE_NEXT_VARIANT (expr)))
14738 {
14739 /* Allow these fields to be modified. */
14740 tree tmp;
14741 memcpy ((char *) &buf, expr, tree_size (expr));
14742 expr = tmp = (tree) &buf;
14743 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14744 TYPE_POINTER_TO (tmp) = NULL;
14745 TYPE_REFERENCE_TO (tmp) = NULL;
14746 TYPE_NEXT_VARIANT (tmp) = NULL;
14747 if (TYPE_CACHED_VALUES_P (tmp))
14748 {
14749 TYPE_CACHED_VALUES_P (tmp) = 0;
14750 TYPE_CACHED_VALUES (tmp) = NULL;
14751 }
14752 }
14753 md5_process_bytes (expr, tree_size (expr), ctx);
14754 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14755 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14756 if (TREE_CODE_CLASS (code) != tcc_type
14757 && TREE_CODE_CLASS (code) != tcc_declaration
14758 && code != TREE_LIST
14759 && code != SSA_NAME
14760 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14761 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14762 switch (TREE_CODE_CLASS (code))
14763 {
14764 case tcc_constant:
14765 switch (code)
14766 {
14767 case STRING_CST:
14768 md5_process_bytes (TREE_STRING_POINTER (expr),
14769 TREE_STRING_LENGTH (expr), ctx);
14770 break;
14771 case COMPLEX_CST:
14772 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14773 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14774 break;
14775 case VECTOR_CST:
14776 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14777 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14778 break;
14779 default:
14780 break;
14781 }
14782 break;
14783 case tcc_exceptional:
14784 switch (code)
14785 {
14786 case TREE_LIST:
14787 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14788 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14789 expr = TREE_CHAIN (expr);
14790 goto recursive_label;
14791 break;
14792 case TREE_VEC:
14793 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14794 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14795 break;
14796 default:
14797 break;
14798 }
14799 break;
14800 case tcc_expression:
14801 case tcc_reference:
14802 case tcc_comparison:
14803 case tcc_unary:
14804 case tcc_binary:
14805 case tcc_statement:
14806 case tcc_vl_exp:
14807 len = TREE_OPERAND_LENGTH (expr);
14808 for (i = 0; i < len; ++i)
14809 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14810 break;
14811 case tcc_declaration:
14812 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14813 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14814 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14815 {
14816 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14817 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14818 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14819 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14820 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14821 }
14822 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
14823 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
14824
14825 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14826 {
14827 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14828 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14829 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
14830 }
14831 break;
14832 case tcc_type:
14833 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14834 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14835 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14836 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14837 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14838 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14839 if (INTEGRAL_TYPE_P (expr)
14840 || SCALAR_FLOAT_TYPE_P (expr))
14841 {
14842 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14843 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14844 }
14845 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14846 if (TREE_CODE (expr) == RECORD_TYPE
14847 || TREE_CODE (expr) == UNION_TYPE
14848 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14849 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14850 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14851 break;
14852 default:
14853 break;
14854 }
14855 }
14856
14857 /* Helper function for outputting the checksum of a tree T. When
14858 debugging with gdb, you can "define mynext" to be "next" followed
14859 by "call debug_fold_checksum (op0)", then just trace down till the
14860 outputs differ. */
14861
14862 DEBUG_FUNCTION void
14863 debug_fold_checksum (const_tree t)
14864 {
14865 int i;
14866 unsigned char checksum[16];
14867 struct md5_ctx ctx;
14868 hash_table <pointer_hash <tree_node> > ht;
14869 ht.create (32);
14870
14871 md5_init_ctx (&ctx);
14872 fold_checksum_tree (t, &ctx, ht);
14873 md5_finish_ctx (&ctx, checksum);
14874 ht.empty ();
14875
14876 for (i = 0; i < 16; i++)
14877 fprintf (stderr, "%d ", checksum[i]);
14878
14879 fprintf (stderr, "\n");
14880 }
14881
14882 #endif
14883
14884 /* Fold a unary tree expression with code CODE of type TYPE with an
14885 operand OP0. LOC is the location of the resulting expression.
14886 Return a folded expression if successful. Otherwise, return a tree
14887 expression with code CODE of type TYPE with an operand OP0. */
14888
14889 tree
14890 fold_build1_stat_loc (location_t loc,
14891 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14892 {
14893 tree tem;
14894 #ifdef ENABLE_FOLD_CHECKING
14895 unsigned char checksum_before[16], checksum_after[16];
14896 struct md5_ctx ctx;
14897 hash_table <pointer_hash <tree_node> > ht;
14898
14899 ht.create (32);
14900 md5_init_ctx (&ctx);
14901 fold_checksum_tree (op0, &ctx, ht);
14902 md5_finish_ctx (&ctx, checksum_before);
14903 ht.empty ();
14904 #endif
14905
14906 tem = fold_unary_loc (loc, code, type, op0);
14907 if (!tem)
14908 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14909
14910 #ifdef ENABLE_FOLD_CHECKING
14911 md5_init_ctx (&ctx);
14912 fold_checksum_tree (op0, &ctx, ht);
14913 md5_finish_ctx (&ctx, checksum_after);
14914 ht.dispose ();
14915
14916 if (memcmp (checksum_before, checksum_after, 16))
14917 fold_check_failed (op0, tem);
14918 #endif
14919 return tem;
14920 }
14921
14922 /* Fold a binary tree expression with code CODE of type TYPE with
14923 operands OP0 and OP1. LOC is the location of the resulting
14924 expression. Return a folded expression if successful. Otherwise,
14925 return a tree expression with code CODE of type TYPE with operands
14926 OP0 and OP1. */
14927
14928 tree
14929 fold_build2_stat_loc (location_t loc,
14930 enum tree_code code, tree type, tree op0, tree op1
14931 MEM_STAT_DECL)
14932 {
14933 tree tem;
14934 #ifdef ENABLE_FOLD_CHECKING
14935 unsigned char checksum_before_op0[16],
14936 checksum_before_op1[16],
14937 checksum_after_op0[16],
14938 checksum_after_op1[16];
14939 struct md5_ctx ctx;
14940 hash_table <pointer_hash <tree_node> > ht;
14941
14942 ht.create (32);
14943 md5_init_ctx (&ctx);
14944 fold_checksum_tree (op0, &ctx, ht);
14945 md5_finish_ctx (&ctx, checksum_before_op0);
14946 ht.empty ();
14947
14948 md5_init_ctx (&ctx);
14949 fold_checksum_tree (op1, &ctx, ht);
14950 md5_finish_ctx (&ctx, checksum_before_op1);
14951 ht.empty ();
14952 #endif
14953
14954 tem = fold_binary_loc (loc, code, type, op0, op1);
14955 if (!tem)
14956 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14957
14958 #ifdef ENABLE_FOLD_CHECKING
14959 md5_init_ctx (&ctx);
14960 fold_checksum_tree (op0, &ctx, ht);
14961 md5_finish_ctx (&ctx, checksum_after_op0);
14962 ht.empty ();
14963
14964 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14965 fold_check_failed (op0, tem);
14966
14967 md5_init_ctx (&ctx);
14968 fold_checksum_tree (op1, &ctx, ht);
14969 md5_finish_ctx (&ctx, checksum_after_op1);
14970 ht.dispose ();
14971
14972 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14973 fold_check_failed (op1, tem);
14974 #endif
14975 return tem;
14976 }
14977
14978 /* Fold a ternary tree expression with code CODE of type TYPE with
14979 operands OP0, OP1, and OP2. Return a folded expression if
14980 successful. Otherwise, return a tree expression with code CODE of
14981 type TYPE with operands OP0, OP1, and OP2. */
14982
14983 tree
14984 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14985 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14986 {
14987 tree tem;
14988 #ifdef ENABLE_FOLD_CHECKING
14989 unsigned char checksum_before_op0[16],
14990 checksum_before_op1[16],
14991 checksum_before_op2[16],
14992 checksum_after_op0[16],
14993 checksum_after_op1[16],
14994 checksum_after_op2[16];
14995 struct md5_ctx ctx;
14996 hash_table <pointer_hash <tree_node> > ht;
14997
14998 ht.create (32);
14999 md5_init_ctx (&ctx);
15000 fold_checksum_tree (op0, &ctx, ht);
15001 md5_finish_ctx (&ctx, checksum_before_op0);
15002 ht.empty ();
15003
15004 md5_init_ctx (&ctx);
15005 fold_checksum_tree (op1, &ctx, ht);
15006 md5_finish_ctx (&ctx, checksum_before_op1);
15007 ht.empty ();
15008
15009 md5_init_ctx (&ctx);
15010 fold_checksum_tree (op2, &ctx, ht);
15011 md5_finish_ctx (&ctx, checksum_before_op2);
15012 ht.empty ();
15013 #endif
15014
15015 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15016 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15017 if (!tem)
15018 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15019
15020 #ifdef ENABLE_FOLD_CHECKING
15021 md5_init_ctx (&ctx);
15022 fold_checksum_tree (op0, &ctx, ht);
15023 md5_finish_ctx (&ctx, checksum_after_op0);
15024 ht.empty ();
15025
15026 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15027 fold_check_failed (op0, tem);
15028
15029 md5_init_ctx (&ctx);
15030 fold_checksum_tree (op1, &ctx, ht);
15031 md5_finish_ctx (&ctx, checksum_after_op1);
15032 ht.empty ();
15033
15034 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15035 fold_check_failed (op1, tem);
15036
15037 md5_init_ctx (&ctx);
15038 fold_checksum_tree (op2, &ctx, ht);
15039 md5_finish_ctx (&ctx, checksum_after_op2);
15040 ht.dispose ();
15041
15042 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15043 fold_check_failed (op2, tem);
15044 #endif
15045 return tem;
15046 }
15047
15048 /* Fold a CALL_EXPR expression of type TYPE with operand FN, NARGS
15049 arguments in ARGARRAY, and a null static chain.
15050 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15051 of type TYPE from the given operands as constructed by build_call_array. */
15052
15053 tree
15054 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15055 int nargs, tree *argarray)
15056 {
15057 tree tem;
15058 #ifdef ENABLE_FOLD_CHECKING
15059 unsigned char checksum_before_fn[16],
15060 checksum_before_arglist[16],
15061 checksum_after_fn[16],
15062 checksum_after_arglist[16];
15063 struct md5_ctx ctx;
15064 hash_table <pointer_hash <tree_node> > ht;
15065 int i;
15066
15067 ht.create (32);
15068 md5_init_ctx (&ctx);
15069 fold_checksum_tree (fn, &ctx, ht);
15070 md5_finish_ctx (&ctx, checksum_before_fn);
15071 ht.empty ();
15072
15073 md5_init_ctx (&ctx);
15074 for (i = 0; i < nargs; i++)
15075 fold_checksum_tree (argarray[i], &ctx, ht);
15076 md5_finish_ctx (&ctx, checksum_before_arglist);
15077 ht.empty ();
15078 #endif
15079
15080 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15081
15082 #ifdef ENABLE_FOLD_CHECKING
15083 md5_init_ctx (&ctx);
15084 fold_checksum_tree (fn, &ctx, ht);
15085 md5_finish_ctx (&ctx, checksum_after_fn);
15086 ht.empty ();
15087
15088 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15089 fold_check_failed (fn, tem);
15090
15091 md5_init_ctx (&ctx);
15092 for (i = 0; i < nargs; i++)
15093 fold_checksum_tree (argarray[i], &ctx, ht);
15094 md5_finish_ctx (&ctx, checksum_after_arglist);
15095 ht.dispose ();
15096
15097 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15098 fold_check_failed (NULL_TREE, tem);
15099 #endif
15100 return tem;
15101 }
15102
15103 /* Perform constant folding and related simplification of initializer
15104 expression EXPR. These behave identically to "fold_buildN" but ignore
15105 potential run-time traps and exceptions that fold must preserve. */
15106
15107 #define START_FOLD_INIT \
15108 int saved_signaling_nans = flag_signaling_nans;\
15109 int saved_trapping_math = flag_trapping_math;\
15110 int saved_rounding_math = flag_rounding_math;\
15111 int saved_trapv = flag_trapv;\
15112 int saved_folding_initializer = folding_initializer;\
15113 flag_signaling_nans = 0;\
15114 flag_trapping_math = 0;\
15115 flag_rounding_math = 0;\
15116 flag_trapv = 0;\
15117 folding_initializer = 1;
15118
15119 #define END_FOLD_INIT \
15120 flag_signaling_nans = saved_signaling_nans;\
15121 flag_trapping_math = saved_trapping_math;\
15122 flag_rounding_math = saved_rounding_math;\
15123 flag_trapv = saved_trapv;\
15124 folding_initializer = saved_folding_initializer;
15125
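/* For example (illustrative): with -frounding-math, const_binop refuses
   to fold the inexact division in

       static const double third = 1.0 / 3.0;

   because the result depends on the run-time rounding mode.  A static
   initializer must nevertheless become a constant, so a front end calls
   fold_build2_initializer_loc (loc, RDIV_EXPR, double_type_node, ...),
   and the flags cleared above permit the compile-time evaluation.  */
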
15126 tree
15127 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15128 tree type, tree op)
15129 {
15130 tree result;
15131 START_FOLD_INIT;
15132
15133 result = fold_build1_loc (loc, code, type, op);
15134
15135 END_FOLD_INIT;
15136 return result;
15137 }
15138
15139 tree
15140 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15141 tree type, tree op0, tree op1)
15142 {
15143 tree result;
15144 START_FOLD_INIT;
15145
15146 result = fold_build2_loc (loc, code, type, op0, op1);
15147
15148 END_FOLD_INIT;
15149 return result;
15150 }
15151
15152 tree
15153 fold_build3_initializer_loc (location_t loc, enum tree_code code,
15154 tree type, tree op0, tree op1, tree op2)
15155 {
15156 tree result;
15157 START_FOLD_INIT;
15158
15159 result = fold_build3_loc (loc, code, type, op0, op1, op2);
15160
15161 END_FOLD_INIT;
15162 return result;
15163 }
15164
15165 tree
15166 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15167 int nargs, tree *argarray)
15168 {
15169 tree result;
15170 START_FOLD_INIT;
15171
15172 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15173
15174 END_FOLD_INIT;
15175 return result;
15176 }
15177
15178 #undef START_FOLD_INIT
15179 #undef END_FOLD_INIT
15180
15181 /* Determine if first argument is a multiple of second argument. Return 0 if
15182 it is not, or if we cannot easily determine it to be.
15183
15184 An example of the sort of thing we care about (at this point; this routine
15185 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15186 fold cases do now) is discovering that
15187
15188 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15189
15190 is a multiple of
15191
15192 SAVE_EXPR (J * 8)
15193
15194 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15195
15196 This code also handles discovering that
15197
15198 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15199
15200 is a multiple of 8 so we don't have to worry about dealing with a
15201 possible remainder.
15202
15203 Note that we *look* inside a SAVE_EXPR only to determine how it was
15204 calculated; it is not safe for fold to do much of anything else with the
15205 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15206 at run time. For example, the latter example above *cannot* be implemented
15207 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15208 evaluation time of the original SAVE_EXPR is not necessarily the same at
15209 the time the new expression is evaluated. The only optimization of this
15210 sort that would be valid is changing
15211
15212 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15213
15214 divided by 8 to
15215
15216 SAVE_EXPR (I) * SAVE_EXPR (J)
15217
15218 (where the same SAVE_EXPR (J) is used in the original and the
15219 transformed version). */
15220
15221 int
15222 multiple_of_p (tree type, const_tree top, const_tree bottom)
15223 {
15224 if (operand_equal_p (top, bottom, 0))
15225 return 1;
15226
15227 if (TREE_CODE (type) != INTEGER_TYPE)
15228 return 0;
15229
15230 switch (TREE_CODE (top))
15231 {
15232 case BIT_AND_EXPR:
15233 /* Bitwise and provides a power of two multiple. If the mask is
15234 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15235 if (!integer_pow2p (bottom))
15236 return 0;
15237 /* FALLTHRU */
15238
15239 case MULT_EXPR:
15240 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15241 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15242
15243 case PLUS_EXPR:
15244 case MINUS_EXPR:
15245 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15246 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15247
15248 case LSHIFT_EXPR:
15249 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15250 {
15251 tree op1, t1;
15252
15253 op1 = TREE_OPERAND (top, 1);
15254 /* const_binop may not detect overflow correctly,
15255 so check for it explicitly here. */
15256 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15257 > TREE_INT_CST_LOW (op1)
15258 && TREE_INT_CST_HIGH (op1) == 0
15259 && 0 != (t1 = fold_convert (type,
15260 const_binop (LSHIFT_EXPR,
15261 size_one_node,
15262 op1)))
15263 && !TREE_OVERFLOW (t1))
15264 return multiple_of_p (type, t1, bottom);
15265 }
15266 return 0;
15267
15268 case NOP_EXPR:
15269 /* Can't handle conversions from non-integral or wider integral type. */
15270 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15271 || (TYPE_PRECISION (type)
15272 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15273 return 0;
15274
15275 /* ... fall through ... */
15276
15277 case SAVE_EXPR:
15278 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15279
15280 case COND_EXPR:
15281 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15282 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15283
15284 case INTEGER_CST:
15285 if (TREE_CODE (bottom) != INTEGER_CST
15286 || integer_zerop (bottom)
15287 || (TYPE_UNSIGNED (type)
15288 && (tree_int_cst_sgn (top) < 0
15289 || tree_int_cst_sgn (bottom) < 0)))
15290 return 0;
15291 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15292 top, bottom));
15293
15294 default:
15295 return 0;
15296 }
15297 }
15298
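/* As a usage sketch, round_up_loc and round_down_loc below call this to
   skip the rounding arithmetic when VALUE is already known to be a
   multiple of DIVISOR:

       tree div = build_int_cst (TREE_TYPE (value), divisor);
       if (multiple_of_p (TREE_TYPE (value), value, div))
         return value;  */
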
15299 /* Return true if CODE or TYPE is known to be non-negative. */
15300
15301 static bool
15302 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15303 {
15304 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15305 && truth_value_p (code))
15306 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
15307 have a signed:1 type (where the values are -1 and 0). */
15308 return true;
15309 return false;
15310 }
15311
15312 /* Return true if (CODE OP0) is known to be non-negative. If the return
15313 value is based on the assumption that signed overflow is undefined,
15314 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15315 *STRICT_OVERFLOW_P. */
15316
15317 bool
15318 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15319 bool *strict_overflow_p)
15320 {
15321 if (TYPE_UNSIGNED (type))
15322 return true;
15323
15324 switch (code)
15325 {
15326 case ABS_EXPR:
15327 /* We can't return 1 if flag_wrapv is set because
15328 ABS_EXPR<INT_MIN> = INT_MIN. */
15329 if (!INTEGRAL_TYPE_P (type))
15330 return true;
15331 if (TYPE_OVERFLOW_UNDEFINED (type))
15332 {
15333 *strict_overflow_p = true;
15334 return true;
15335 }
15336 break;
15337
15338 case NON_LVALUE_EXPR:
15339 case FLOAT_EXPR:
15340 case FIX_TRUNC_EXPR:
15341 return tree_expr_nonnegative_warnv_p (op0,
15342 strict_overflow_p);
15343
15344 case NOP_EXPR:
15345 {
15346 tree inner_type = TREE_TYPE (op0);
15347 tree outer_type = type;
15348
15349 if (TREE_CODE (outer_type) == REAL_TYPE)
15350 {
15351 if (TREE_CODE (inner_type) == REAL_TYPE)
15352 return tree_expr_nonnegative_warnv_p (op0,
15353 strict_overflow_p);
15354 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15355 {
15356 if (TYPE_UNSIGNED (inner_type))
15357 return true;
15358 return tree_expr_nonnegative_warnv_p (op0,
15359 strict_overflow_p);
15360 }
15361 }
15362 else if (TREE_CODE (outer_type) == INTEGER_TYPE)
15363 {
15364 if (TREE_CODE (inner_type) == REAL_TYPE)
15365 return tree_expr_nonnegative_warnv_p (op0,
15366 strict_overflow_p);
15367 if (TREE_CODE (inner_type) == INTEGER_TYPE)
15368 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15369 && TYPE_UNSIGNED (inner_type);
15370 }
15371 }
15372 break;
15373
15374 default:
15375 return tree_simple_nonnegative_warnv_p (code, type);
15376 }
15377
15378 /* We don't know sign of `t', so be conservative and return false. */
15379 return false;
15380 }
15381
15382 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15383 value is based on the assumption that signed overflow is undefined,
15384 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15385 *STRICT_OVERFLOW_P. */
15386
15387 bool
15388 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15389 tree op1, bool *strict_overflow_p)
15390 {
15391 if (TYPE_UNSIGNED (type))
15392 return true;
15393
15394 switch (code)
15395 {
15396 case POINTER_PLUS_EXPR:
15397 case PLUS_EXPR:
15398 if (FLOAT_TYPE_P (type))
15399 return (tree_expr_nonnegative_warnv_p (op0,
15400 strict_overflow_p)
15401 && tree_expr_nonnegative_warnv_p (op1,
15402 strict_overflow_p));
15403
15404 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15405 both unsigned and at least 2 bits shorter than the result. */
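      /* (E.g. two 8-bit unsigned values extended to a 32-bit int: the sum
         is at most 255 + 255 = 510, which needs only 9 bits, so the sign
         bit of the result can never be set.)  */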
15406 if (TREE_CODE (type) == INTEGER_TYPE
15407 && TREE_CODE (op0) == NOP_EXPR
15408 && TREE_CODE (op1) == NOP_EXPR)
15409 {
15410 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15411 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15412 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15413 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15414 {
15415 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15416 TYPE_PRECISION (inner2)) + 1;
15417 return prec < TYPE_PRECISION (type);
15418 }
15419 }
15420 break;
15421
15422 case MULT_EXPR:
15423 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15424 {
15425 /* x * x is always non-negative for floating point x
15426 or without overflow. */
15427 if (operand_equal_p (op0, op1, 0)
15428 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15429 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15430 {
15431 if (TYPE_OVERFLOW_UNDEFINED (type))
15432 *strict_overflow_p = true;
15433 return true;
15434 }
15435 }
15436
15437 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
15438 both unsigned and their combined precision is less than that of the result. */
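      /* (E.g. two 8-bit unsigned values extended to a 32-bit int: the
         product is at most 255 * 255 = 65025, which fits in 16 bits, so
         the sign bit of the result stays clear.)  */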
15439 if (TREE_CODE (type) == INTEGER_TYPE
15440 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15441 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15442 {
15443 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15444 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15445 : TREE_TYPE (op0);
15446 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15447 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15448 : TREE_TYPE (op1);
15449
15450 bool unsigned0 = TYPE_UNSIGNED (inner0);
15451 bool unsigned1 = TYPE_UNSIGNED (inner1);
15452
15453 if (TREE_CODE (op0) == INTEGER_CST)
15454 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15455
15456 if (TREE_CODE (op1) == INTEGER_CST)
15457 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15458
15459 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15460 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15461 {
15462 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15463 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15464 : TYPE_PRECISION (inner0);
15465
15466 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15467 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15468 : TYPE_PRECISION (inner1);
15469
15470 return precision0 + precision1 < TYPE_PRECISION (type);
15471 }
15472 }
15473 return false;
15474
15475 case BIT_AND_EXPR:
15476 case MAX_EXPR:
15477 return (tree_expr_nonnegative_warnv_p (op0,
15478 strict_overflow_p)
15479 || tree_expr_nonnegative_warnv_p (op1,
15480 strict_overflow_p));
15481
15482 case BIT_IOR_EXPR:
15483 case BIT_XOR_EXPR:
15484 case MIN_EXPR:
15485 case RDIV_EXPR:
15486 case TRUNC_DIV_EXPR:
15487 case CEIL_DIV_EXPR:
15488 case FLOOR_DIV_EXPR:
15489 case ROUND_DIV_EXPR:
15490 return (tree_expr_nonnegative_warnv_p (op0,
15491 strict_overflow_p)
15492 && tree_expr_nonnegative_warnv_p (op1,
15493 strict_overflow_p));
15494
15495 case TRUNC_MOD_EXPR:
15496 case CEIL_MOD_EXPR:
15497 case FLOOR_MOD_EXPR:
15498 case ROUND_MOD_EXPR:
15499 return tree_expr_nonnegative_warnv_p (op0,
15500 strict_overflow_p);
15501 default:
15502 return tree_simple_nonnegative_warnv_p (code, type);
15503 }
15504
15505 /* We don't know sign of `t', so be conservative and return false. */
15506 return false;
15507 }
15508
15509 /* Return true if T is known to be non-negative. If the return
15510 value is based on the assumption that signed overflow is undefined,
15511 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15512 *STRICT_OVERFLOW_P. */
15513
15514 bool
15515 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15516 {
15517 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15518 return true;
15519
15520 switch (TREE_CODE (t))
15521 {
15522 case INTEGER_CST:
15523 return tree_int_cst_sgn (t) >= 0;
15524
15525 case REAL_CST:
15526 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15527
15528 case FIXED_CST:
15529 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15530
15531 case COND_EXPR:
15532 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15533 strict_overflow_p)
15534 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15535 strict_overflow_p));
15536 default:
15537 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15538 TREE_TYPE (t));
15539 }
15540 /* We don't know sign of `t', so be conservative and return false. */
15541 return false;
15542 }
15543
15544 /* Return true if T is known to be non-negative. If the return
15545 value is based on the assumption that signed overflow is undefined,
15546 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15547 *STRICT_OVERFLOW_P. */
15548
15549 bool
15550 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15551 tree arg0, tree arg1, bool *strict_overflow_p)
15552 {
15553 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15554 switch (DECL_FUNCTION_CODE (fndecl))
15555 {
15556 CASE_FLT_FN (BUILT_IN_ACOS):
15557 CASE_FLT_FN (BUILT_IN_ACOSH):
15558 CASE_FLT_FN (BUILT_IN_CABS):
15559 CASE_FLT_FN (BUILT_IN_COSH):
15560 CASE_FLT_FN (BUILT_IN_ERFC):
15561 CASE_FLT_FN (BUILT_IN_EXP):
15562 CASE_FLT_FN (BUILT_IN_EXP10):
15563 CASE_FLT_FN (BUILT_IN_EXP2):
15564 CASE_FLT_FN (BUILT_IN_FABS):
15565 CASE_FLT_FN (BUILT_IN_FDIM):
15566 CASE_FLT_FN (BUILT_IN_HYPOT):
15567 CASE_FLT_FN (BUILT_IN_POW10):
15568 CASE_INT_FN (BUILT_IN_FFS):
15569 CASE_INT_FN (BUILT_IN_PARITY):
15570 CASE_INT_FN (BUILT_IN_POPCOUNT):
15571 case BUILT_IN_BSWAP32:
15572 case BUILT_IN_BSWAP64:
15573 /* Always true. */
15574 return true;
15575
15576 CASE_FLT_FN (BUILT_IN_SQRT):
15577 /* sqrt(-0.0) is -0.0. */
15578 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15579 return true;
15580 return tree_expr_nonnegative_warnv_p (arg0,
15581 strict_overflow_p);
15582
15583 CASE_FLT_FN (BUILT_IN_ASINH):
15584 CASE_FLT_FN (BUILT_IN_ATAN):
15585 CASE_FLT_FN (BUILT_IN_ATANH):
15586 CASE_FLT_FN (BUILT_IN_CBRT):
15587 CASE_FLT_FN (BUILT_IN_CEIL):
15588 CASE_FLT_FN (BUILT_IN_ERF):
15589 CASE_FLT_FN (BUILT_IN_EXPM1):
15590 CASE_FLT_FN (BUILT_IN_FLOOR):
15591 CASE_FLT_FN (BUILT_IN_FMOD):
15592 CASE_FLT_FN (BUILT_IN_FREXP):
15593 CASE_FLT_FN (BUILT_IN_ICEIL):
15594 CASE_FLT_FN (BUILT_IN_IFLOOR):
15595 CASE_FLT_FN (BUILT_IN_IRINT):
15596 CASE_FLT_FN (BUILT_IN_IROUND):
15597 CASE_FLT_FN (BUILT_IN_LCEIL):
15598 CASE_FLT_FN (BUILT_IN_LDEXP):
15599 CASE_FLT_FN (BUILT_IN_LFLOOR):
15600 CASE_FLT_FN (BUILT_IN_LLCEIL):
15601 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15602 CASE_FLT_FN (BUILT_IN_LLRINT):
15603 CASE_FLT_FN (BUILT_IN_LLROUND):
15604 CASE_FLT_FN (BUILT_IN_LRINT):
15605 CASE_FLT_FN (BUILT_IN_LROUND):
15606 CASE_FLT_FN (BUILT_IN_MODF):
15607 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15608 CASE_FLT_FN (BUILT_IN_RINT):
15609 CASE_FLT_FN (BUILT_IN_ROUND):
15610 CASE_FLT_FN (BUILT_IN_SCALB):
15611 CASE_FLT_FN (BUILT_IN_SCALBLN):
15612 CASE_FLT_FN (BUILT_IN_SCALBN):
15613 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15614 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15615 CASE_FLT_FN (BUILT_IN_SINH):
15616 CASE_FLT_FN (BUILT_IN_TANH):
15617 CASE_FLT_FN (BUILT_IN_TRUNC):
15618 /* True if the 1st argument is nonnegative. */
15619 return tree_expr_nonnegative_warnv_p (arg0,
15620 strict_overflow_p);
15621
15622 CASE_FLT_FN (BUILT_IN_FMAX):
15623 /* True if the 1st OR 2nd arguments are nonnegative. */
15624 return (tree_expr_nonnegative_warnv_p (arg0,
15625 strict_overflow_p)
15626 || (tree_expr_nonnegative_warnv_p (arg1,
15627 strict_overflow_p)));
15628
15629 CASE_FLT_FN (BUILT_IN_FMIN):
15630 /* True if the 1st AND 2nd arguments are nonnegative. */
15631 return (tree_expr_nonnegative_warnv_p (arg0,
15632 strict_overflow_p)
15633 && (tree_expr_nonnegative_warnv_p (arg1,
15634 strict_overflow_p)));
15635
15636 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15637 /* True if the 2nd argument is nonnegative. */
15638 return tree_expr_nonnegative_warnv_p (arg1,
15639 strict_overflow_p);
15640
15641 CASE_FLT_FN (BUILT_IN_POWI):
15642 /* True if the 1st argument is nonnegative or the second
15643 argument is an even integer. */
15644 if (TREE_CODE (arg1) == INTEGER_CST
15645 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15646 return true;
15647 return tree_expr_nonnegative_warnv_p (arg0,
15648 strict_overflow_p);
15649
15650 CASE_FLT_FN (BUILT_IN_POW):
15651 /* True if the 1st argument is nonnegative or the second
15652 argument is an even integer valued real. */
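      /* (E.g. pow (x, 2.0) is treated as nonnegative for any X, since a
         negative base raised to an even integer power is positive.)  */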
15653 if (TREE_CODE (arg1) == REAL_CST)
15654 {
15655 REAL_VALUE_TYPE c;
15656 HOST_WIDE_INT n;
15657
15658 c = TREE_REAL_CST (arg1);
15659 n = real_to_integer (&c);
15660 if ((n & 1) == 0)
15661 {
15662 REAL_VALUE_TYPE cint;
15663 real_from_integer (&cint, VOIDmode, n,
15664 n < 0 ? -1 : 0, 0);
15665 if (real_identical (&c, &cint))
15666 return true;
15667 }
15668 }
15669 return tree_expr_nonnegative_warnv_p (arg0,
15670 strict_overflow_p);
15671
15672 default:
15673 break;
15674 }
15675 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15676 type);
15677 }
15678
15679 /* Return true if T is known to be non-negative. If the return
15680 value is based on the assumption that signed overflow is undefined,
15681 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15682 *STRICT_OVERFLOW_P. */
15683
15684 bool
15685 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15686 {
15687 enum tree_code code = TREE_CODE (t);
15688 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15689 return true;
15690
15691 switch (code)
15692 {
15693 case TARGET_EXPR:
15694 {
15695 tree temp = TARGET_EXPR_SLOT (t);
15696 t = TARGET_EXPR_INITIAL (t);
15697
15698 /* If the initializer is non-void, then it's a normal expression
15699 that will be assigned to the slot. */
15700 if (!VOID_TYPE_P (t))
15701 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15702
15703 /* Otherwise, the initializer sets the slot in some way. One common
15704 way is an assignment statement at the end of the initializer. */
15705 while (1)
15706 {
15707 if (TREE_CODE (t) == BIND_EXPR)
15708 t = expr_last (BIND_EXPR_BODY (t));
15709 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15710 || TREE_CODE (t) == TRY_CATCH_EXPR)
15711 t = expr_last (TREE_OPERAND (t, 0));
15712 else if (TREE_CODE (t) == STATEMENT_LIST)
15713 t = expr_last (t);
15714 else
15715 break;
15716 }
15717 if (TREE_CODE (t) == MODIFY_EXPR
15718 && TREE_OPERAND (t, 0) == temp)
15719 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15720 strict_overflow_p);
15721
15722 return false;
15723 }
15724
15725 case CALL_EXPR:
15726 {
15727 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15728 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15729
15730 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15731 get_callee_fndecl (t),
15732 arg0,
15733 arg1,
15734 strict_overflow_p);
15735 }
15736 case COMPOUND_EXPR:
15737 case MODIFY_EXPR:
15738 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15739 strict_overflow_p);
15740 case BIND_EXPR:
15741 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15742 strict_overflow_p);
15743 case SAVE_EXPR:
15744 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15745 strict_overflow_p);
15746
15747 default:
15748 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15749 TREE_TYPE (t));
15750 }
15751
15752 /* We don't know sign of `t', so be conservative and return false. */
15753 return false;
15754 }
15755
15756 /* Return true if T is known to be non-negative. If the return
15757 value is based on the assumption that signed overflow is undefined,
15758 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15759 *STRICT_OVERFLOW_P. */
15760
15761 bool
15762 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15763 {
15764 enum tree_code code;
15765 if (t == error_mark_node)
15766 return false;
15767
15768 code = TREE_CODE (t);
15769 switch (TREE_CODE_CLASS (code))
15770 {
15771 case tcc_binary:
15772 case tcc_comparison:
15773 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15774 TREE_TYPE (t),
15775 TREE_OPERAND (t, 0),
15776 TREE_OPERAND (t, 1),
15777 strict_overflow_p);
15778
15779 case tcc_unary:
15780 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15781 TREE_TYPE (t),
15782 TREE_OPERAND (t, 0),
15783 strict_overflow_p);
15784
15785 case tcc_constant:
15786 case tcc_declaration:
15787 case tcc_reference:
15788 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15789
15790 default:
15791 break;
15792 }
15793
15794 switch (code)
15795 {
15796 case TRUTH_AND_EXPR:
15797 case TRUTH_OR_EXPR:
15798 case TRUTH_XOR_EXPR:
15799 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15800 TREE_TYPE (t),
15801 TREE_OPERAND (t, 0),
15802 TREE_OPERAND (t, 1),
15803 strict_overflow_p);
15804 case TRUTH_NOT_EXPR:
15805 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15806 TREE_TYPE (t),
15807 TREE_OPERAND (t, 0),
15808 strict_overflow_p);
15809
15810 case COND_EXPR:
15811 case CONSTRUCTOR:
15812 case OBJ_TYPE_REF:
15813 case ASSERT_EXPR:
15814 case ADDR_EXPR:
15815 case WITH_SIZE_EXPR:
15816 case SSA_NAME:
15817 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15818
15819 default:
15820 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15821 }
15822 }
15823
15824 /* Return true if `t' is known to be non-negative. Handle warnings
15825 about undefined signed overflow. */
15826
15827 bool
15828 tree_expr_nonnegative_p (tree t)
15829 {
15830 bool ret, strict_overflow_p;
15831
15832 strict_overflow_p = false;
15833 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15834 if (strict_overflow_p)
15835 fold_overflow_warning (("assuming signed overflow does not occur when "
15836 "determining that expression is always "
15837 "non-negative"),
15838 WARN_STRICT_OVERFLOW_MISC);
15839 return ret;
15840 }
15841
15842
15843 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15844 For floating point we further ensure that T is not denormal.
15845 Similar logic is present in nonzero_address_p in rtlanal.c.
15846
15847 If the return value is based on the assumption that signed overflow
15848 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15849 change *STRICT_OVERFLOW_P. */
15850
15851 bool
15852 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15853 bool *strict_overflow_p)
15854 {
15855 switch (code)
15856 {
15857 case ABS_EXPR:
15858 return tree_expr_nonzero_warnv_p (op0,
15859 strict_overflow_p);
15860
15861 case NOP_EXPR:
15862 {
15863 tree inner_type = TREE_TYPE (op0);
15864 tree outer_type = type;
15865
15866 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15867 && tree_expr_nonzero_warnv_p (op0,
15868 strict_overflow_p));
15869 }
15870 break;
15871
15872 case NON_LVALUE_EXPR:
15873 return tree_expr_nonzero_warnv_p (op0,
15874 strict_overflow_p);
15875
15876 default:
15877 break;
15878 }
15879
15880 return false;
15881 }
15882
15883 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15884 For floating point we further ensure that T is not denormal.
15885 Similar logic is present in nonzero_address_p in rtlanal.c.
15886
15887 If the return value is based on the assumption that signed overflow
15888 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15889 change *STRICT_OVERFLOW_P. */
15890
15891 bool
15892 tree_binary_nonzero_warnv_p (enum tree_code code,
15893 tree type,
15894 tree op0,
15895 tree op1, bool *strict_overflow_p)
15896 {
15897 bool sub_strict_overflow_p;
15898 switch (code)
15899 {
15900 case POINTER_PLUS_EXPR:
15901 case PLUS_EXPR:
15902 if (TYPE_OVERFLOW_UNDEFINED (type))
15903 {
15904 /* With the presence of negative values it is hard
15905 to say something. */
15906 sub_strict_overflow_p = false;
15907 if (!tree_expr_nonnegative_warnv_p (op0,
15908 &sub_strict_overflow_p)
15909 || !tree_expr_nonnegative_warnv_p (op1,
15910 &sub_strict_overflow_p))
15911 return false;
15912 /* One of the operands must be positive and the other non-negative. */
15913 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15914 overflows, on a twos-complement machine the sum of two
15915 nonnegative numbers can never be zero. */
15916 return (tree_expr_nonzero_warnv_p (op0,
15917 strict_overflow_p)
15918 || tree_expr_nonzero_warnv_p (op1,
15919 strict_overflow_p));
15920 }
15921 break;
15922
15923 case MULT_EXPR:
15924 if (TYPE_OVERFLOW_UNDEFINED (type))
15925 {
15926 if (tree_expr_nonzero_warnv_p (op0,
15927 strict_overflow_p)
15928 && tree_expr_nonzero_warnv_p (op1,
15929 strict_overflow_p))
15930 {
15931 *strict_overflow_p = true;
15932 return true;
15933 }
15934 }
15935 break;
15936
15937 case MIN_EXPR:
15938 sub_strict_overflow_p = false;
15939 if (tree_expr_nonzero_warnv_p (op0,
15940 &sub_strict_overflow_p)
15941 && tree_expr_nonzero_warnv_p (op1,
15942 &sub_strict_overflow_p))
15943 {
15944 if (sub_strict_overflow_p)
15945 *strict_overflow_p = true;
15946 }
15947 break;
15948
15949 case MAX_EXPR:
15950 sub_strict_overflow_p = false;
15951 if (tree_expr_nonzero_warnv_p (op0,
15952 &sub_strict_overflow_p))
15953 {
15954 if (sub_strict_overflow_p)
15955 *strict_overflow_p = true;
15956
15957 /* When both operands are nonzero, MAX is nonzero too. */
15958 if (tree_expr_nonzero_warnv_p (op1,
15959 strict_overflow_p))
15960 return true;
15961
15962 /* MAX where operand 0 is positive is positive. */
15963 return tree_expr_nonnegative_warnv_p (op0,
15964 strict_overflow_p);
15965 }
15966 /* MAX where operand 1 is positive is positive. */
15967 else if (tree_expr_nonzero_warnv_p (op1,
15968 &sub_strict_overflow_p)
15969 && tree_expr_nonnegative_warnv_p (op1,
15970 &sub_strict_overflow_p))
15971 {
15972 if (sub_strict_overflow_p)
15973 *strict_overflow_p = true;
15974 return true;
15975 }
15976 break;
15977
15978 case BIT_IOR_EXPR:
15979 return (tree_expr_nonzero_warnv_p (op1,
15980 strict_overflow_p)
15981 || tree_expr_nonzero_warnv_p (op0,
15982 strict_overflow_p));
15983
15984 default:
15985 break;
15986 }
15987
15988 return false;
15989 }
15990
15991 /* Return true when T is an address and is known to be nonzero.
15992 For floating point we further ensure that T is not denormal.
15993 Similar logic is present in nonzero_address_p in rtlanal.c.
15994
15995 If the return value is based on the assumption that signed overflow
15996 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15997 change *STRICT_OVERFLOW_P. */
15998
15999 bool
16000 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16001 {
16002 bool sub_strict_overflow_p;
16003 switch (TREE_CODE (t))
16004 {
16005 case INTEGER_CST:
16006 return !integer_zerop (t);
16007
16008 case ADDR_EXPR:
16009 {
16010 tree base = TREE_OPERAND (t, 0);
16011 if (!DECL_P (base))
16012 base = get_base_address (base);
16013
16014 if (!base)
16015 return false;
16016
16017 /* Weak declarations may link to NULL. Other things may also be NULL,
16018 so only assume a nonzero address with -fdelete-null-pointer-checks;
16019 variables allocated on the stack, however, are nonzero regardless. */
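      /* (E.g. given "extern int w __attribute__ ((weak));", the address
         &w may legitimately be null, so it is not treated as nonzero
         here.)  */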
16020 if (DECL_P (base)
16021 && (flag_delete_null_pointer_checks
16022 || (DECL_CONTEXT (base)
16023 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16024 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16025 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16026
16027 /* Constants are never weak. */
16028 if (CONSTANT_CLASS_P (base))
16029 return true;
16030
16031 return false;
16032 }
16033
16034 case COND_EXPR:
16035 sub_strict_overflow_p = false;
16036 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16037 &sub_strict_overflow_p)
16038 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16039 &sub_strict_overflow_p))
16040 {
16041 if (sub_strict_overflow_p)
16042 *strict_overflow_p = true;
16043 return true;
16044 }
16045 break;
16046
16047 default:
16048 break;
16049 }
16050 return false;
16051 }
16052
16053 /* Return true when T is an address and is known to be nonzero.
16054 For floating point we further ensure that T is not denormal.
16055 Similar logic is present in nonzero_address_p in rtlanal.c.
16056
16057 If the return value is based on the assumption that signed overflow
16058 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16059 change *STRICT_OVERFLOW_P. */
16060
16061 bool
16062 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16063 {
16064 tree type = TREE_TYPE (t);
16065 enum tree_code code;
16066
16067 /* Doing something useful for floating point would need more work. */
16068 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
16069 return false;
16070
16071 code = TREE_CODE (t);
16072 switch (TREE_CODE_CLASS (code))
16073 {
16074 case tcc_unary:
16075 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16076 strict_overflow_p);
16077 case tcc_binary:
16078 case tcc_comparison:
16079 return tree_binary_nonzero_warnv_p (code, type,
16080 TREE_OPERAND (t, 0),
16081 TREE_OPERAND (t, 1),
16082 strict_overflow_p);
16083 case tcc_constant:
16084 case tcc_declaration:
16085 case tcc_reference:
16086 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16087
16088 default:
16089 break;
16090 }
16091
16092 switch (code)
16093 {
16094 case TRUTH_NOT_EXPR:
16095 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
16096 strict_overflow_p);
16097
16098 case TRUTH_AND_EXPR:
16099 case TRUTH_OR_EXPR:
16100 case TRUTH_XOR_EXPR:
16101 return tree_binary_nonzero_warnv_p (code, type,
16102 TREE_OPERAND (t, 0),
16103 TREE_OPERAND (t, 1),
16104 strict_overflow_p);
16105
16106 case COND_EXPR:
16107 case CONSTRUCTOR:
16108 case OBJ_TYPE_REF:
16109 case ASSERT_EXPR:
16110 case ADDR_EXPR:
16111 case WITH_SIZE_EXPR:
16112 case SSA_NAME:
16113 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
16114
16115 case COMPOUND_EXPR:
16116 case MODIFY_EXPR:
16117 case BIND_EXPR:
16118 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16119 strict_overflow_p);
16120
16121 case SAVE_EXPR:
16122 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
16123 strict_overflow_p);
16124
16125 case CALL_EXPR:
16126 return alloca_call_p (t);
16127
16128 default:
16129 break;
16130 }
16131 return false;
16132 }
16133
16134 /* Return true when T is an address and is known to be nonzero.
16135 Handle warnings about undefined signed overflow. */
16136
16137 bool
16138 tree_expr_nonzero_p (tree t)
16139 {
16140 bool ret, strict_overflow_p;
16141
16142 strict_overflow_p = false;
16143 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
16144 if (strict_overflow_p)
16145 fold_overflow_warning (("assuming signed overflow does not occur when "
16146 "determining that expression is always "
16147 "non-zero"),
16148 WARN_STRICT_OVERFLOW_MISC);
16149 return ret;
16150 }
16151
16152 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16153 attempt to fold the expression to a constant without modifying TYPE,
16154 OP0 or OP1.
16155
16156 If the expression could be simplified to a constant, then return
16157 the constant. If the expression would not be simplified to a
16158 constant, then return NULL_TREE. */
16159
16160 tree
16161 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16162 {
16163 tree tem = fold_binary (code, type, op0, op1);
16164 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16165 }
16166
16167 /* Given the components of a unary expression CODE, TYPE and OP0,
16168 attempt to fold the expression to a constant without modifying
16169 TYPE or OP0.
16170
16171 If the expression could be simplified to a constant, then return
16172 the constant. If the expression would not be simplified to a
16173 constant, then return NULL_TREE. */
16174
16175 tree
16176 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16177 {
16178 tree tem = fold_unary (code, type, op0);
16179 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16180 }
16181
16182 /* If EXP represents referencing an element in a constant string
16183 (either via pointer arithmetic or array indexing), return the
16184 tree representing the value accessed, otherwise return NULL. */
16185
16186 tree
16187 fold_read_from_constant_string (tree exp)
16188 {
16189 if ((TREE_CODE (exp) == INDIRECT_REF
16190 || TREE_CODE (exp) == ARRAY_REF)
16191 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16192 {
16193 tree exp1 = TREE_OPERAND (exp, 0);
16194 tree index;
16195 tree string;
16196 location_t loc = EXPR_LOCATION (exp);
16197
16198 if (TREE_CODE (exp) == INDIRECT_REF)
16199 string = string_constant (exp1, &index);
16200 else
16201 {
16202 tree low_bound = array_ref_low_bound (exp);
16203 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16204
16205 /* Optimize the special-case of a zero lower bound.
16206
16207 We convert the low_bound to sizetype to avoid some problems
16208 with constant folding. (E.g. suppose the lower bound is 1,
16209 and its mode is QI. Without the conversion, (ARRAY
16210 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16211 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16212 if (! integer_zerop (low_bound))
16213 index = size_diffop_loc (loc, index,
16214 fold_convert_loc (loc, sizetype, low_bound));
16215
16216 string = exp1;
16217 }
16218
16219 if (string
16220 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16221 && TREE_CODE (string) == STRING_CST
16222 && TREE_CODE (index) == INTEGER_CST
16223 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16224 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16225 == MODE_INT)
16226 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16227 return build_int_cst_type (TREE_TYPE (exp),
16228 (TREE_STRING_POINTER (string)
16229 [TREE_INT_CST_LOW (index)]));
16230 }
16231 return NULL;
16232 }
16233
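/* For example (illustrative): the C initializer

       const char c = "hello"[1];

   reaches fold_read_from_constant_string as an ARRAY_REF of a STRING_CST
   with INTEGER_CST index 1 and folds to the INTEGER_CST 'e'.  */
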
16234 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16235 an integer constant, real, or fixed-point constant.
16236
16237 TYPE is the type of the result. */
16238
16239 static tree
16240 fold_negate_const (tree arg0, tree type)
16241 {
16242 tree t = NULL_TREE;
16243
16244 switch (TREE_CODE (arg0))
16245 {
16246 case INTEGER_CST:
16247 {
16248 double_int val = tree_to_double_int (arg0);
16249 bool overflow;
16250 val = val.neg_with_overflow (&overflow);
16251 t = force_fit_type_double (type, val, 1,
16252 (overflow | TREE_OVERFLOW (arg0))
16253 && !TYPE_UNSIGNED (type));
16254 break;
16255 }
16256
16257 case REAL_CST:
16258 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16259 break;
16260
16261 case FIXED_CST:
16262 {
16263 FIXED_VALUE_TYPE f;
16264 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16265 &(TREE_FIXED_CST (arg0)), NULL,
16266 TYPE_SATURATING (type));
16267 t = build_fixed (type, f);
16268 /* Propagate overflow flags. */
16269 if (overflow_p | TREE_OVERFLOW (arg0))
16270 TREE_OVERFLOW (t) = 1;
16271 break;
16272 }
16273
16274 default:
16275 gcc_unreachable ();
16276 }
16277
16278 return t;
16279 }
16280
16281 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16282 an integer constant or real constant.
16283
16284 TYPE is the type of the result. */
16285
16286 tree
16287 fold_abs_const (tree arg0, tree type)
16288 {
16289 tree t = NULL_TREE;
16290
16291 switch (TREE_CODE (arg0))
16292 {
16293 case INTEGER_CST:
16294 {
16295 double_int val = tree_to_double_int (arg0);
16296
16297 /* If the value is unsigned or non-negative, then the absolute value
16298 is the same as the ordinary value. */
16299 if (TYPE_UNSIGNED (type)
16300 || !val.is_negative ())
16301 t = arg0;
16302
16303 /* If the value is negative, then the absolute value is
16304 its negation. */
16305 else
16306 {
16307 bool overflow;
16308 val = val.neg_with_overflow (&overflow);
16309 t = force_fit_type_double (type, val, -1,
16310 overflow | TREE_OVERFLOW (arg0));
16311 }
16312 }
16313 break;
16314
16315 case REAL_CST:
16316 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16317 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16318 else
16319 t = arg0;
16320 break;
16321
16322 default:
16323 gcc_unreachable ();
16324 }
16325
16326 return t;
16327 }
16328
16329 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16330 constant. TYPE is the type of the result. */
16331
16332 static tree
16333 fold_not_const (const_tree arg0, tree type)
16334 {
16335 double_int val;
16336
16337 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16338
16339 val = ~tree_to_double_int (arg0);
16340 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16341 }
16342
16343 /* Given CODE, a relational operator, the target type, TYPE and two
16344 constant operands OP0 and OP1, return the result of the
16345 relational operation. If the result is not a compile time
16346 constant, then return NULL_TREE. */
16347
16348 static tree
16349 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16350 {
16351 int result, invert;
16352
16353 /* From here on, the only cases we handle are when the result is
16354 known to be a constant. */
16355
16356 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16357 {
16358 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16359 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16360
16361 /* Handle the cases where either operand is a NaN. */
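      /* (E.g. NaN == 1.0 folds to 0 and NaN != 1.0 folds to 1, while
         NaN < 1.0 folds to 0 only under -fno-trapping-math, since the
         ordered comparison may raise an invalid-operation exception.)  */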
16362 if (real_isnan (c0) || real_isnan (c1))
16363 {
16364 switch (code)
16365 {
16366 case EQ_EXPR:
16367 case ORDERED_EXPR:
16368 result = 0;
16369 break;
16370
16371 case NE_EXPR:
16372 case UNORDERED_EXPR:
16373 case UNLT_EXPR:
16374 case UNLE_EXPR:
16375 case UNGT_EXPR:
16376 case UNGE_EXPR:
16377 case UNEQ_EXPR:
16378 result = 1;
16379 break;
16380
16381 case LT_EXPR:
16382 case LE_EXPR:
16383 case GT_EXPR:
16384 case GE_EXPR:
16385 case LTGT_EXPR:
16386 if (flag_trapping_math)
16387 return NULL_TREE;
16388 result = 0;
16389 break;
16390
16391 default:
16392 gcc_unreachable ();
16393 }
16394
16395 return constant_boolean_node (result, type);
16396 }
16397
16398 return constant_boolean_node (real_compare (code, c0, c1), type);
16399 }
16400
16401 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16402 {
16403 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16404 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16405 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16406 }
16407
16408 /* Handle equality/inequality of complex constants. */
16409 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16410 {
16411 tree rcond = fold_relational_const (code, type,
16412 TREE_REALPART (op0),
16413 TREE_REALPART (op1));
16414 tree icond = fold_relational_const (code, type,
16415 TREE_IMAGPART (op0),
16416 TREE_IMAGPART (op1));
16417 if (code == EQ_EXPR)
16418 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16419 else if (code == NE_EXPR)
16420 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16421 else
16422 return NULL_TREE;
16423 }
16424
16425 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16426 {
16427 unsigned count = VECTOR_CST_NELTS (op0);
16428 tree *elts = XALLOCAVEC (tree, count);
16429 gcc_assert (VECTOR_CST_NELTS (op1) == count
16430 && TYPE_VECTOR_SUBPARTS (type) == count);
16431
16432 for (unsigned i = 0; i < count; i++)
16433 {
16434 tree elem_type = TREE_TYPE (type);
16435 tree elem0 = VECTOR_CST_ELT (op0, i);
16436 tree elem1 = VECTOR_CST_ELT (op1, i);
16437
16438 tree tem = fold_relational_const (code, elem_type,
16439 elem0, elem1);
16440
16441 if (tem == NULL_TREE)
16442 return NULL_TREE;
16443
16444 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16445 }
16446
16447 return build_vector (type, elts);
16448 }
16449
16450 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16451
16452 To compute GT, swap the arguments and do LT.
16453 To compute GE, do LT and invert the result.
16454 To compute LE, swap the arguments, do LT and invert the result.
16455 To compute NE, do EQ and invert the result.
16456
16457 Therefore, the code below must handle only EQ and LT. */
16458
16459 if (code == LE_EXPR || code == GT_EXPR)
16460 {
16461 tree tem = op0;
16462 op0 = op1;
16463 op1 = tem;
16464 code = swap_tree_comparison (code);
16465 }
16466
16467 /* Note that it is safe to invert for real values here because we
16468 have already handled the one case where it matters. */
16469
16470 invert = 0;
16471 if (code == NE_EXPR || code == GE_EXPR)
16472 {
16473 invert = 1;
16474 code = invert_tree_comparison (code, false);
16475 }
16476
16477 /* Compute a result for LT or EQ if args permit;
16478 otherwise return NULL_TREE. */
16479 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16480 {
16481 if (code == EQ_EXPR)
16482 result = tree_int_cst_equal (op0, op1);
16483 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16484 result = INT_CST_LT_UNSIGNED (op0, op1);
16485 else
16486 result = INT_CST_LT (op0, op1);
16487 }
16488 else
16489 return NULL_TREE;
16490
16491 if (invert)
16492 result ^= 1;
16493 return constant_boolean_node (result, type);
16494 }
16495
16496 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16497 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16498 itself. */
16499
16500 tree
16501 fold_build_cleanup_point_expr (tree type, tree expr)
16502 {
16503 /* If the expression does not have side effects then we don't have to wrap
16504 it with a cleanup point expression. */
16505 if (!TREE_SIDE_EFFECTS (expr))
16506 return expr;
16507
16508 /* If the expression is a return, check to see if the expression inside the
16509 return has no side effects or the right hand side of the modify expression
16510 inside the return. If either has no side effects, we don't need to
16511 wrap the expression in a cleanup point expression. Note we don't check the
16512 left hand side of the modify because it should always be a return decl. */
16513 if (TREE_CODE (expr) == RETURN_EXPR)
16514 {
16515 tree op = TREE_OPERAND (expr, 0);
16516 if (!op || !TREE_SIDE_EFFECTS (op))
16517 return expr;
16518 op = TREE_OPERAND (op, 1);
16519 if (!TREE_SIDE_EFFECTS (op))
16520 return expr;
16521 }
16522
16523 return build1 (CLEANUP_POINT_EXPR, type, expr);
16524 }
16525
16526 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16527 of an indirection through OP0, or NULL_TREE if no simplification is
16528 possible. */
16529
16530 tree
16531 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16532 {
16533 tree sub = op0;
16534 tree subtype;
16535
16536 STRIP_NOPS (sub);
16537 subtype = TREE_TYPE (sub);
16538 if (!POINTER_TYPE_P (subtype))
16539 return NULL_TREE;
16540
16541 if (TREE_CODE (sub) == ADDR_EXPR)
16542 {
16543 tree op = TREE_OPERAND (sub, 0);
16544 tree optype = TREE_TYPE (op);
16545 /* *&CONST_DECL -> to the value of the const decl. */
16546 if (TREE_CODE (op) == CONST_DECL)
16547 return DECL_INITIAL (op);
16548 /* *&p => p; make sure to handle *&"str"[cst] here. */
16549 if (type == optype)
16550 {
16551 tree fop = fold_read_from_constant_string (op);
16552 if (fop)
16553 return fop;
16554 else
16555 return op;
16556 }
16557 /* *(foo *)&fooarray => fooarray[0] */
16558 else if (TREE_CODE (optype) == ARRAY_TYPE
16559 && type == TREE_TYPE (optype)
16560 && (!in_gimple_form
16561 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16562 {
16563 tree type_domain = TYPE_DOMAIN (optype);
16564 tree min_val = size_zero_node;
16565 if (type_domain && TYPE_MIN_VALUE (type_domain))
16566 min_val = TYPE_MIN_VALUE (type_domain);
16567 if (in_gimple_form
16568 && TREE_CODE (min_val) != INTEGER_CST)
16569 return NULL_TREE;
16570 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16571 NULL_TREE, NULL_TREE);
16572 }
16573 /* *(foo *)&complexfoo => __real__ complexfoo */
16574 else if (TREE_CODE (optype) == COMPLEX_TYPE
16575 && type == TREE_TYPE (optype))
16576 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16577 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16578 else if (TREE_CODE (optype) == VECTOR_TYPE
16579 && type == TREE_TYPE (optype))
16580 {
16581 tree part_width = TYPE_SIZE (type);
16582 tree index = bitsize_int (0);
16583 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16584 }
16585 }
16586
16587 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16588 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16589 {
16590 tree op00 = TREE_OPERAND (sub, 0);
16591 tree op01 = TREE_OPERAND (sub, 1);
16592
16593 STRIP_NOPS (op00);
16594 if (TREE_CODE (op00) == ADDR_EXPR)
16595 {
16596 tree op00type;
16597 op00 = TREE_OPERAND (op00, 0);
16598 op00type = TREE_TYPE (op00);
16599
16600 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16601 if (TREE_CODE (op00type) == VECTOR_TYPE
16602 && type == TREE_TYPE (op00type))
16603 {
16604 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16605 tree part_width = TYPE_SIZE (type);
16606 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16607 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16608 tree index = bitsize_int (indexi);
16609
16610 if (offset/part_widthi <= TYPE_VECTOR_SUBPARTS (op00type))
16611 return fold_build3_loc (loc,
16612 BIT_FIELD_REF, type, op00,
16613 part_width, index);
16614
16615 }
16616 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16617 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16618 && type == TREE_TYPE (op00type))
16619 {
16620 tree size = TYPE_SIZE_UNIT (type);
16621 if (tree_int_cst_equal (size, op01))
16622 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16623 }
16624 /* ((foo *)&fooarray)[1] => fooarray[1] */
16625 else if (TREE_CODE (op00type) == ARRAY_TYPE
16626 && type == TREE_TYPE (op00type))
16627 {
16628 tree type_domain = TYPE_DOMAIN (op00type);
16629 tree min_val = size_zero_node;
16630 if (type_domain && TYPE_MIN_VALUE (type_domain))
16631 min_val = TYPE_MIN_VALUE (type_domain);
16632 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16633 TYPE_SIZE_UNIT (type));
16634 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16635 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16636 NULL_TREE, NULL_TREE);
16637 }
16638 }
16639 }
16640
16641 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16642 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16643 && type == TREE_TYPE (TREE_TYPE (subtype))
16644 && (!in_gimple_form
16645 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16646 {
16647 tree type_domain;
16648 tree min_val = size_zero_node;
16649 sub = build_fold_indirect_ref_loc (loc, sub);
16650 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16651 if (type_domain && TYPE_MIN_VALUE (type_domain))
16652 min_val = TYPE_MIN_VALUE (type_domain);
16653 if (in_gimple_form
16654 && TREE_CODE (min_val) != INTEGER_CST)
16655 return NULL_TREE;
16656 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16657 NULL_TREE);
16658 }
16659
16660 return NULL_TREE;
16661 }
16662
16663 /* Builds an expression for an indirection through T, simplifying some
16664 cases. */
16665
16666 tree
16667 build_fold_indirect_ref_loc (location_t loc, tree t)
16668 {
16669 tree type = TREE_TYPE (TREE_TYPE (t));
16670 tree sub = fold_indirect_ref_1 (loc, type, t);
16671
16672 if (sub)
16673 return sub;
16674
16675 return build1_loc (loc, INDIRECT_REF, type, t);
16676 }
16677
16678 /* Given an INDIRECT_REF T, return either T or a simplified version. */
16679
16680 tree
16681 fold_indirect_ref_loc (location_t loc, tree t)
16682 {
16683 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
16684
16685 if (sub)
16686 return sub;
16687 else
16688 return t;
16689 }
16690
16691 /* Strip non-trapping, non-side-effecting tree nodes from an expression
16692 whose result is ignored. The type of the returned tree need not be
16693 the same as the original expression. */
16694
16695 tree
16696 fold_ignored_result (tree t)
16697 {
16698 if (!TREE_SIDE_EFFECTS (t))
16699 return integer_zero_node;
16700
16701 for (;;)
16702 switch (TREE_CODE_CLASS (TREE_CODE (t)))
16703 {
16704 case tcc_unary:
16705 t = TREE_OPERAND (t, 0);
16706 break;
16707
16708 case tcc_binary:
16709 case tcc_comparison:
16710 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16711 t = TREE_OPERAND (t, 0);
16712 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
16713 t = TREE_OPERAND (t, 1);
16714 else
16715 return t;
16716 break;
16717
16718 case tcc_expression:
16719 switch (TREE_CODE (t))
16720 {
16721 case COMPOUND_EXPR:
16722 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
16723 return t;
16724 t = TREE_OPERAND (t, 0);
16725 break;
16726
16727 case COND_EXPR:
16728 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
16729 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
16730 return t;
16731 t = TREE_OPERAND (t, 0);
16732 break;
16733
16734 default:
16735 return t;
16736 }
16737 break;
16738
16739 default:
16740 return t;
16741 }
16742 }
16743
16744 /* Return the value of VALUE, rounded up to a multiple of DIVISOR.
16745 This can only be applied to objects of a sizetype. */
16746
16747 tree
16748 round_up_loc (location_t loc, tree value, int divisor)
16749 {
16750 tree div = NULL_TREE;
16751
16752 gcc_assert (divisor > 0);
16753 if (divisor == 1)
16754 return value;
16755
16756 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16757 have to do anything. Only do this when we are not given a const,
16758 because in that case, this check is more expensive than just
16759 doing it. */
16760 if (TREE_CODE (value) != INTEGER_CST)
16761 {
16762 div = build_int_cst (TREE_TYPE (value), divisor);
16763
16764 if (multiple_of_p (TREE_TYPE (value), value, div))
16765 return value;
16766 }
16767
16768 /* If divisor is a power of two, simplify this to bit manipulation. */
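  /* (E.g. rounding up to a multiple of 8 becomes (VALUE + 7) & -8;
     for VALUE == 13 that yields 20 & -8 == 16.)  */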
16769 if (divisor == (divisor & -divisor))
16770 {
16771 if (TREE_CODE (value) == INTEGER_CST)
16772 {
16773 double_int val = tree_to_double_int (value);
16774 bool overflow_p;
16775
16776 if ((val.low & (divisor - 1)) == 0)
16777 return value;
16778
16779 overflow_p = TREE_OVERFLOW (value);
16780 val.low &= ~(divisor - 1);
16781 val.low += divisor;
16782 if (val.low == 0)
16783 {
16784 val.high++;
16785 if (val.high == 0)
16786 overflow_p = true;
16787 }
16788
16789 return force_fit_type_double (TREE_TYPE (value), val,
16790 -1, overflow_p);
16791 }
16792 else
16793 {
16794 tree t;
16795
16796 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16797 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16798 t = build_int_cst (TREE_TYPE (value), -divisor);
16799 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16800 }
16801 }
16802 else
16803 {
16804 if (!div)
16805 div = build_int_cst (TREE_TYPE (value), divisor);
16806 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16807 value = size_binop_loc (loc, MULT_EXPR, value, div);
16808 }
16809
16810 return value;
16811 }

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when we are not given a const,
     because for a constant this check is more expensive than simply
     performing the rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
	return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation:
     VALUE & -DIVISOR clears the low bits.  */
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      /* Otherwise round via FLOOR_DIV followed by MULT.  */
      if (!div)
	div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}
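
/* Worked example, mirroring the one for round_up_loc: rounding 10
   down to a multiple of 8 computes 10 & -8 == 8.  A minimal sketch,
   assuming the round_down macro from tree.h:

     tree sz = size_int (10);
     tree aligned = round_down (sz, 8);

   For a non-power-of-two divisor such as 12, the FLOOR_DIV_EXPR /
   MULT_EXPR pair instead yields (10 / 12) * 12 == 0.  */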

/* Return a pointer to the base of the object addressed by EXP, and
   extract the offset of the access it describes: the constant bit
   offset is stored in *PBITPOS and any variable part in *POFFSET.  */

static tree
split_address_to_core_and_offset (tree exp,
				  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
				  poffset, &mode, &unsignedp, &volatilep,
				  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      /* EXP is not an address; treat it as the core itself, with no
	 offset.  */
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}
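
/* An illustrative example with hypothetical trees.  For EXP == &s.f,
   where field F lives at constant byte offset 4 within S:

     HOST_WIDE_INT bitpos;
     tree offset;
     tree core = split_address_to_core_and_offset (exp, &bitpos, &offset);

   CORE is &S, BITPOS is 32 (4 bytes, in bits) and OFFSET is
   NULL_TREE.  A variable index, as in &a[i], is returned through
   OFFSET instead.  */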

/* Return true if the addresses of E1 and E2 differ by a constant,
   false otherwise.  If they do, the byte difference E1 - E2 is
   stored in *DIFF.  */

bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  /* The difference can only be constant if both addresses refer to
     the same core object at byte-aligned positions.  */
  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
	toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
	return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference
	 cannot be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}
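
/* An illustrative example with hypothetical trees.  For E1 == &a[3]
   and E2 == &a[1], with A an array of 4-byte ints, both addresses
   split to the common core &A with bit positions 96 and 32:

     HOST_WIDE_INT diff;
     if (ptr_difference_const (e1, e2, &diff))
       gcc_assert (diff == 8);

   i.e. (96 - 32) / BITS_PER_UNIT on a target with 8-bit units.
   Distinct cores, or a variable offset present on only one side,
   make the function return false.  */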

/* Simplify the floating-point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
	return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
	return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
				arg0 ? arg0 : TREE_OPERAND (exp, 0),
				arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
	return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
	return fold_build3_loc (loc,
				COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
				arg0 ? arg0 : TREE_OPERAND (exp, 1),
				arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
	const enum built_in_function fcode = builtin_mathfn_code (exp);
	switch (fcode)
	  {
	  CASE_FLT_FN (BUILT_IN_COPYSIGN):
	    /* Strip the copysign call and return its first argument,
	       keeping the second only for its side effects.  */
	    arg0 = CALL_EXPR_ARG (exp, 0);
	    arg1 = CALL_EXPR_ARG (exp, 1);
	    return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

	  default:
	    /* Strip sign ops from the argument of "odd" math functions.  */
	    if (negate_mathfn_p (fcode))
	      {
		arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
		if (arg0)
		  return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
	      }
	    break;
	  }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}
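
/* Illustrative simplifications performed above, valid only because
   the caller has declared the sign of the result insignificant:

     -x               becomes  x           (NEGATE_EXPR)
     fabs (x) * y     becomes  x * y       (ABS_EXPR under MULT_EXPR)
     copysign (x, y)  becomes  x           (Y kept only for side effects)
     sin (-x)         becomes  sin (x)     (SIN satisfies negate_mathfn_p)

   MULT_EXPR and RDIV_EXPR are deliberately left alone when the mode
   honors sign-dependent rounding, since removing a negation there
   could change the magnitude of the result, not merely its sign.  */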