[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2013 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
 24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "tree.h"
49 #include "realmpfr.h"
50 #include "rtl.h"
51 #include "expr.h"
52 #include "tm_p.h"
53 #include "target.h"
54 #include "diagnostic-core.h"
55 #include "intl.h"
56 #include "ggc.h"
57 #include "hash-table.h"
58 #include "langhooks.h"
59 #include "md5.h"
60 #include "gimple.h"
61 #include "gimplify.h"
62 #include "tree-dfa.h"
63
64 /* Nonzero if we are folding constants inside an initializer; zero
65 otherwise. */
66 int folding_initializer = 0;
67
68 /* The following constants represent a bit based encoding of GCC's
69 comparison operators. This encoding simplifies transformations
70 on relational comparison operators, such as AND and OR. */
71 enum comparison_code {
72 COMPCODE_FALSE = 0,
73 COMPCODE_LT = 1,
74 COMPCODE_EQ = 2,
75 COMPCODE_LE = 3,
76 COMPCODE_GT = 4,
77 COMPCODE_LTGT = 5,
78 COMPCODE_GE = 6,
79 COMPCODE_ORD = 7,
80 COMPCODE_UNORD = 8,
81 COMPCODE_UNLT = 9,
82 COMPCODE_UNEQ = 10,
83 COMPCODE_UNLE = 11,
84 COMPCODE_UNGT = 12,
85 COMPCODE_NE = 13,
86 COMPCODE_UNGE = 14,
87 COMPCODE_TRUE = 15
88 };
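/* For illustration: the encoding is such that OR-ing two codes yields
   the code of the disjunction of the two predicates, e.g.
   COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ) and
   COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD),
   so (a < b) || (a == b) simplifies to a <= b by a single OR.  */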
89
90 static bool negate_mathfn_p (enum built_in_function);
91 static bool negate_expr_p (tree);
92 static tree negate_expr (tree);
93 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
94 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
95 static tree const_binop (enum tree_code, tree, tree);
96 static enum comparison_code comparison_to_compcode (enum tree_code);
97 static enum tree_code compcode_to_comparison (enum comparison_code);
98 static int operand_equal_for_comparison_p (tree, tree, tree);
99 static int twoval_comparison_p (tree, tree *, tree *, int *);
100 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
101 static tree pedantic_omit_one_operand_loc (location_t, tree, tree, tree);
102 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
103 static tree make_bit_field_ref (location_t, tree, tree,
104 HOST_WIDE_INT, HOST_WIDE_INT, int);
105 static tree optimize_bit_field_compare (location_t, enum tree_code,
106 tree, tree, tree);
107 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
108 HOST_WIDE_INT *,
109 enum machine_mode *, int *, int *,
110 tree *, tree *);
111 static int all_ones_mask_p (const_tree, int);
112 static tree sign_bit_p (tree, const_tree);
113 static int simple_operand_p (const_tree);
114 static bool simple_operand_p_2 (tree);
115 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
116 static tree range_predecessor (tree);
117 static tree range_successor (tree);
118 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
119 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
120 static tree unextend (tree, int, int, tree);
121 static tree optimize_minmax_comparison (location_t, enum tree_code,
122 tree, tree, tree);
123 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
124 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
125 static tree fold_binary_op_with_conditional_arg (location_t,
126 enum tree_code, tree,
127 tree, tree,
128 tree, tree, int);
129 static tree fold_mathfn_compare (location_t,
130 enum built_in_function, enum tree_code,
131 tree, tree, tree);
132 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
133 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
134 static bool reorder_operands_p (const_tree, const_tree);
135 static tree fold_negate_const (tree, tree);
136 static tree fold_not_const (const_tree, tree);
137 static tree fold_relational_const (enum tree_code, tree, tree, tree);
138 static tree fold_convert_const (enum tree_code, tree, tree);
139
140 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
141 Otherwise, return LOC. */
142
143 static location_t
144 expr_location_or (tree t, location_t loc)
145 {
146 location_t tloc = EXPR_LOCATION (t);
147 return tloc == UNKNOWN_LOCATION ? loc : tloc;
148 }
149
 150 /* Similar to protected_set_expr_location, but never modify x in place;
 151    if the location can and needs to be set, unshare it.  */
152
153 static inline tree
154 protected_set_expr_location_unshare (tree x, location_t loc)
155 {
156 if (CAN_HAVE_LOCATION_P (x)
157 && EXPR_LOCATION (x) != loc
158 && !(TREE_CODE (x) == SAVE_EXPR
159 || TREE_CODE (x) == TARGET_EXPR
160 || TREE_CODE (x) == BIND_EXPR))
161 {
162 x = copy_node (x);
163 SET_EXPR_LOCATION (x, loc);
164 }
165 return x;
166 }
167 \f
 168 /* If ARG2 divides ARG1 with zero remainder, carry out the division
 169    using code CODE and return the quotient.
 170    Otherwise return NULL_TREE.  */
171
172 tree
173 div_if_zero_remainder (enum tree_code code, const_tree arg1, const_tree arg2)
174 {
175 double_int quo, rem;
176 int uns;
177
 178   /* The sign of the division is taken from operand two; this
 179      does the correct thing for POINTER_PLUS_EXPR, where we want
 180      a signed division.  */
181 uns = TYPE_UNSIGNED (TREE_TYPE (arg2));
182
183 quo = tree_to_double_int (arg1).divmod (tree_to_double_int (arg2),
184 uns, code, &rem);
185
186 if (rem.is_zero ())
187 return build_int_cst_wide (TREE_TYPE (arg1), quo.low, quo.high);
188
189 return NULL_TREE;
190 }
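/* For example, div_if_zero_remainder (TRUNC_DIV_EXPR, 12, 4) on the
   corresponding INTEGER_CSTs yields the constant 3, while dividing
   13 by 4 leaves a nonzero remainder and therefore yields NULL_TREE.  */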
191 \f
192 /* This is nonzero if we should defer warnings about undefined
193 overflow. This facility exists because these warnings are a
194 special case. The code to estimate loop iterations does not want
195 to issue any warnings, since it works with expressions which do not
196 occur in user code. Various bits of cleanup code call fold(), but
197 only use the result if it has certain characteristics (e.g., is a
198 constant); that code only wants to issue a warning if the result is
199 used. */
200
201 static int fold_deferring_overflow_warnings;
202
203 /* If a warning about undefined overflow is deferred, this is the
204 warning. Note that this may cause us to turn two warnings into
205 one, but that is fine since it is sufficient to only give one
206 warning per expression. */
207
208 static const char* fold_deferred_overflow_warning;
209
210 /* If a warning about undefined overflow is deferred, this is the
211 level at which the warning should be emitted. */
212
213 static enum warn_strict_overflow_code fold_deferred_overflow_code;
214
215 /* Start deferring overflow warnings. We could use a stack here to
216 permit nested calls, but at present it is not necessary. */
217
218 void
219 fold_defer_overflow_warnings (void)
220 {
221 ++fold_deferring_overflow_warnings;
222 }
223
224 /* Stop deferring overflow warnings. If there is a pending warning,
225 and ISSUE is true, then issue the warning if appropriate. STMT is
226 the statement with which the warning should be associated (used for
227 location information); STMT may be NULL. CODE is the level of the
228 warning--a warn_strict_overflow_code value. This function will use
229 the smaller of CODE and the deferred code when deciding whether to
 230    issue the warning.  CODE may be zero, which means always use the
 231    deferred code.  */
232
233 void
234 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
235 {
236 const char *warnmsg;
237 location_t locus;
238
239 gcc_assert (fold_deferring_overflow_warnings > 0);
240 --fold_deferring_overflow_warnings;
241 if (fold_deferring_overflow_warnings > 0)
242 {
243 if (fold_deferred_overflow_warning != NULL
244 && code != 0
245 && code < (int) fold_deferred_overflow_code)
246 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
247 return;
248 }
249
250 warnmsg = fold_deferred_overflow_warning;
251 fold_deferred_overflow_warning = NULL;
252
253 if (!issue || warnmsg == NULL)
254 return;
255
256 if (gimple_no_warning_p (stmt))
257 return;
258
259 /* Use the smallest code level when deciding to issue the
260 warning. */
261 if (code == 0 || code > (int) fold_deferred_overflow_code)
262 code = fold_deferred_overflow_code;
263
264 if (!issue_strict_overflow_warning (code))
265 return;
266
267 if (stmt == NULL)
268 locus = input_location;
269 else
270 locus = gimple_location (stmt);
271 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
272 }
273
274 /* Stop deferring overflow warnings, ignoring any deferred
275 warnings. */
276
277 void
278 fold_undefer_and_ignore_overflow_warnings (void)
279 {
280 fold_undefer_overflow_warnings (false, NULL, 0);
281 }
282
283 /* Whether we are deferring overflow warnings. */
284
285 bool
286 fold_deferring_overflow_warnings_p (void)
287 {
288 return fold_deferring_overflow_warnings > 0;
289 }
290
291 /* This is called when we fold something based on the fact that signed
292 overflow is undefined. */
293
294 static void
295 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
296 {
297 if (fold_deferring_overflow_warnings > 0)
298 {
299 if (fold_deferred_overflow_warning == NULL
300 || wc < fold_deferred_overflow_code)
301 {
302 fold_deferred_overflow_warning = gmsgid;
303 fold_deferred_overflow_code = wc;
304 }
305 }
306 else if (issue_strict_overflow_warning (wc))
307 warning (OPT_Wstrict_overflow, gmsgid);
308 }
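/* A minimal usage sketch of the deferral machinery (hypothetical
   caller; RESULT_IS_USED is a placeholder for the caller's own check):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (result_is_used, stmt, 0);

   Any warning recorded while folding EXPR is emitted only if the
   caller decides the folded result is actually used.  */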
309 \f
310 /* Return true if the built-in mathematical function specified by CODE
311 is odd, i.e. -f(x) == f(-x). */
312
313 static bool
314 negate_mathfn_p (enum built_in_function code)
315 {
316 switch (code)
317 {
318 CASE_FLT_FN (BUILT_IN_ASIN):
319 CASE_FLT_FN (BUILT_IN_ASINH):
320 CASE_FLT_FN (BUILT_IN_ATAN):
321 CASE_FLT_FN (BUILT_IN_ATANH):
322 CASE_FLT_FN (BUILT_IN_CASIN):
323 CASE_FLT_FN (BUILT_IN_CASINH):
324 CASE_FLT_FN (BUILT_IN_CATAN):
325 CASE_FLT_FN (BUILT_IN_CATANH):
326 CASE_FLT_FN (BUILT_IN_CBRT):
327 CASE_FLT_FN (BUILT_IN_CPROJ):
328 CASE_FLT_FN (BUILT_IN_CSIN):
329 CASE_FLT_FN (BUILT_IN_CSINH):
330 CASE_FLT_FN (BUILT_IN_CTAN):
331 CASE_FLT_FN (BUILT_IN_CTANH):
332 CASE_FLT_FN (BUILT_IN_ERF):
333 CASE_FLT_FN (BUILT_IN_LLROUND):
334 CASE_FLT_FN (BUILT_IN_LROUND):
335 CASE_FLT_FN (BUILT_IN_ROUND):
336 CASE_FLT_FN (BUILT_IN_SIN):
337 CASE_FLT_FN (BUILT_IN_SINH):
338 CASE_FLT_FN (BUILT_IN_TAN):
339 CASE_FLT_FN (BUILT_IN_TANH):
340 CASE_FLT_FN (BUILT_IN_TRUNC):
341 return true;
342
343 CASE_FLT_FN (BUILT_IN_LLRINT):
344 CASE_FLT_FN (BUILT_IN_LRINT):
345 CASE_FLT_FN (BUILT_IN_NEARBYINT):
346 CASE_FLT_FN (BUILT_IN_RINT):
347 return !flag_rounding_math;
348
349 default:
350 break;
351 }
352 return false;
353 }
354
355 /* Check whether we may negate an integer constant T without causing
356 overflow. */
357
358 bool
359 may_negate_without_overflow_p (const_tree t)
360 {
361 unsigned HOST_WIDE_INT val;
362 unsigned int prec;
363 tree type;
364
365 gcc_assert (TREE_CODE (t) == INTEGER_CST);
366
367 type = TREE_TYPE (t);
368 if (TYPE_UNSIGNED (type))
369 return false;
370
371 prec = TYPE_PRECISION (type);
372 if (prec > HOST_BITS_PER_WIDE_INT)
373 {
374 if (TREE_INT_CST_LOW (t) != 0)
375 return true;
376 prec -= HOST_BITS_PER_WIDE_INT;
377 val = TREE_INT_CST_HIGH (t);
378 }
379 else
380 val = TREE_INT_CST_LOW (t);
381 if (prec < HOST_BITS_PER_WIDE_INT)
382 val &= ((unsigned HOST_WIDE_INT) 1 << prec) - 1;
383 return val != ((unsigned HOST_WIDE_INT) 1 << (prec - 1));
384 }
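/* For example, for a 32-bit signed type this returns false only for
   -2147483648 (INT_MIN), the one value whose negation cannot be
   represented in the type.  */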
385
386 /* Determine whether an expression T can be cheaply negated using
387 the function negate_expr without introducing undefined overflow. */
388
389 static bool
390 negate_expr_p (tree t)
391 {
392 tree type;
393
394 if (t == 0)
395 return false;
396
397 type = TREE_TYPE (t);
398
399 STRIP_SIGN_NOPS (t);
400 switch (TREE_CODE (t))
401 {
402 case INTEGER_CST:
403 if (TYPE_OVERFLOW_WRAPS (type))
404 return true;
405
406 /* Check that -CST will not overflow type. */
407 return may_negate_without_overflow_p (t);
408 case BIT_NOT_EXPR:
409 return (INTEGRAL_TYPE_P (type)
410 && TYPE_OVERFLOW_WRAPS (type));
411
412 case FIXED_CST:
413 case NEGATE_EXPR:
414 return true;
415
416 case REAL_CST:
417 /* We want to canonicalize to positive real constants. Pretend
418 that only negative ones can be easily negated. */
419 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
420
421 case COMPLEX_CST:
422 return negate_expr_p (TREE_REALPART (t))
423 && negate_expr_p (TREE_IMAGPART (t));
424
425 case VECTOR_CST:
426 {
427 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
428 return true;
429
430 int count = TYPE_VECTOR_SUBPARTS (type), i;
431
432 for (i = 0; i < count; i++)
433 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
434 return false;
435
436 return true;
437 }
438
439 case COMPLEX_EXPR:
440 return negate_expr_p (TREE_OPERAND (t, 0))
441 && negate_expr_p (TREE_OPERAND (t, 1));
442
443 case CONJ_EXPR:
444 return negate_expr_p (TREE_OPERAND (t, 0));
445
446 case PLUS_EXPR:
447 if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
448 || HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
449 return false;
450 /* -(A + B) -> (-B) - A. */
451 if (negate_expr_p (TREE_OPERAND (t, 1))
452 && reorder_operands_p (TREE_OPERAND (t, 0),
453 TREE_OPERAND (t, 1)))
454 return true;
455 /* -(A + B) -> (-A) - B. */
456 return negate_expr_p (TREE_OPERAND (t, 0));
457
458 case MINUS_EXPR:
459 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
460 return !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
461 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
462 && reorder_operands_p (TREE_OPERAND (t, 0),
463 TREE_OPERAND (t, 1));
464
465 case MULT_EXPR:
466 if (TYPE_UNSIGNED (TREE_TYPE (t)))
467 break;
468
469 /* Fall through. */
470
471 case RDIV_EXPR:
472 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (t))))
473 return negate_expr_p (TREE_OPERAND (t, 1))
474 || negate_expr_p (TREE_OPERAND (t, 0));
475 break;
476
477 case TRUNC_DIV_EXPR:
478 case ROUND_DIV_EXPR:
479 case FLOOR_DIV_EXPR:
480 case CEIL_DIV_EXPR:
481 case EXACT_DIV_EXPR:
482 /* In general we can't negate A / B, because if A is INT_MIN and
483 B is 1, we may turn this into INT_MIN / -1 which is undefined
484 and actually traps on some architectures. But if overflow is
485 undefined, we can negate, because - (INT_MIN / 1) is an
486 overflow. */
487 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
488 {
489 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
490 break;
491 /* If overflow is undefined then we have to be careful because
492 we ask whether it's ok to associate the negate with the
493 division which is not ok for example for
494 -((a - b) / c) where (-(a - b)) / c may invoke undefined
495 overflow because of negating INT_MIN. So do not use
496 negate_expr_p here but open-code the two important cases. */
497 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
498 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
499 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
500 return true;
501 }
502 else if (negate_expr_p (TREE_OPERAND (t, 0)))
503 return true;
504 return negate_expr_p (TREE_OPERAND (t, 1));
505
506 case NOP_EXPR:
507 /* Negate -((double)float) as (double)(-float). */
508 if (TREE_CODE (type) == REAL_TYPE)
509 {
510 tree tem = strip_float_extensions (t);
511 if (tem != t)
512 return negate_expr_p (tem);
513 }
514 break;
515
516 case CALL_EXPR:
517 /* Negate -f(x) as f(-x). */
518 if (negate_mathfn_p (builtin_mathfn_code (t)))
519 return negate_expr_p (CALL_EXPR_ARG (t, 0));
520 break;
521
522 case RSHIFT_EXPR:
523 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
524 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
525 {
526 tree op1 = TREE_OPERAND (t, 1);
527 if (TREE_INT_CST_HIGH (op1) == 0
528 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
529 == TREE_INT_CST_LOW (op1))
530 return true;
531 }
532 break;
533
534 default:
535 break;
536 }
537 return false;
538 }
539
540 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
541 simplification is possible.
542 If negate_expr_p would return true for T, NULL_TREE will never be
543 returned. */
544
545 static tree
546 fold_negate_expr (location_t loc, tree t)
547 {
548 tree type = TREE_TYPE (t);
549 tree tem;
550
551 switch (TREE_CODE (t))
552 {
553 /* Convert - (~A) to A + 1. */
554 case BIT_NOT_EXPR:
555 if (INTEGRAL_TYPE_P (type))
556 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
557 build_one_cst (type));
558 break;
559
560 case INTEGER_CST:
561 tem = fold_negate_const (t, type);
562 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
563 || !TYPE_OVERFLOW_TRAPS (type))
564 return tem;
565 break;
566
567 case REAL_CST:
568 tem = fold_negate_const (t, type);
569 /* Two's complement FP formats, such as c4x, may overflow. */
570 if (!TREE_OVERFLOW (tem) || !flag_trapping_math)
571 return tem;
572 break;
573
574 case FIXED_CST:
575 tem = fold_negate_const (t, type);
576 return tem;
577
578 case COMPLEX_CST:
579 {
580 tree rpart = negate_expr (TREE_REALPART (t));
581 tree ipart = negate_expr (TREE_IMAGPART (t));
582
583 if ((TREE_CODE (rpart) == REAL_CST
584 && TREE_CODE (ipart) == REAL_CST)
585 || (TREE_CODE (rpart) == INTEGER_CST
586 && TREE_CODE (ipart) == INTEGER_CST))
587 return build_complex (type, rpart, ipart);
588 }
589 break;
590
591 case VECTOR_CST:
592 {
593 int count = TYPE_VECTOR_SUBPARTS (type), i;
594 tree *elts = XALLOCAVEC (tree, count);
595
596 for (i = 0; i < count; i++)
597 {
598 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
599 if (elts[i] == NULL_TREE)
600 return NULL_TREE;
601 }
602
603 return build_vector (type, elts);
604 }
605
606 case COMPLEX_EXPR:
607 if (negate_expr_p (t))
608 return fold_build2_loc (loc, COMPLEX_EXPR, type,
609 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
610 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
611 break;
612
613 case CONJ_EXPR:
614 if (negate_expr_p (t))
615 return fold_build1_loc (loc, CONJ_EXPR, type,
616 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
617 break;
618
619 case NEGATE_EXPR:
620 return TREE_OPERAND (t, 0);
621
622 case PLUS_EXPR:
623 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
624 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
625 {
626 /* -(A + B) -> (-B) - A. */
627 if (negate_expr_p (TREE_OPERAND (t, 1))
628 && reorder_operands_p (TREE_OPERAND (t, 0),
629 TREE_OPERAND (t, 1)))
630 {
631 tem = negate_expr (TREE_OPERAND (t, 1));
632 return fold_build2_loc (loc, MINUS_EXPR, type,
633 tem, TREE_OPERAND (t, 0));
634 }
635
636 /* -(A + B) -> (-A) - B. */
637 if (negate_expr_p (TREE_OPERAND (t, 0)))
638 {
639 tem = negate_expr (TREE_OPERAND (t, 0));
640 return fold_build2_loc (loc, MINUS_EXPR, type,
641 tem, TREE_OPERAND (t, 1));
642 }
643 }
644 break;
645
646 case MINUS_EXPR:
647 /* - (A - B) -> B - A */
648 if (!HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type))
649 && !HONOR_SIGNED_ZEROS (TYPE_MODE (type))
650 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
651 return fold_build2_loc (loc, MINUS_EXPR, type,
652 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
653 break;
654
655 case MULT_EXPR:
656 if (TYPE_UNSIGNED (type))
657 break;
658
659 /* Fall through. */
660
661 case RDIV_EXPR:
662 if (! HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type)))
663 {
664 tem = TREE_OPERAND (t, 1);
665 if (negate_expr_p (tem))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 TREE_OPERAND (t, 0), negate_expr (tem));
668 tem = TREE_OPERAND (t, 0);
669 if (negate_expr_p (tem))
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 negate_expr (tem), TREE_OPERAND (t, 1));
672 }
673 break;
674
675 case TRUNC_DIV_EXPR:
676 case ROUND_DIV_EXPR:
677 case FLOOR_DIV_EXPR:
678 case CEIL_DIV_EXPR:
679 case EXACT_DIV_EXPR:
680 /* In general we can't negate A / B, because if A is INT_MIN and
681 B is 1, we may turn this into INT_MIN / -1 which is undefined
682 and actually traps on some architectures. But if overflow is
683 undefined, we can negate, because - (INT_MIN / 1) is an
684 overflow. */
685 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
686 {
687 const char * const warnmsg = G_("assuming signed overflow does not "
688 "occur when negating a division");
689 tem = TREE_OPERAND (t, 1);
690 if (negate_expr_p (tem))
691 {
692 if (INTEGRAL_TYPE_P (type)
693 && (TREE_CODE (tem) != INTEGER_CST
694 || integer_onep (tem)))
695 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
696 return fold_build2_loc (loc, TREE_CODE (t), type,
697 TREE_OPERAND (t, 0), negate_expr (tem));
698 }
699 /* If overflow is undefined then we have to be careful because
700 we ask whether it's ok to associate the negate with the
701 division which is not ok for example for
702 -((a - b) / c) where (-(a - b)) / c may invoke undefined
703 overflow because of negating INT_MIN. So do not use
704 negate_expr_p here but open-code the two important cases. */
705 tem = TREE_OPERAND (t, 0);
706 if ((INTEGRAL_TYPE_P (type)
707 && (TREE_CODE (tem) == NEGATE_EXPR
708 || (TREE_CODE (tem) == INTEGER_CST
709 && may_negate_without_overflow_p (tem))))
710 || !INTEGRAL_TYPE_P (type))
711 return fold_build2_loc (loc, TREE_CODE (t), type,
712 negate_expr (tem), TREE_OPERAND (t, 1));
713 }
714 break;
715
716 case NOP_EXPR:
717 /* Convert -((double)float) into (double)(-float). */
718 if (TREE_CODE (type) == REAL_TYPE)
719 {
720 tem = strip_float_extensions (t);
721 if (tem != t && negate_expr_p (tem))
722 return fold_convert_loc (loc, type, negate_expr (tem));
723 }
724 break;
725
726 case CALL_EXPR:
727 /* Negate -f(x) as f(-x). */
728 if (negate_mathfn_p (builtin_mathfn_code (t))
729 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
730 {
731 tree fndecl, arg;
732
733 fndecl = get_callee_fndecl (t);
734 arg = negate_expr (CALL_EXPR_ARG (t, 0));
735 return build_call_expr_loc (loc, fndecl, 1, arg);
736 }
737 break;
738
739 case RSHIFT_EXPR:
740 /* Optimize -((int)x >> 31) into (unsigned)x >> 31. */
741 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
742 {
743 tree op1 = TREE_OPERAND (t, 1);
744 if (TREE_INT_CST_HIGH (op1) == 0
745 && (unsigned HOST_WIDE_INT) (TYPE_PRECISION (type) - 1)
746 == TREE_INT_CST_LOW (op1))
747 {
748 tree ntype = TYPE_UNSIGNED (type)
749 ? signed_type_for (type)
750 : unsigned_type_for (type);
751 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
752 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
753 return fold_convert_loc (loc, type, temp);
754 }
755 }
756 break;
757
758 default:
759 break;
760 }
761
762 return NULL_TREE;
763 }
764
 765 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
 766    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
 767    return NULL_TREE.  */
768
769 static tree
770 negate_expr (tree t)
771 {
772 tree type, tem;
773 location_t loc;
774
775 if (t == NULL_TREE)
776 return NULL_TREE;
777
778 loc = EXPR_LOCATION (t);
779 type = TREE_TYPE (t);
780 STRIP_SIGN_NOPS (t);
781
782 tem = fold_negate_expr (loc, t);
783 if (!tem)
784 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
785 return fold_convert_loc (loc, type, tem);
786 }
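/* For example, negating a - b yields b - a when signed zeros and
   sign-dependent rounding are not honored; an expression with no
   cheaper negated form is simply wrapped in a NEGATE_EXPR.  */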
787 \f
 788 /* Split a tree IN into constant, literal and variable parts that could be
789 combined with CODE to make IN. "constant" means an expression with
790 TREE_CONSTANT but that isn't an actual constant. CODE must be a
791 commutative arithmetic operation. Store the constant part into *CONP,
792 the literal in *LITP and return the variable part. If a part isn't
793 present, set it to null. If the tree does not decompose in this way,
794 return the entire tree as the variable part and the other parts as null.
795
796 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
797 case, we negate an operand that was subtracted. Except if it is a
798 literal for which we use *MINUS_LITP instead.
799
800 If NEGATE_P is true, we are negating all of IN, again except a literal
801 for which we use *MINUS_LITP instead.
802
803 If IN is itself a literal or constant, return it as appropriate.
804
805 Note that we do not guarantee that any of the three values will be the
806 same type as IN, but they will have the same signedness and mode. */
807
808 static tree
809 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
810 tree *minus_litp, int negate_p)
811 {
812 tree var = 0;
813
814 *conp = 0;
815 *litp = 0;
816 *minus_litp = 0;
817
818 /* Strip any conversions that don't change the machine mode or signedness. */
819 STRIP_SIGN_NOPS (in);
820
821 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
822 || TREE_CODE (in) == FIXED_CST)
823 *litp = in;
824 else if (TREE_CODE (in) == code
825 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
826 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
827 /* We can associate addition and subtraction together (even
828 though the C standard doesn't say so) for integers because
829 the value is not affected. For reals, the value might be
830 affected, so we can't. */
831 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
832 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
833 {
834 tree op0 = TREE_OPERAND (in, 0);
835 tree op1 = TREE_OPERAND (in, 1);
836 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
837 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
838
839 /* First see if either of the operands is a literal, then a constant. */
840 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
841 || TREE_CODE (op0) == FIXED_CST)
842 *litp = op0, op0 = 0;
843 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
844 || TREE_CODE (op1) == FIXED_CST)
845 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
846
847 if (op0 != 0 && TREE_CONSTANT (op0))
848 *conp = op0, op0 = 0;
849 else if (op1 != 0 && TREE_CONSTANT (op1))
850 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
851
852 /* If we haven't dealt with either operand, this is not a case we can
853 decompose. Otherwise, VAR is either of the ones remaining, if any. */
854 if (op0 != 0 && op1 != 0)
855 var = in;
856 else if (op0 != 0)
857 var = op0;
858 else
859 var = op1, neg_var_p = neg1_p;
860
861 /* Now do any needed negations. */
862 if (neg_litp_p)
863 *minus_litp = *litp, *litp = 0;
864 if (neg_conp_p)
865 *conp = negate_expr (*conp);
866 if (neg_var_p)
867 var = negate_expr (var);
868 }
869 else if (TREE_CODE (in) == BIT_NOT_EXPR
870 && code == PLUS_EXPR)
871 {
 872       /* -X - 1 is folded to ~X; undo that here.  */
873 *minus_litp = build_one_cst (TREE_TYPE (in));
874 var = negate_expr (TREE_OPERAND (in, 0));
875 }
876 else if (TREE_CONSTANT (in))
877 *conp = in;
878 else
879 var = in;
880
881 if (negate_p)
882 {
883 if (*litp)
884 *minus_litp = *litp, *litp = 0;
885 else if (*minus_litp)
886 *litp = *minus_litp, *minus_litp = 0;
887 *conp = negate_expr (*conp);
888 var = negate_expr (var);
889 }
890
891 return var;
892 }
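/* For example, splitting A + 4 with code PLUS_EXPR sets *LITP to 4
   and returns A, while splitting A - 4 sets *MINUS_LITP to 4 instead,
   because the literal was subtracted.  */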
893
894 /* Re-associate trees split by the above function. T1 and T2 are
895 either expressions to associate or null. Return the new
896 expression, if any. LOC is the location of the new expression. If
897 we build an operation, do it in TYPE and with CODE. */
898
899 static tree
900 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
901 {
902 if (t1 == 0)
903 return t2;
904 else if (t2 == 0)
905 return t1;
906
907 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
908 try to fold this since we will have infinite recursion. But do
909 deal with any NEGATE_EXPRs. */
910 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
911 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
912 {
913 if (code == PLUS_EXPR)
914 {
915 if (TREE_CODE (t1) == NEGATE_EXPR)
916 return build2_loc (loc, MINUS_EXPR, type,
917 fold_convert_loc (loc, type, t2),
918 fold_convert_loc (loc, type,
919 TREE_OPERAND (t1, 0)));
920 else if (TREE_CODE (t2) == NEGATE_EXPR)
921 return build2_loc (loc, MINUS_EXPR, type,
922 fold_convert_loc (loc, type, t1),
923 fold_convert_loc (loc, type,
924 TREE_OPERAND (t2, 0)));
925 else if (integer_zerop (t2))
926 return fold_convert_loc (loc, type, t1);
927 }
928 else if (code == MINUS_EXPR)
929 {
930 if (integer_zerop (t2))
931 return fold_convert_loc (loc, type, t1);
932 }
933
934 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
937
938 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
939 fold_convert_loc (loc, type, t2));
940 }
941 \f
942 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
943 for use in int_const_binop, size_binop and size_diffop. */
944
945 static bool
946 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
947 {
948 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
949 return false;
950 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
951 return false;
952
953 switch (code)
954 {
955 case LSHIFT_EXPR:
956 case RSHIFT_EXPR:
957 case LROTATE_EXPR:
958 case RROTATE_EXPR:
959 return true;
960
961 default:
962 break;
963 }
964
965 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
966 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
967 && TYPE_MODE (type1) == TYPE_MODE (type2);
968 }
969
970
971 /* Combine two integer constants ARG1 and ARG2 under operation CODE
972 to produce a new constant. Return NULL_TREE if we don't know how
973 to evaluate CODE at compile-time. */
974
975 static tree
976 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree arg2,
977 int overflowable)
978 {
979 double_int op1, op2, res, tmp;
980 tree t;
981 tree type = TREE_TYPE (arg1);
982 bool uns = TYPE_UNSIGNED (type);
983 bool overflow = false;
984
985 op1 = tree_to_double_int (arg1);
986 op2 = tree_to_double_int (arg2);
987
988 switch (code)
989 {
990 case BIT_IOR_EXPR:
991 res = op1 | op2;
992 break;
993
994 case BIT_XOR_EXPR:
995 res = op1 ^ op2;
996 break;
997
998 case BIT_AND_EXPR:
999 res = op1 & op2;
1000 break;
1001
1002 case RSHIFT_EXPR:
1003 res = op1.rshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1004 break;
1005
1006 case LSHIFT_EXPR:
1007 /* It's unclear from the C standard whether shifts can overflow.
1008 The following code ignores overflow; perhaps a C standard
1009 interpretation ruling is needed. */
1010 res = op1.lshift (op2.to_shwi (), TYPE_PRECISION (type), !uns);
1011 break;
1012
1013 case RROTATE_EXPR:
1014 res = op1.rrotate (op2.to_shwi (), TYPE_PRECISION (type));
1015 break;
1016
1017 case LROTATE_EXPR:
1018 res = op1.lrotate (op2.to_shwi (), TYPE_PRECISION (type));
1019 break;
1020
1021 case PLUS_EXPR:
1022 res = op1.add_with_sign (op2, false, &overflow);
1023 break;
1024
1025 case MINUS_EXPR:
1026 res = op1.sub_with_overflow (op2, &overflow);
1027 break;
1028
1029 case MULT_EXPR:
1030 res = op1.mul_with_sign (op2, false, &overflow);
1031 break;
1032
1033 case MULT_HIGHPART_EXPR:
1034 if (TYPE_PRECISION (type) > HOST_BITS_PER_WIDE_INT)
1035 {
1036 bool dummy_overflow;
1037 if (TYPE_PRECISION (type) != 2 * HOST_BITS_PER_WIDE_INT)
1038 return NULL_TREE;
1039 op1.wide_mul_with_sign (op2, uns, &res, &dummy_overflow);
1040 }
1041 else
1042 {
1043 bool dummy_overflow;
 1044 	  /* MULT_HIGHPART_EXPR can't ever overflow, as the multiplication
 1045 	     is performed in twice the precision of the arguments.  */
1046 tmp = op1.mul_with_sign (op2, false, &dummy_overflow);
1047 res = tmp.rshift (TYPE_PRECISION (type),
1048 2 * TYPE_PRECISION (type), !uns);
1049 }
1050 break;
1051
1052 case TRUNC_DIV_EXPR:
1053 case FLOOR_DIV_EXPR: case CEIL_DIV_EXPR:
1054 case EXACT_DIV_EXPR:
1055 /* This is a shortcut for a common special case. */
1056 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1057 && !TREE_OVERFLOW (arg1)
1058 && !TREE_OVERFLOW (arg2)
1059 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1060 {
1061 if (code == CEIL_DIV_EXPR)
1062 op1.low += op2.low - 1;
1063
1064 res.low = op1.low / op2.low, res.high = 0;
1065 break;
1066 }
1067
1068 /* ... fall through ... */
1069
1070 case ROUND_DIV_EXPR:
1071 if (op2.is_zero ())
1072 return NULL_TREE;
1073 if (op2.is_one ())
1074 {
1075 res = op1;
1076 break;
1077 }
1078 if (op1 == op2 && !op1.is_zero ())
1079 {
1080 res = double_int_one;
1081 break;
1082 }
1083 res = op1.divmod_with_overflow (op2, uns, code, &tmp, &overflow);
1084 break;
1085
1086 case TRUNC_MOD_EXPR:
1087 case FLOOR_MOD_EXPR: case CEIL_MOD_EXPR:
1088 /* This is a shortcut for a common special case. */
1089 if (op2.high == 0 && (HOST_WIDE_INT) op2.low > 0
1090 && !TREE_OVERFLOW (arg1)
1091 && !TREE_OVERFLOW (arg2)
1092 && op1.high == 0 && (HOST_WIDE_INT) op1.low >= 0)
1093 {
1094 if (code == CEIL_MOD_EXPR)
1095 op1.low += op2.low - 1;
1096 res.low = op1.low % op2.low, res.high = 0;
1097 break;
1098 }
1099
1100 /* ... fall through ... */
1101
1102 case ROUND_MOD_EXPR:
1103 if (op2.is_zero ())
1104 return NULL_TREE;
1105 tmp = op1.divmod_with_overflow (op2, uns, code, &res, &overflow);
1106 break;
1107
1108 case MIN_EXPR:
1109 res = op1.min (op2, uns);
1110 break;
1111
1112 case MAX_EXPR:
1113 res = op1.max (op2, uns);
1114 break;
1115
1116 default:
1117 return NULL_TREE;
1118 }
1119
1120 t = force_fit_type_double (TREE_TYPE (arg1), res, overflowable,
1121 (!uns && overflow)
1122 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2));
1123
1124 return t;
1125 }
1126
1127 tree
1128 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1129 {
1130 return int_const_binop_1 (code, arg1, arg2, 1);
1131 }
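/* For example, adding the signed int constants INT_MAX and 1 yields
   INT_MIN with TREE_OVERFLOW set on the result, whereas the same
   addition in an unsigned type wraps without the overflow flag.  */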
1132
1133 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1134 constant. We assume ARG1 and ARG2 have the same data type, or at least
1135 are the same kind of constant and the same machine mode. Return zero if
1136 combining the constants is not allowed in the current operating mode. */
1137
1138 static tree
1139 const_binop (enum tree_code code, tree arg1, tree arg2)
1140 {
1141 /* Sanity check for the recursive cases. */
1142 if (!arg1 || !arg2)
1143 return NULL_TREE;
1144
1145 STRIP_NOPS (arg1);
1146 STRIP_NOPS (arg2);
1147
1148 if (TREE_CODE (arg1) == INTEGER_CST)
1149 return int_const_binop (code, arg1, arg2);
1150
1151 if (TREE_CODE (arg1) == REAL_CST)
1152 {
1153 enum machine_mode mode;
1154 REAL_VALUE_TYPE d1;
1155 REAL_VALUE_TYPE d2;
1156 REAL_VALUE_TYPE value;
1157 REAL_VALUE_TYPE result;
1158 bool inexact;
1159 tree t, type;
1160
1161 /* The following codes are handled by real_arithmetic. */
1162 switch (code)
1163 {
1164 case PLUS_EXPR:
1165 case MINUS_EXPR:
1166 case MULT_EXPR:
1167 case RDIV_EXPR:
1168 case MIN_EXPR:
1169 case MAX_EXPR:
1170 break;
1171
1172 default:
1173 return NULL_TREE;
1174 }
1175
1176 d1 = TREE_REAL_CST (arg1);
1177 d2 = TREE_REAL_CST (arg2);
1178
1179 type = TREE_TYPE (arg1);
1180 mode = TYPE_MODE (type);
1181
1182 /* Don't perform operation if we honor signaling NaNs and
1183 either operand is a NaN. */
1184 if (HONOR_SNANS (mode)
1185 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1186 return NULL_TREE;
1187
1188 /* Don't perform operation if it would raise a division
1189 by zero exception. */
1190 if (code == RDIV_EXPR
1191 && REAL_VALUES_EQUAL (d2, dconst0)
1192 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1193 return NULL_TREE;
1194
1195 /* If either operand is a NaN, just return it. Otherwise, set up
1196 for floating-point trap; we return an overflow. */
1197 if (REAL_VALUE_ISNAN (d1))
1198 return arg1;
1199 else if (REAL_VALUE_ISNAN (d2))
1200 return arg2;
1201
1202 inexact = real_arithmetic (&value, code, &d1, &d2);
1203 real_convert (&result, mode, &value);
1204
 1205       /* Don't constant fold this floating point operation if
 1206 	 the result has overflowed and flag_trapping_math is set.  */
1207 if (flag_trapping_math
1208 && MODE_HAS_INFINITIES (mode)
1209 && REAL_VALUE_ISINF (result)
1210 && !REAL_VALUE_ISINF (d1)
1211 && !REAL_VALUE_ISINF (d2))
1212 return NULL_TREE;
1213
1214 /* Don't constant fold this floating point operation if the
 1215 	 result may depend upon the run-time rounding mode and
1216 flag_rounding_math is set, or if GCC's software emulation
1217 is unable to accurately represent the result. */
1218 if ((flag_rounding_math
1219 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1220 && (inexact || !real_identical (&result, &value)))
1221 return NULL_TREE;
1222
1223 t = build_real (type, result);
1224
1225 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1226 return t;
1227 }
1228
1229 if (TREE_CODE (arg1) == FIXED_CST)
1230 {
1231 FIXED_VALUE_TYPE f1;
1232 FIXED_VALUE_TYPE f2;
1233 FIXED_VALUE_TYPE result;
1234 tree t, type;
1235 int sat_p;
1236 bool overflow_p;
1237
1238 /* The following codes are handled by fixed_arithmetic. */
1239 switch (code)
1240 {
1241 case PLUS_EXPR:
1242 case MINUS_EXPR:
1243 case MULT_EXPR:
1244 case TRUNC_DIV_EXPR:
1245 f2 = TREE_FIXED_CST (arg2);
1246 break;
1247
1248 case LSHIFT_EXPR:
1249 case RSHIFT_EXPR:
1250 f2.data.high = TREE_INT_CST_HIGH (arg2);
1251 f2.data.low = TREE_INT_CST_LOW (arg2);
1252 f2.mode = SImode;
1253 break;
1254
1255 default:
1256 return NULL_TREE;
1257 }
1258
1259 f1 = TREE_FIXED_CST (arg1);
1260 type = TREE_TYPE (arg1);
1261 sat_p = TYPE_SATURATING (type);
1262 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1263 t = build_fixed (type, result);
1264 /* Propagate overflow flags. */
1265 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1266 TREE_OVERFLOW (t) = 1;
1267 return t;
1268 }
1269
1270 if (TREE_CODE (arg1) == COMPLEX_CST)
1271 {
1272 tree type = TREE_TYPE (arg1);
1273 tree r1 = TREE_REALPART (arg1);
1274 tree i1 = TREE_IMAGPART (arg1);
1275 tree r2 = TREE_REALPART (arg2);
1276 tree i2 = TREE_IMAGPART (arg2);
1277 tree real, imag;
1278
1279 switch (code)
1280 {
1281 case PLUS_EXPR:
1282 case MINUS_EXPR:
1283 real = const_binop (code, r1, r2);
1284 imag = const_binop (code, i1, i2);
1285 break;
1286
1287 case MULT_EXPR:
1288 if (COMPLEX_FLOAT_TYPE_P (type))
1289 return do_mpc_arg2 (arg1, arg2, type,
1290 /* do_nonfinite= */ folding_initializer,
1291 mpc_mul);
1292
1293 real = const_binop (MINUS_EXPR,
1294 const_binop (MULT_EXPR, r1, r2),
1295 const_binop (MULT_EXPR, i1, i2));
1296 imag = const_binop (PLUS_EXPR,
1297 const_binop (MULT_EXPR, r1, i2),
1298 const_binop (MULT_EXPR, i1, r2));
1299 break;
1300
1301 case RDIV_EXPR:
1302 if (COMPLEX_FLOAT_TYPE_P (type))
1303 return do_mpc_arg2 (arg1, arg2, type,
1304 /* do_nonfinite= */ folding_initializer,
1305 mpc_div);
1306 /* Fallthru ... */
1307 case TRUNC_DIV_EXPR:
1308 case CEIL_DIV_EXPR:
1309 case FLOOR_DIV_EXPR:
1310 case ROUND_DIV_EXPR:
1311 if (flag_complex_method == 0)
1312 {
1313 /* Keep this algorithm in sync with
1314 tree-complex.c:expand_complex_div_straight().
1315
1316 Expand complex division to scalars, straightforward algorithm.
1317 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1318 t = br*br + bi*bi
1319 */
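	     /* Worked example: (3 + 2i) / (1 + 1i) gives t = 1*1 + 1*1 = 2,
		tr = (3*1 + 2*1) / 2 = 2.5 and ti = (2*1 - 3*1) / 2 = -0.5,
		i.e. the constant 2.5 - 0.5i.  */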
1320 tree magsquared
1321 = const_binop (PLUS_EXPR,
1322 const_binop (MULT_EXPR, r2, r2),
1323 const_binop (MULT_EXPR, i2, i2));
1324 tree t1
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r1, r2),
1327 const_binop (MULT_EXPR, i1, i2));
1328 tree t2
1329 = const_binop (MINUS_EXPR,
1330 const_binop (MULT_EXPR, i1, r2),
1331 const_binop (MULT_EXPR, r1, i2));
1332
1333 real = const_binop (code, t1, magsquared);
1334 imag = const_binop (code, t2, magsquared);
1335 }
1336 else
1337 {
1338 /* Keep this algorithm in sync with
1339 tree-complex.c:expand_complex_div_wide().
1340
1341 Expand complex division to scalars, modified algorithm to minimize
1342 overflow with wide input ranges. */
1343 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1344 fold_abs_const (r2, TREE_TYPE (type)),
1345 fold_abs_const (i2, TREE_TYPE (type)));
1346
1347 if (integer_nonzerop (compare))
1348 {
1349 /* In the TRUE branch, we compute
1350 ratio = br/bi;
1351 div = (br * ratio) + bi;
1352 tr = (ar * ratio) + ai;
1353 ti = (ai * ratio) - ar;
1354 tr = tr / div;
1355 ti = ti / div; */
1356 tree ratio = const_binop (code, r2, i2);
1357 tree div = const_binop (PLUS_EXPR, i2,
1358 const_binop (MULT_EXPR, r2, ratio));
1359 real = const_binop (MULT_EXPR, r1, ratio);
1360 real = const_binop (PLUS_EXPR, real, i1);
1361 real = const_binop (code, real, div);
1362
1363 imag = const_binop (MULT_EXPR, i1, ratio);
1364 imag = const_binop (MINUS_EXPR, imag, r1);
1365 imag = const_binop (code, imag, div);
1366 }
1367 else
1368 {
 1369 	      /* In the FALSE branch, we compute
 1370 		 ratio = bi/br;
 1371 		 div = (bi * ratio) + br;
 1372 		 tr = (ai * ratio) + ar;
 1373 		 ti = ai - (ar * ratio);
 1374 		 tr = tr / div;
 1375 		 ti = ti / div;  */
1376 tree ratio = const_binop (code, i2, r2);
1377 tree div = const_binop (PLUS_EXPR, r2,
1378 const_binop (MULT_EXPR, i2, ratio));
1379
1380 real = const_binop (MULT_EXPR, i1, ratio);
1381 real = const_binop (PLUS_EXPR, real, r1);
1382 real = const_binop (code, real, div);
1383
1384 imag = const_binop (MULT_EXPR, r1, ratio);
1385 imag = const_binop (MINUS_EXPR, i1, imag);
1386 imag = const_binop (code, imag, div);
1387 }
1388 }
1389 break;
1390
1391 default:
1392 return NULL_TREE;
1393 }
1394
1395 if (real && imag)
1396 return build_complex (type, real, imag);
1397 }
1398
1399 if (TREE_CODE (arg1) == VECTOR_CST
1400 && TREE_CODE (arg2) == VECTOR_CST)
1401 {
1402 tree type = TREE_TYPE (arg1);
1403 int count = TYPE_VECTOR_SUBPARTS (type), i;
1404 tree *elts = XALLOCAVEC (tree, count);
1405
1406 for (i = 0; i < count; i++)
1407 {
1408 tree elem1 = VECTOR_CST_ELT (arg1, i);
1409 tree elem2 = VECTOR_CST_ELT (arg2, i);
1410
1411 elts[i] = const_binop (code, elem1, elem2);
1412
1413 /* It is possible that const_binop cannot handle the given
 1414 	     code and returns NULL_TREE.  */
1415 if (elts[i] == NULL_TREE)
1416 return NULL_TREE;
1417 }
1418
1419 return build_vector (type, elts);
1420 }
1421
1422 /* Shifts allow a scalar offset for a vector. */
1423 if (TREE_CODE (arg1) == VECTOR_CST
1424 && TREE_CODE (arg2) == INTEGER_CST)
1425 {
1426 tree type = TREE_TYPE (arg1);
1427 int count = TYPE_VECTOR_SUBPARTS (type), i;
1428 tree *elts = XALLOCAVEC (tree, count);
1429
1430 if (code == VEC_LSHIFT_EXPR
1431 || code == VEC_RSHIFT_EXPR)
1432 {
1433 if (!host_integerp (arg2, 1))
1434 return NULL_TREE;
1435
1436 unsigned HOST_WIDE_INT shiftc = tree_low_cst (arg2, 1);
1437 unsigned HOST_WIDE_INT outerc = tree_low_cst (TYPE_SIZE (type), 1);
1438 unsigned HOST_WIDE_INT innerc
1439 = tree_low_cst (TYPE_SIZE (TREE_TYPE (type)), 1);
1440 if (shiftc >= outerc || (shiftc % innerc) != 0)
1441 return NULL_TREE;
1442 int offset = shiftc / innerc;
 1443 	  /* The direction of VEC_[LR]SHIFT_EXPR is endian-dependent.
 1444 	     For reductions the compiler always emits VEC_RSHIFT_EXPR;
 1445 	     for !BYTES_BIG_ENDIAN this picks the first vector element,
 1446 	     but for BYTES_BIG_ENDIAN the last element of the vector.  */
1447 if ((code == VEC_RSHIFT_EXPR) ^ (!BYTES_BIG_ENDIAN))
1448 offset = -offset;
1449 tree zero = build_zero_cst (TREE_TYPE (type));
1450 for (i = 0; i < count; i++)
1451 {
1452 if (i + offset < 0 || i + offset >= count)
1453 elts[i] = zero;
1454 else
1455 elts[i] = VECTOR_CST_ELT (arg1, i + offset);
1456 }
1457 }
1458 else
1459 for (i = 0; i < count; i++)
1460 {
1461 tree elem1 = VECTOR_CST_ELT (arg1, i);
1462
1463 elts[i] = const_binop (code, elem1, arg2);
1464
1465 /* It is possible that const_binop cannot handle the given
 1466 	       code and returns NULL_TREE.  */
1467 if (elts[i] == NULL_TREE)
1468 return NULL_TREE;
1469 }
1470
1471 return build_vector (type, elts);
1472 }
1473 return NULL_TREE;
1474 }
1475
1476 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1477 indicates which particular sizetype to create. */
1478
1479 tree
1480 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1481 {
1482 return build_int_cst (sizetype_tab[(int) kind], number);
1483 }
1484 \f
1485 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1486 is a tree code. The type of the result is taken from the operands.
1487 Both must be equivalent integer types, ala int_binop_types_match_p.
1488 If the operands are constant, so is the result. */
1489
1490 tree
1491 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1492 {
1493 tree type = TREE_TYPE (arg0);
1494
1495 if (arg0 == error_mark_node || arg1 == error_mark_node)
1496 return error_mark_node;
1497
1498 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1499 TREE_TYPE (arg1)));
1500
1501 /* Handle the special case of two integer constants faster. */
1502 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1503 {
1504 /* And some specific cases even faster than that. */
1505 if (code == PLUS_EXPR)
1506 {
1507 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1508 return arg1;
1509 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1510 return arg0;
1511 }
1512 else if (code == MINUS_EXPR)
1513 {
1514 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1515 return arg0;
1516 }
1517 else if (code == MULT_EXPR)
1518 {
1519 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1520 return arg1;
1521 }
1522
1523 /* Handle general case of two integer constants. For sizetype
1524 constant calculations we always want to know about overflow,
1525 even in the unsigned case. */
1526 return int_const_binop_1 (code, arg0, arg1, -1);
1527 }
1528
1529 return fold_build2_loc (loc, code, type, arg0, arg1);
1530 }
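/* For example, size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds directly to the sizetype constant 12; the -1 overflowable
   argument above means overflow is recorded even though sizetype is
   unsigned.  */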
1531
1532 /* Given two values, either both of sizetype or both of bitsizetype,
1533 compute the difference between the two values. Return the value
1534 in signed type corresponding to the type of the operands. */
1535
1536 tree
1537 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1538 {
1539 tree type = TREE_TYPE (arg0);
1540 tree ctype;
1541
1542 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1543 TREE_TYPE (arg1)));
1544
1545 /* If the type is already signed, just do the simple thing. */
1546 if (!TYPE_UNSIGNED (type))
1547 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1548
1549 if (type == sizetype)
1550 ctype = ssizetype;
1551 else if (type == bitsizetype)
1552 ctype = sbitsizetype;
1553 else
1554 ctype = signed_type_for (type);
1555
1556 /* If either operand is not a constant, do the conversions to the signed
1557 type and subtract. The hardware will do the right thing with any
1558 overflow in the subtraction. */
1559 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1560 return size_binop_loc (loc, MINUS_EXPR,
1561 fold_convert_loc (loc, ctype, arg0),
1562 fold_convert_loc (loc, ctype, arg1));
1563
1564 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1565 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1566 overflow) and negate (which can't either). Special-case a result
1567 of zero while we're here. */
1568 if (tree_int_cst_equal (arg0, arg1))
1569 return build_int_cst (ctype, 0);
1570 else if (tree_int_cst_lt (arg1, arg0))
1571 return fold_convert_loc (loc, ctype,
1572 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1573 else
1574 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1575 fold_convert_loc (loc, ctype,
1576 size_binop_loc (loc,
1577 MINUS_EXPR,
1578 arg1, arg0)));
1579 }
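/* For instance, size_diffop on the sizetype constants 4 and 8 yields
   the ssizetype constant -4; the result is always in the signed
   counterpart of the operands' type.  */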
1580 \f
1581 /* A subroutine of fold_convert_const handling conversions of an
1582 INTEGER_CST to another integer type. */
1583
1584 static tree
1585 fold_convert_const_int_from_int (tree type, const_tree arg1)
1586 {
1587 tree t;
1588
1589 /* Given an integer constant, make new constant with new type,
1590 appropriately sign-extended or truncated. */
1591 t = force_fit_type_double (type, tree_to_double_int (arg1),
1592 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1593 (TREE_INT_CST_HIGH (arg1) < 0
1594 && (TYPE_UNSIGNED (type)
1595 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1596 | TREE_OVERFLOW (arg1));
1597
1598 return t;
1599 }
1600
 1601 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1602 to an integer type. */
1603
1604 static tree
1605 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1606 {
1607 int overflow = 0;
1608 tree t;
1609
1610 /* The following code implements the floating point to integer
1611 conversion rules required by the Java Language Specification,
1612 that IEEE NaNs are mapped to zero and values that overflow
1613 the target precision saturate, i.e. values greater than
1614 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1615 are mapped to INT_MIN. These semantics are allowed by the
1616 C and C++ standards that simply state that the behavior of
1617 FP-to-integer conversion is unspecified upon overflow. */
1618
1619 double_int val;
1620 REAL_VALUE_TYPE r;
1621 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1622
1623 switch (code)
1624 {
1625 case FIX_TRUNC_EXPR:
1626 real_trunc (&r, VOIDmode, &x);
1627 break;
1628
1629 default:
1630 gcc_unreachable ();
1631 }
1632
1633 /* If R is NaN, return zero and show we have an overflow. */
1634 if (REAL_VALUE_ISNAN (r))
1635 {
1636 overflow = 1;
1637 val = double_int_zero;
1638 }
1639
1640 /* See if R is less than the lower bound or greater than the
1641 upper bound. */
1642
1643 if (! overflow)
1644 {
1645 tree lt = TYPE_MIN_VALUE (type);
1646 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1647 if (REAL_VALUES_LESS (r, l))
1648 {
1649 overflow = 1;
1650 val = tree_to_double_int (lt);
1651 }
1652 }
1653
1654 if (! overflow)
1655 {
1656 tree ut = TYPE_MAX_VALUE (type);
1657 if (ut)
1658 {
1659 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1660 if (REAL_VALUES_LESS (u, r))
1661 {
1662 overflow = 1;
1663 val = tree_to_double_int (ut);
1664 }
1665 }
1666 }
1667
1668 if (! overflow)
1669 real_to_integer2 ((HOST_WIDE_INT *) &val.low, &val.high, &r);
1670
1671 t = force_fit_type_double (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1672 return t;
1673 }
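/* For example, converting the REAL_CST 3.9 to a 32-bit int with
   FIX_TRUNC_EXPR yields 3; converting -1e30 saturates to INT_MIN and
   a NaN maps to 0, both with TREE_OVERFLOW set on the result.  */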
1674
1675 /* A subroutine of fold_convert_const handling conversions of a
1676 FIXED_CST to an integer type. */
1677
1678 static tree
1679 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1680 {
1681 tree t;
1682 double_int temp, temp_trunc;
1683 unsigned int mode;
1684
1685 /* Right shift FIXED_CST to temp by fbit. */
1686 temp = TREE_FIXED_CST (arg1).data;
1687 mode = TREE_FIXED_CST (arg1).mode;
1688 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1689 {
1690 temp = temp.rshift (GET_MODE_FBIT (mode),
1691 HOST_BITS_PER_DOUBLE_INT,
1692 SIGNED_FIXED_POINT_MODE_P (mode));
1693
1694 /* Left shift temp to temp_trunc by fbit. */
1695 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1696 HOST_BITS_PER_DOUBLE_INT,
1697 SIGNED_FIXED_POINT_MODE_P (mode));
1698 }
1699 else
1700 {
1701 temp = double_int_zero;
1702 temp_trunc = double_int_zero;
1703 }
1704
 1705   /* If FIXED_CST is negative, we need to round the value toward 0;
 1706      we do that by adding 1 to temp when the fractional bits are nonzero.  */
1707 if (SIGNED_FIXED_POINT_MODE_P (mode)
1708 && temp_trunc.is_negative ()
1709 && TREE_FIXED_CST (arg1).data != temp_trunc)
1710 temp += double_int_one;
1711
1712 /* Given a fixed-point constant, make new constant with new type,
1713 appropriately sign-extended or truncated. */
1714 t = force_fit_type_double (type, temp, -1,
1715 (temp.is_negative ()
1716 && (TYPE_UNSIGNED (type)
1717 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1718 | TREE_OVERFLOW (arg1));
1719
1720 return t;
1721 }
1722
 1723 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1724 to another floating point type. */
1725
1726 static tree
1727 fold_convert_const_real_from_real (tree type, const_tree arg1)
1728 {
1729 REAL_VALUE_TYPE value;
1730 tree t;
1731
1732 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1733 t = build_real (type, value);
1734
1735 /* If converting an infinity or NAN to a representation that doesn't
1736 have one, set the overflow bit so that we can produce some kind of
1737 error message at the appropriate point if necessary. It's not the
1738 most user-friendly message, but it's better than nothing. */
1739 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1740 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1741 TREE_OVERFLOW (t) = 1;
1742 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1743 && !MODE_HAS_NANS (TYPE_MODE (type)))
1744 TREE_OVERFLOW (t) = 1;
 1745   /* Regular overflow: the conversion produced an infinity in a mode
 1746      that can't represent infinities.  */
1747 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1748 && REAL_VALUE_ISINF (value)
1749 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1750 TREE_OVERFLOW (t) = 1;
1751 else
1752 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1753 return t;
1754 }
1755
 1756 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1757 to a floating point type. */
1758
1759 static tree
1760 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
1761 {
1762 REAL_VALUE_TYPE value;
1763 tree t;
1764
1765 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
1766 t = build_real (type, value);
1767
1768 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1769 return t;
1770 }
1771
 1772 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
1773 to another fixed-point type. */
1774
1775 static tree
1776 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
1777 {
1778 FIXED_VALUE_TYPE value;
1779 tree t;
1780 bool overflow_p;
1781
1782 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
1783 TYPE_SATURATING (type));
1784 t = build_fixed (type, value);
1785
1786 /* Propagate overflow flags. */
1787 if (overflow_p | TREE_OVERFLOW (arg1))
1788 TREE_OVERFLOW (t) = 1;
1789 return t;
1790 }
1791
 1792 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
1793 to a fixed-point type. */
1794
1795 static tree
1796 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
1797 {
1798 FIXED_VALUE_TYPE value;
1799 tree t;
1800 bool overflow_p;
1801
1802 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type),
1803 TREE_INT_CST (arg1),
1804 TYPE_UNSIGNED (TREE_TYPE (arg1)),
1805 TYPE_SATURATING (type));
1806 t = build_fixed (type, value);
1807
1808 /* Propagate overflow flags. */
1809 if (overflow_p | TREE_OVERFLOW (arg1))
1810 TREE_OVERFLOW (t) = 1;
1811 return t;
1812 }
1813
 1814 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1815 to a fixed-point type. */
1816
1817 static tree
1818 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
1819 {
1820 FIXED_VALUE_TYPE value;
1821 tree t;
1822 bool overflow_p;
1823
1824 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
1825 &TREE_REAL_CST (arg1),
1826 TYPE_SATURATING (type));
1827 t = build_fixed (type, value);
1828
1829 /* Propagate overflow flags. */
1830 if (overflow_p | TREE_OVERFLOW (arg1))
1831 TREE_OVERFLOW (t) = 1;
1832 return t;
1833 }
1834
1835 /* Attempt to fold type conversion operation CODE of expression ARG1 to
1836 type TYPE. If no simplification can be done return NULL_TREE. */
1837
1838 static tree
1839 fold_convert_const (enum tree_code code, tree type, tree arg1)
1840 {
1841 if (TREE_TYPE (arg1) == type)
1842 return arg1;
1843
1844 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
1845 || TREE_CODE (type) == OFFSET_TYPE)
1846 {
1847 if (TREE_CODE (arg1) == INTEGER_CST)
1848 return fold_convert_const_int_from_int (type, arg1);
1849 else if (TREE_CODE (arg1) == REAL_CST)
1850 return fold_convert_const_int_from_real (code, type, arg1);
1851 else if (TREE_CODE (arg1) == FIXED_CST)
1852 return fold_convert_const_int_from_fixed (type, arg1);
1853 }
1854 else if (TREE_CODE (type) == REAL_TYPE)
1855 {
1856 if (TREE_CODE (arg1) == INTEGER_CST)
1857 return build_real_from_int_cst (type, arg1);
1858 else if (TREE_CODE (arg1) == REAL_CST)
1859 return fold_convert_const_real_from_real (type, arg1);
1860 else if (TREE_CODE (arg1) == FIXED_CST)
1861 return fold_convert_const_real_from_fixed (type, arg1);
1862 }
1863 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
1864 {
1865 if (TREE_CODE (arg1) == FIXED_CST)
1866 return fold_convert_const_fixed_from_fixed (type, arg1);
1867 else if (TREE_CODE (arg1) == INTEGER_CST)
1868 return fold_convert_const_fixed_from_int (type, arg1);
1869 else if (TREE_CODE (arg1) == REAL_CST)
1870 return fold_convert_const_fixed_from_real (type, arg1);
1871 }
1872 return NULL_TREE;
1873 }
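
/* Editorial note, not in the original sources: a worked example of the
   dispatch above. Folding an int-to-float conversion of the constant 5,
   e.g. a hypothetical call

     tree five = build_int_cst (integer_type_node, 5);
     tree f = fold_convert_const (FLOAT_EXPR, float_type_node, five);

   reaches build_real_from_int_cst and yields a REAL_CST of 5.0, while
   folding (int) 1.7 with FIX_TRUNC_EXPR reaches
   fold_convert_const_int_from_real and yields the INTEGER_CST 1.
   A NULL_TREE return means no constant folding applies. */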
1874
1875 /* Construct a vector of zero elements of vector type TYPE. */
1876
1877 static tree
1878 build_zero_vector (tree type)
1879 {
1880 tree t;
1881
1882 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
1883 return build_vector_from_val (type, t);
1884 }
1885
1886 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
1887
1888 bool
1889 fold_convertible_p (const_tree type, const_tree arg)
1890 {
1891 tree orig = TREE_TYPE (arg);
1892
1893 if (type == orig)
1894 return true;
1895
1896 if (TREE_CODE (arg) == ERROR_MARK
1897 || TREE_CODE (type) == ERROR_MARK
1898 || TREE_CODE (orig) == ERROR_MARK)
1899 return false;
1900
1901 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
1902 return true;
1903
1904 switch (TREE_CODE (type))
1905 {
1906 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1907 case POINTER_TYPE: case REFERENCE_TYPE:
1908 case OFFSET_TYPE:
1909 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1910 || TREE_CODE (orig) == OFFSET_TYPE)
1911 return true;
1912 return (TREE_CODE (orig) == VECTOR_TYPE
1913 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1914
1915 case REAL_TYPE:
1916 case FIXED_POINT_TYPE:
1917 case COMPLEX_TYPE:
1918 case VECTOR_TYPE:
1919 case VOID_TYPE:
1920 return TREE_CODE (type) == TREE_CODE (orig);
1921
1922 default:
1923 return false;
1924 }
1925 }
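
/* Illustration (editorial): an int expression is fold_convertible_p to
   long_integer_type_node, since both types are INTEGRAL_TYPE_P and a
   NOP_EXPR suffices; the same expression is not fold_convertible_p to
   double_type_node, because an int-to-real conversion needs FLOAT_EXPR
   rather than a NOP_EXPR and the REAL_TYPE case above only accepts an
   orig of the same tree code. */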
1926
1927 /* Convert expression ARG to type TYPE. Used by the middle-end for
1928 simple conversions in preference to calling the front-end's convert. */
1929
1930 tree
1931 fold_convert_loc (location_t loc, tree type, tree arg)
1932 {
1933 tree orig = TREE_TYPE (arg);
1934 tree tem;
1935
1936 if (type == orig)
1937 return arg;
1938
1939 if (TREE_CODE (arg) == ERROR_MARK
1940 || TREE_CODE (type) == ERROR_MARK
1941 || TREE_CODE (orig) == ERROR_MARK)
1942 return error_mark_node;
1943
1944 switch (TREE_CODE (type))
1945 {
1946 case POINTER_TYPE:
1947 case REFERENCE_TYPE:
1948 /* Handle conversions between pointers to different address spaces. */
1949 if (POINTER_TYPE_P (orig)
1950 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
1951 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
1952 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
1953 /* fall through */
1954
1955 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
1956 case OFFSET_TYPE:
1957 if (TREE_CODE (arg) == INTEGER_CST)
1958 {
1959 tem = fold_convert_const (NOP_EXPR, type, arg);
1960 if (tem != NULL_TREE)
1961 return tem;
1962 }
1963 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
1964 || TREE_CODE (orig) == OFFSET_TYPE)
1965 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1966 if (TREE_CODE (orig) == COMPLEX_TYPE)
1967 return fold_convert_loc (loc, type,
1968 fold_build1_loc (loc, REALPART_EXPR,
1969 TREE_TYPE (orig), arg));
1970 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
1971 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
1972 return fold_build1_loc (loc, NOP_EXPR, type, arg);
1973
1974 case REAL_TYPE:
1975 if (TREE_CODE (arg) == INTEGER_CST)
1976 {
1977 tem = fold_convert_const (FLOAT_EXPR, type, arg);
1978 if (tem != NULL_TREE)
1979 return tem;
1980 }
1981 else if (TREE_CODE (arg) == REAL_CST)
1982 {
1983 tem = fold_convert_const (NOP_EXPR, type, arg);
1984 if (tem != NULL_TREE)
1985 return tem;
1986 }
1987 else if (TREE_CODE (arg) == FIXED_CST)
1988 {
1989 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
1990 if (tem != NULL_TREE)
1991 return tem;
1992 }
1993
1994 switch (TREE_CODE (orig))
1995 {
1996 case INTEGER_TYPE:
1997 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
1998 case POINTER_TYPE: case REFERENCE_TYPE:
1999 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2000
2001 case REAL_TYPE:
2002 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2003
2004 case FIXED_POINT_TYPE:
2005 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2006
2007 case COMPLEX_TYPE:
2008 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2009 return fold_convert_loc (loc, type, tem);
2010
2011 default:
2012 gcc_unreachable ();
2013 }
2014
2015 case FIXED_POINT_TYPE:
2016 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2017 || TREE_CODE (arg) == REAL_CST)
2018 {
2019 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2020 if (tem != NULL_TREE)
2021 goto fold_convert_exit;
2022 }
2023
2024 switch (TREE_CODE (orig))
2025 {
2026 case FIXED_POINT_TYPE:
2027 case INTEGER_TYPE:
2028 case ENUMERAL_TYPE:
2029 case BOOLEAN_TYPE:
2030 case REAL_TYPE:
2031 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2032
2033 case COMPLEX_TYPE:
2034 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2035 return fold_convert_loc (loc, type, tem);
2036
2037 default:
2038 gcc_unreachable ();
2039 }
2040
2041 case COMPLEX_TYPE:
2042 switch (TREE_CODE (orig))
2043 {
2044 case INTEGER_TYPE:
2045 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2046 case POINTER_TYPE: case REFERENCE_TYPE:
2047 case REAL_TYPE:
2048 case FIXED_POINT_TYPE:
2049 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2050 fold_convert_loc (loc, TREE_TYPE (type), arg),
2051 fold_convert_loc (loc, TREE_TYPE (type),
2052 integer_zero_node));
2053 case COMPLEX_TYPE:
2054 {
2055 tree rpart, ipart;
2056
2057 if (TREE_CODE (arg) == COMPLEX_EXPR)
2058 {
2059 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2060 TREE_OPERAND (arg, 0));
2061 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2062 TREE_OPERAND (arg, 1));
2063 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2064 }
2065
2066 arg = save_expr (arg);
2067 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2068 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2069 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2070 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2071 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2072 }
2073
2074 default:
2075 gcc_unreachable ();
2076 }
2077
2078 case VECTOR_TYPE:
2079 if (integer_zerop (arg))
2080 return build_zero_vector (type);
2081 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2082 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2083 || TREE_CODE (orig) == VECTOR_TYPE);
2084 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2085
2086 case VOID_TYPE:
2087 tem = fold_ignored_result (arg);
2088 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2089
2090 default:
2091 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2092 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2093 gcc_unreachable ();
2094 }
2095 fold_convert_exit:
2096 protected_set_expr_location_unshare (tem, loc);
2097 return tem;
2098 }
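
/* Worked example (editorial): converting ARG of type complex double to
   complex float takes the COMPLEX_TYPE path above and builds

     COMPLEX_EXPR <(float) REALPART_EXPR <ARG>,
                   (float) IMAGPART_EXPR <ARG>>

   with ARG wrapped in a SAVE_EXPR first so it is evaluated only once
   even though both parts refer to it. */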
2099 \f
2100 /* Return false if expr can be assumed not to be an lvalue, true
2101 otherwise. */
2102
2103 static bool
2104 maybe_lvalue_p (const_tree x)
2105 {
2106 /* We only need to wrap lvalue tree codes. */
2107 switch (TREE_CODE (x))
2108 {
2109 case VAR_DECL:
2110 case PARM_DECL:
2111 case RESULT_DECL:
2112 case LABEL_DECL:
2113 case FUNCTION_DECL:
2114 case SSA_NAME:
2115
2116 case COMPONENT_REF:
2117 case MEM_REF:
2118 case INDIRECT_REF:
2119 case ARRAY_REF:
2120 case ARRAY_RANGE_REF:
2121 case BIT_FIELD_REF:
2122 case OBJ_TYPE_REF:
2123
2124 case REALPART_EXPR:
2125 case IMAGPART_EXPR:
2126 case PREINCREMENT_EXPR:
2127 case PREDECREMENT_EXPR:
2128 case SAVE_EXPR:
2129 case TRY_CATCH_EXPR:
2130 case WITH_CLEANUP_EXPR:
2131 case COMPOUND_EXPR:
2132 case MODIFY_EXPR:
2133 case TARGET_EXPR:
2134 case COND_EXPR:
2135 case BIND_EXPR:
2136 break;
2137
2138 default:
2139 /* Assume the worst for front-end tree codes. */
2140 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2141 break;
2142 return false;
2143 }
2144
2145 return true;
2146 }
2147
2148 /* Return an expr equal to X but certainly not valid as an lvalue. */
2149
2150 tree
2151 non_lvalue_loc (location_t loc, tree x)
2152 {
2153 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2154 us. */
2155 if (in_gimple_form)
2156 return x;
2157
2158 if (! maybe_lvalue_p (x))
2159 return x;
2160 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2161 }
2162
2163 /* Nonzero means lvalues are limited to those valid in pedantic ANSI C.
2164 Zero means allow extended lvalues. */
2165
2166 int pedantic_lvalues;
2167
2168 /* When pedantic, return an expr equal to X but certainly not valid as a
2169 pedantic lvalue. Otherwise, return X. */
2170
2171 static tree
2172 pedantic_non_lvalue_loc (location_t loc, tree x)
2173 {
2174 if (pedantic_lvalues)
2175 return non_lvalue_loc (loc, x);
2176
2177 return protected_set_expr_location_unshare (x, loc);
2178 }
2179 \f
2180 /* Given a tree comparison code, return the code that is the logical inverse.
2181 It is generally not safe to do this for floating-point comparisons, except
2182 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2183 ERROR_MARK in this case. */
2184
2185 enum tree_code
2186 invert_tree_comparison (enum tree_code code, bool honor_nans)
2187 {
2188 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2189 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2190 return ERROR_MARK;
2191
2192 switch (code)
2193 {
2194 case EQ_EXPR:
2195 return NE_EXPR;
2196 case NE_EXPR:
2197 return EQ_EXPR;
2198 case GT_EXPR:
2199 return honor_nans ? UNLE_EXPR : LE_EXPR;
2200 case GE_EXPR:
2201 return honor_nans ? UNLT_EXPR : LT_EXPR;
2202 case LT_EXPR:
2203 return honor_nans ? UNGE_EXPR : GE_EXPR;
2204 case LE_EXPR:
2205 return honor_nans ? UNGT_EXPR : GT_EXPR;
2206 case LTGT_EXPR:
2207 return UNEQ_EXPR;
2208 case UNEQ_EXPR:
2209 return LTGT_EXPR;
2210 case UNGT_EXPR:
2211 return LE_EXPR;
2212 case UNGE_EXPR:
2213 return LT_EXPR;
2214 case UNLT_EXPR:
2215 return GE_EXPR;
2216 case UNLE_EXPR:
2217 return GT_EXPR;
2218 case ORDERED_EXPR:
2219 return UNORDERED_EXPR;
2220 case UNORDERED_EXPR:
2221 return ORDERED_EXPR;
2222 default:
2223 gcc_unreachable ();
2224 }
2225 }
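
/* Example (editorial): with HONOR_NANS set and -fno-trapping-math,
   inverting LT_EXPR yields UNGE_EXPR, i.e. !(x < y) becomes x UNGE y,
   which is also true when either operand is NaN; without NaNs it is
   plain GE_EXPR. Under -ftrapping-math only EQ/NE/ORDERED/UNORDERED
   may be inverted, since the other inversions could remove or add an
   invalid-operand trap on NaN. */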
2226
2227 /* Similar, but return the comparison that results if the operands are
2228 swapped. This is safe for floating-point. */
2229
2230 enum tree_code
2231 swap_tree_comparison (enum tree_code code)
2232 {
2233 switch (code)
2234 {
2235 case EQ_EXPR:
2236 case NE_EXPR:
2237 case ORDERED_EXPR:
2238 case UNORDERED_EXPR:
2239 case LTGT_EXPR:
2240 case UNEQ_EXPR:
2241 return code;
2242 case GT_EXPR:
2243 return LT_EXPR;
2244 case GE_EXPR:
2245 return LE_EXPR;
2246 case LT_EXPR:
2247 return GT_EXPR;
2248 case LE_EXPR:
2249 return GE_EXPR;
2250 case UNGT_EXPR:
2251 return UNLT_EXPR;
2252 case UNGE_EXPR:
2253 return UNLE_EXPR;
2254 case UNLT_EXPR:
2255 return UNGT_EXPR;
2256 case UNLE_EXPR:
2257 return UNGE_EXPR;
2258 default:
2259 gcc_unreachable ();
2260 }
2261 }
2262
2263
2264 /* Convert a comparison tree code from an enum tree_code representation
2265 into a compcode bit-based encoding. This function is the inverse of
2266 compcode_to_comparison. */
2267
2268 static enum comparison_code
2269 comparison_to_compcode (enum tree_code code)
2270 {
2271 switch (code)
2272 {
2273 case LT_EXPR:
2274 return COMPCODE_LT;
2275 case EQ_EXPR:
2276 return COMPCODE_EQ;
2277 case LE_EXPR:
2278 return COMPCODE_LE;
2279 case GT_EXPR:
2280 return COMPCODE_GT;
2281 case NE_EXPR:
2282 return COMPCODE_NE;
2283 case GE_EXPR:
2284 return COMPCODE_GE;
2285 case ORDERED_EXPR:
2286 return COMPCODE_ORD;
2287 case UNORDERED_EXPR:
2288 return COMPCODE_UNORD;
2289 case UNLT_EXPR:
2290 return COMPCODE_UNLT;
2291 case UNEQ_EXPR:
2292 return COMPCODE_UNEQ;
2293 case UNLE_EXPR:
2294 return COMPCODE_UNLE;
2295 case UNGT_EXPR:
2296 return COMPCODE_UNGT;
2297 case LTGT_EXPR:
2298 return COMPCODE_LTGT;
2299 case UNGE_EXPR:
2300 return COMPCODE_UNGE;
2301 default:
2302 gcc_unreachable ();
2303 }
2304 }
2305
2306 /* Convert a compcode bit-based encoding of a comparison operator back
2307 to GCC's enum tree_code representation. This function is the
2308 inverse of comparison_to_compcode. */
2309
2310 static enum tree_code
2311 compcode_to_comparison (enum comparison_code code)
2312 {
2313 switch (code)
2314 {
2315 case COMPCODE_LT:
2316 return LT_EXPR;
2317 case COMPCODE_EQ:
2318 return EQ_EXPR;
2319 case COMPCODE_LE:
2320 return LE_EXPR;
2321 case COMPCODE_GT:
2322 return GT_EXPR;
2323 case COMPCODE_NE:
2324 return NE_EXPR;
2325 case COMPCODE_GE:
2326 return GE_EXPR;
2327 case COMPCODE_ORD:
2328 return ORDERED_EXPR;
2329 case COMPCODE_UNORD:
2330 return UNORDERED_EXPR;
2331 case COMPCODE_UNLT:
2332 return UNLT_EXPR;
2333 case COMPCODE_UNEQ:
2334 return UNEQ_EXPR;
2335 case COMPCODE_UNLE:
2336 return UNLE_EXPR;
2337 case COMPCODE_UNGT:
2338 return UNGT_EXPR;
2339 case COMPCODE_LTGT:
2340 return LTGT_EXPR;
2341 case COMPCODE_UNGE:
2342 return UNGE_EXPR;
2343 default:
2344 gcc_unreachable ();
2345 }
2346 }
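
/* The point of the bit encoding (editorial note): each of LT, EQ, GT
   and UNORD gets its own bit (1, 2, 4 and 8), so compound codes are
   just bitwise ORs of primitive ones. For instance
   COMPCODE_LE == COMPCODE_LT | COMPCODE_EQ (1 | 2 == 3) and
   COMPCODE_UNGE == COMPCODE_UNORD | COMPCODE_GE (8 | 6 == 14), which
   is what lets combine_comparisons below merge comparisons with plain
   & and |. */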
2347
2348 /* Return a tree for the comparison which is the combination of
2349 doing the AND or OR (depending on CODE) of the two operations LCODE
2350 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2351 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2352 if this makes the transformation invalid. */
2353
2354 tree
2355 combine_comparisons (location_t loc,
2356 enum tree_code code, enum tree_code lcode,
2357 enum tree_code rcode, tree truth_type,
2358 tree ll_arg, tree lr_arg)
2359 {
2360 bool honor_nans = HONOR_NANS (TYPE_MODE (TREE_TYPE (ll_arg)));
2361 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2362 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2363 int compcode;
2364
2365 switch (code)
2366 {
2367 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2368 compcode = lcompcode & rcompcode;
2369 break;
2370
2371 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2372 compcode = lcompcode | rcompcode;
2373 break;
2374
2375 default:
2376 return NULL_TREE;
2377 }
2378
2379 if (!honor_nans)
2380 {
2381 /* Eliminate unordered comparisons, as well as LTGT and ORD
2382 which are not used unless the mode has NaNs. */
2383 compcode &= ~COMPCODE_UNORD;
2384 if (compcode == COMPCODE_LTGT)
2385 compcode = COMPCODE_NE;
2386 else if (compcode == COMPCODE_ORD)
2387 compcode = COMPCODE_TRUE;
2388 }
2389 else if (flag_trapping_math)
2390 {
2391 /* Check that the original operation and the optimized ones will trap
2392 under the same condition. */
2393 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2394 && (lcompcode != COMPCODE_EQ)
2395 && (lcompcode != COMPCODE_ORD);
2396 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2397 && (rcompcode != COMPCODE_EQ)
2398 && (rcompcode != COMPCODE_ORD);
2399 bool trap = (compcode & COMPCODE_UNORD) == 0
2400 && (compcode != COMPCODE_EQ)
2401 && (compcode != COMPCODE_ORD);
2402
2403 /* In a short-circuited boolean expression the LHS might be
2404 such that the RHS, if evaluated, will never trap. For
2405 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2406 if neither x nor y is NaN. (This is a mixed blessing: for
2407 example, the expression above will never trap, hence
2408 optimizing it to x < y would be invalid). */
2409 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2410 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2411 rtrap = false;
2412
2413 /* If the comparison was short-circuited, and only the RHS
2414 trapped, we may now generate a spurious trap. */
2415 if (rtrap && !ltrap
2416 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2417 return NULL_TREE;
2418
2419 /* If we changed the conditions that cause a trap, we lose. */
2420 if ((ltrap || rtrap) != trap)
2421 return NULL_TREE;
2422 }
2423
2424 if (compcode == COMPCODE_TRUE)
2425 return constant_boolean_node (true, truth_type);
2426 else if (compcode == COMPCODE_FALSE)
2427 return constant_boolean_node (false, truth_type);
2428 else
2429 {
2430 enum tree_code tcode;
2431
2432 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2433 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2434 }
2435 }
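
/* Worked example (editorial, assuming integer operands so honor_nans
   is false): combining (a < b) || (a == b) gives
   compcode = COMPCODE_LT | COMPCODE_EQ == COMPCODE_LE, so the call
   returns fold_build2_loc (loc, LE_EXPR, truth_type, a, b); combining
   (a < b) && (a == b) gives COMPCODE_LT & COMPCODE_EQ == 0, i.e.
   COMPCODE_FALSE, and a constant false node is returned. */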
2436 \f
2437 /* Return nonzero if two operands (typically of the same tree node)
2438 are necessarily equal. If either argument has side-effects this
2439 function returns zero. FLAGS modifies behavior as follows:
2440
2441 If OEP_ONLY_CONST is set, only return nonzero for constants.
2442 This function tests whether the operands are indistinguishable;
2443 it does not test whether they are equal using C's == operation.
2444 The distinction is important for IEEE floating point, because
2445 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2446 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2447
2448 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2449 even though it may hold multiple values during a function.
2450 This is because a GCC tree node guarantees that nothing else is
2451 executed between the evaluation of its "operands" (which may often
2452 be evaluated in arbitrary order). Hence if the operands themselves
2453 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2454 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2455 unset means assuming isochronic (or instantaneous) tree equivalence.
2456 Unless comparing arbitrary expression trees, such as from different
2457 statements, this flag can usually be left unset.
2458
2459 If OEP_PURE_SAME is set, then pure functions with identical arguments
2460 are considered the same. It is used when the caller has other ways
2461 to ensure that global memory is unchanged in between. */
2462
2463 int
2464 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2465 {
2466 /* If either is ERROR_MARK, they aren't equal. */
2467 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2468 || TREE_TYPE (arg0) == error_mark_node
2469 || TREE_TYPE (arg1) == error_mark_node)
2470 return 0;
2471
2472 /* Similarly, if either does not have a type (like a released SSA name),
2473 they aren't equal. */
2474 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2475 return 0;
2476
2477 /* Check equality of integer constants before bailing out due to
2478 precision differences. */
2479 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2480 return tree_int_cst_equal (arg0, arg1);
2481
2482 /* If the two types don't have the same signedness, then we can't consider
2483 them equal. We must check this before the STRIP_NOPS calls
2484 because they may change the signedness of the arguments. As pointers
2485 strictly don't have a signedness, require either two pointers or
2486 two non-pointers as well. */
2487 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2488 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2489 return 0;
2490
2491 /* We cannot consider pointers to different address spaces equal. */
2492 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2493 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2494 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2495 return 0;
2496
2497 /* If the two types don't have the same precision, then it is not safe
2498 to strip NOPs. */
2499 if (element_precision (TREE_TYPE (arg0))
2500 != element_precision (TREE_TYPE (arg1)))
2501 return 0;
2502
2503 STRIP_NOPS (arg0);
2504 STRIP_NOPS (arg1);
2505
2506 /* In case both args are comparisons but with different comparison
2507 code, try to swap the comparison operands of one arg to produce
2508 a match and compare that variant. */
2509 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2510 && COMPARISON_CLASS_P (arg0)
2511 && COMPARISON_CLASS_P (arg1))
2512 {
2513 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2514
2515 if (TREE_CODE (arg0) == swap_code)
2516 return operand_equal_p (TREE_OPERAND (arg0, 0),
2517 TREE_OPERAND (arg1, 1), flags)
2518 && operand_equal_p (TREE_OPERAND (arg0, 1),
2519 TREE_OPERAND (arg1, 0), flags);
2520 }
2521
2522 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2523 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2524 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2525 return 0;
2526
2527 /* This is needed for conversions and for COMPONENT_REF.
2528 Might as well play it safe and always test this. */
2529 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2530 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2531 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2532 return 0;
2533
2534 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2535 We don't care about side effects in that case because the SAVE_EXPR
2536 takes care of that for us. In all other cases, two expressions are
2537 equal if they have no side effects. If we have two identical
2538 expressions with side effects that should be treated the same due
2539 to the only side effects being identical SAVE_EXPR's, that will
2540 be detected in the recursive calls below.
2541 If we are taking an invariant address of two identical objects
2542 they are necessarily equal as well. */
2543 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2544 && (TREE_CODE (arg0) == SAVE_EXPR
2545 || (flags & OEP_CONSTANT_ADDRESS_OF)
2546 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2547 return 1;
2548
2549 /* Next handle constant cases, those for which we can return 1 even
2550 if ONLY_CONST is set. */
2551 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2552 switch (TREE_CODE (arg0))
2553 {
2554 case INTEGER_CST:
2555 return tree_int_cst_equal (arg0, arg1);
2556
2557 case FIXED_CST:
2558 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2559 TREE_FIXED_CST (arg1));
2560
2561 case REAL_CST:
2562 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2563 TREE_REAL_CST (arg1)))
2564 return 1;
2565
2566
2567 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0))))
2568 {
2569 /* If we do not distinguish between negative and positive zero,
2570 consider them equal. */
2571 if (real_zerop (arg0) && real_zerop (arg1))
2572 return 1;
2573 }
2574 return 0;
2575
2576 case VECTOR_CST:
2577 {
2578 unsigned i;
2579
2580 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2581 return 0;
2582
2583 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2584 {
2585 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2586 VECTOR_CST_ELT (arg1, i), flags))
2587 return 0;
2588 }
2589 return 1;
2590 }
2591
2592 case COMPLEX_CST:
2593 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2594 flags)
2595 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2596 flags));
2597
2598 case STRING_CST:
2599 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2600 && ! memcmp (TREE_STRING_POINTER (arg0),
2601 TREE_STRING_POINTER (arg1),
2602 TREE_STRING_LENGTH (arg0)));
2603
2604 case ADDR_EXPR:
2605 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2606 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2607 ? OEP_CONSTANT_ADDRESS_OF : 0);
2608 default:
2609 break;
2610 }
2611
2612 if (flags & OEP_ONLY_CONST)
2613 return 0;
2614
2615 /* Define macros to test an operand from arg0 and arg1 for equality and a
2616 variant that allows null and views null as being different from any
2617 non-null value. In the latter case, if either is null, they both
2618 must be; otherwise, do the normal comparison. */
2619 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2620 TREE_OPERAND (arg1, N), flags)
2621
2622 #define OP_SAME_WITH_NULL(N) \
2623 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2624 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2625
2626 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2627 {
2628 case tcc_unary:
2629 /* Two conversions are equal only if signedness and modes match. */
2630 switch (TREE_CODE (arg0))
2631 {
2632 CASE_CONVERT:
2633 case FIX_TRUNC_EXPR:
2634 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2635 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2636 return 0;
2637 break;
2638 default:
2639 break;
2640 }
2641
2642 return OP_SAME (0);
2643
2644
2645 case tcc_comparison:
2646 case tcc_binary:
2647 if (OP_SAME (0) && OP_SAME (1))
2648 return 1;
2649
2650 /* For commutative ops, allow the other order. */
2651 return (commutative_tree_code (TREE_CODE (arg0))
2652 && operand_equal_p (TREE_OPERAND (arg0, 0),
2653 TREE_OPERAND (arg1, 1), flags)
2654 && operand_equal_p (TREE_OPERAND (arg0, 1),
2655 TREE_OPERAND (arg1, 0), flags));
2656
2657 case tcc_reference:
2658 /* If either of the pointer (or reference) expressions we are
2659 dereferencing contains a side effect, these cannot be equal,
2660 but their addresses can be. */
2661 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2662 && (TREE_SIDE_EFFECTS (arg0)
2663 || TREE_SIDE_EFFECTS (arg1)))
2664 return 0;
2665
2666 switch (TREE_CODE (arg0))
2667 {
2668 case INDIRECT_REF:
2669 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2670 return OP_SAME (0);
2671
2672 case REALPART_EXPR:
2673 case IMAGPART_EXPR:
2674 return OP_SAME (0);
2675
2676 case TARGET_MEM_REF:
2677 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2678 /* Require equal extra operands and then fall through to MEM_REF
2679 handling of the two common operands. */
2680 if (!OP_SAME_WITH_NULL (2)
2681 || !OP_SAME_WITH_NULL (3)
2682 || !OP_SAME_WITH_NULL (4))
2683 return 0;
2684 /* Fallthru. */
2685 case MEM_REF:
2686 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2687 /* Require equal access sizes, and similar pointer types.
2688 We can have incomplete types for array references of
2689 variable-sized arrays from the Fortran frontend
2690 though. Also verify the types are compatible. */
2691 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2692 || (TYPE_SIZE (TREE_TYPE (arg0))
2693 && TYPE_SIZE (TREE_TYPE (arg1))
2694 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2695 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2696 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2697 && alias_ptr_types_compatible_p
2698 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2699 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2700 && OP_SAME (0) && OP_SAME (1));
2701
2702 case ARRAY_REF:
2703 case ARRAY_RANGE_REF:
2704 /* Operands 2 and 3 may be null.
2705 Compare the array index by value first if it is constant, as we
2706 may have different types but the same value here. */
2707 if (!OP_SAME (0))
2708 return 0;
2709 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2710 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2711 TREE_OPERAND (arg1, 1))
2712 || OP_SAME (1))
2713 && OP_SAME_WITH_NULL (2)
2714 && OP_SAME_WITH_NULL (3));
2715
2716 case COMPONENT_REF:
2717 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2718 may be NULL when we're called to compare MEM_EXPRs. */
2719 if (!OP_SAME_WITH_NULL (0)
2720 || !OP_SAME (1))
2721 return 0;
2722 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2723 return OP_SAME_WITH_NULL (2);
2724
2725 case BIT_FIELD_REF:
2726 if (!OP_SAME (0))
2727 return 0;
2728 flags &= ~OEP_CONSTANT_ADDRESS_OF;
2729 return OP_SAME (1) && OP_SAME (2);
2730
2731 default:
2732 return 0;
2733 }
2734
2735 case tcc_expression:
2736 switch (TREE_CODE (arg0))
2737 {
2738 case ADDR_EXPR:
2739 case TRUTH_NOT_EXPR:
2740 return OP_SAME (0);
2741
2742 case TRUTH_ANDIF_EXPR:
2743 case TRUTH_ORIF_EXPR:
2744 return OP_SAME (0) && OP_SAME (1);
2745
2746 case FMA_EXPR:
2747 case WIDEN_MULT_PLUS_EXPR:
2748 case WIDEN_MULT_MINUS_EXPR:
2749 if (!OP_SAME (2))
2750 return 0;
2751 /* The multiplication operands are commutative. */
2752 /* FALLTHRU */
2753
2754 case TRUTH_AND_EXPR:
2755 case TRUTH_OR_EXPR:
2756 case TRUTH_XOR_EXPR:
2757 if (OP_SAME (0) && OP_SAME (1))
2758 return 1;
2759
2760 /* Otherwise take into account this is a commutative operation. */
2761 return (operand_equal_p (TREE_OPERAND (arg0, 0),
2762 TREE_OPERAND (arg1, 1), flags)
2763 && operand_equal_p (TREE_OPERAND (arg0, 1),
2764 TREE_OPERAND (arg1, 0), flags));
2765
2766 case COND_EXPR:
2767 case VEC_COND_EXPR:
2768 case DOT_PROD_EXPR:
2769 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
2770
2771 default:
2772 return 0;
2773 }
2774
2775 case tcc_vl_exp:
2776 switch (TREE_CODE (arg0))
2777 {
2778 case CALL_EXPR:
2779 /* If the CALL_EXPRs call different functions, then they
2780 clearly cannot be equal. */
2781 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
2782 flags))
2783 return 0;
2784
2785 {
2786 unsigned int cef = call_expr_flags (arg0);
2787 if (flags & OEP_PURE_SAME)
2788 cef &= ECF_CONST | ECF_PURE;
2789 else
2790 cef &= ECF_CONST;
2791 if (!cef)
2792 return 0;
2793 }
2794
2795 /* Now see if all the arguments are the same. */
2796 {
2797 const_call_expr_arg_iterator iter0, iter1;
2798 const_tree a0, a1;
2799 for (a0 = first_const_call_expr_arg (arg0, &iter0),
2800 a1 = first_const_call_expr_arg (arg1, &iter1);
2801 a0 && a1;
2802 a0 = next_const_call_expr_arg (&iter0),
2803 a1 = next_const_call_expr_arg (&iter1))
2804 if (! operand_equal_p (a0, a1, flags))
2805 return 0;
2806
2807 /* If we get here and both argument lists are exhausted
2808 then the CALL_EXPRs are equal. */
2809 return ! (a0 || a1);
2810 }
2811 default:
2812 return 0;
2813 }
2814
2815 case tcc_declaration:
2816 /* Consider __builtin_sqrt equal to sqrt. */
2817 return (TREE_CODE (arg0) == FUNCTION_DECL
2818 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
2819 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
2820 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
2821
2822 default:
2823 return 0;
2824 }
2825
2826 #undef OP_SAME
2827 #undef OP_SAME_WITH_NULL
2828 }
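
/* Usage sketch (editorial, hypothetical caller): a typical use is to
   detect matching halves of an expression, e.g.

     if (operand_equal_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1), 0))
       ... both operands of T are structurally identical ...

   Flags of 0 request full structural equality; OEP_ONLY_CONST would
   restrict a nonzero answer to constants, and OEP_PURE_SAME would also
   equate calls to the same pure function with equal arguments. */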
2829 \f
2830 /* Similar to operand_equal_p, but see if ARG0 might have been made by
2831 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
2832
2833 When in doubt, return 0. */
2834
2835 static int
2836 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
2837 {
2838 int unsignedp1, unsignedpo;
2839 tree primarg0, primarg1, primother;
2840 unsigned int correct_width;
2841
2842 if (operand_equal_p (arg0, arg1, 0))
2843 return 1;
2844
2845 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
2846 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
2847 return 0;
2848
2849 /* Discard any conversions that don't change the modes of ARG0 and ARG1
2850 and see if the inner values are the same. This removes any
2851 signedness comparison, which doesn't matter here. */
2852 primarg0 = arg0, primarg1 = arg1;
2853 STRIP_NOPS (primarg0);
2854 STRIP_NOPS (primarg1);
2855 if (operand_equal_p (primarg0, primarg1, 0))
2856 return 1;
2857
2858 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
2859 actual comparison operand, ARG0.
2860
2861 First throw away any conversions to wider types
2862 already present in the operands. */
2863
2864 primarg1 = get_narrower (arg1, &unsignedp1);
2865 primother = get_narrower (other, &unsignedpo);
2866
2867 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
2868 if (unsignedp1 == unsignedpo
2869 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
2870 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
2871 {
2872 tree type = TREE_TYPE (arg0);
2873
2874 /* Make sure shorter operand is extended the right way
2875 to match the longer operand. */
2876 primarg1 = fold_convert (signed_or_unsigned_type_for
2877 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
2878
2879 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
2880 return 1;
2881 }
2882
2883 return 0;
2884 }
2885 \f
2886 /* See if ARG is an expression that is either a comparison or is performing
2887 arithmetic on comparisons. The comparisons must only be comparing
2888 two different values, which will be stored in *CVAL1 and *CVAL2; if
2889 they are nonzero it means that some operands have already been found.
2890 No variables may be used anywhere else in the expression except in the
2891 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
2892 the expression and save_expr needs to be called with CVAL1 and CVAL2.
2893
2894 If this is true, return 1. Otherwise, return zero. */
2895
2896 static int
2897 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
2898 {
2899 enum tree_code code = TREE_CODE (arg);
2900 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2901
2902 /* We can handle some of the tcc_expression cases here. */
2903 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2904 tclass = tcc_unary;
2905 else if (tclass == tcc_expression
2906 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
2907 || code == COMPOUND_EXPR))
2908 tclass = tcc_binary;
2909
2910 else if (tclass == tcc_expression && code == SAVE_EXPR
2911 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
2912 {
2913 /* If we've already found a CVAL1 or CVAL2, this expression is
2914 too complex to handle. */
2915 if (*cval1 || *cval2)
2916 return 0;
2917
2918 tclass = tcc_unary;
2919 *save_p = 1;
2920 }
2921
2922 switch (tclass)
2923 {
2924 case tcc_unary:
2925 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
2926
2927 case tcc_binary:
2928 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
2929 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2930 cval1, cval2, save_p));
2931
2932 case tcc_constant:
2933 return 1;
2934
2935 case tcc_expression:
2936 if (code == COND_EXPR)
2937 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
2938 cval1, cval2, save_p)
2939 && twoval_comparison_p (TREE_OPERAND (arg, 1),
2940 cval1, cval2, save_p)
2941 && twoval_comparison_p (TREE_OPERAND (arg, 2),
2942 cval1, cval2, save_p));
2943 return 0;
2944
2945 case tcc_comparison:
2946 /* First see if we can handle the first operand, then the second. For
2947 the second operand, we know *CVAL1 can't be zero. It must be that
2948 one side of the comparison is each of the values; test for the
2949 case where this isn't true by failing if the two operands
2950 are the same. */
2951
2952 if (operand_equal_p (TREE_OPERAND (arg, 0),
2953 TREE_OPERAND (arg, 1), 0))
2954 return 0;
2955
2956 if (*cval1 == 0)
2957 *cval1 = TREE_OPERAND (arg, 0);
2958 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
2959 ;
2960 else if (*cval2 == 0)
2961 *cval2 = TREE_OPERAND (arg, 0);
2962 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
2963 ;
2964 else
2965 return 0;
2966
2967 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
2968 ;
2969 else if (*cval2 == 0)
2970 *cval2 = TREE_OPERAND (arg, 1);
2971 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
2972 ;
2973 else
2974 return 0;
2975
2976 return 1;
2977
2978 default:
2979 return 0;
2980 }
2981 }
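
/* Example (editorial): for ARG = (a < b) && (b > a), the first
   comparison records *CVAL1 = a and *CVAL2 = b; both operands of the
   second comparison match the recorded values, so the result is 1.
   For ARG = (a < b) && (c > d), the third distinct value c matches
   neither *CVAL1 nor *CVAL2, so the result is 0. */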
2982 \f
2983 /* ARG is a tree that is known to contain just arithmetic operations and
2984 comparisons. Evaluate the operations in the tree substituting NEW0 for
2985 any occurrence of OLD0 as an operand of a comparison and likewise for
2986 NEW1 and OLD1. */
2987
2988 static tree
2989 eval_subst (location_t loc, tree arg, tree old0, tree new0,
2990 tree old1, tree new1)
2991 {
2992 tree type = TREE_TYPE (arg);
2993 enum tree_code code = TREE_CODE (arg);
2994 enum tree_code_class tclass = TREE_CODE_CLASS (code);
2995
2996 /* We can handle some of the tcc_expression cases here. */
2997 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
2998 tclass = tcc_unary;
2999 else if (tclass == tcc_expression
3000 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3001 tclass = tcc_binary;
3002
3003 switch (tclass)
3004 {
3005 case tcc_unary:
3006 return fold_build1_loc (loc, code, type,
3007 eval_subst (loc, TREE_OPERAND (arg, 0),
3008 old0, new0, old1, new1));
3009
3010 case tcc_binary:
3011 return fold_build2_loc (loc, code, type,
3012 eval_subst (loc, TREE_OPERAND (arg, 0),
3013 old0, new0, old1, new1),
3014 eval_subst (loc, TREE_OPERAND (arg, 1),
3015 old0, new0, old1, new1));
3016
3017 case tcc_expression:
3018 switch (code)
3019 {
3020 case SAVE_EXPR:
3021 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3022 old1, new1);
3023
3024 case COMPOUND_EXPR:
3025 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3026 old1, new1);
3027
3028 case COND_EXPR:
3029 return fold_build3_loc (loc, code, type,
3030 eval_subst (loc, TREE_OPERAND (arg, 0),
3031 old0, new0, old1, new1),
3032 eval_subst (loc, TREE_OPERAND (arg, 1),
3033 old0, new0, old1, new1),
3034 eval_subst (loc, TREE_OPERAND (arg, 2),
3035 old0, new0, old1, new1));
3036 default:
3037 break;
3038 }
3039 /* Fall through - ??? */
3040
3041 case tcc_comparison:
3042 {
3043 tree arg0 = TREE_OPERAND (arg, 0);
3044 tree arg1 = TREE_OPERAND (arg, 1);
3045
3046 /* We need to check both for exact equality and tree equality. The
3047 former will be true if the operand has a side-effect. In that
3048 case, we know the operand occurred exactly once. */
3049
3050 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3051 arg0 = new0;
3052 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3053 arg0 = new1;
3054
3055 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3056 arg1 = new0;
3057 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3058 arg1 = new1;
3059
3060 return fold_build2_loc (loc, code, type, arg0, arg1);
3061 }
3062
3063 default:
3064 return arg;
3065 }
3066 }
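
/* Example (editorial): eval_subst on ARG = (x < y) || (x == z) with
   OLD0 = x, NEW0 = a, OLD1 = y, NEW1 = b rebuilds the tree as
   (a < b) || (a == z): comparison operands equal to OLD0 or OLD1 are
   substituted, and everything else is rebuilt unchanged. */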
3067 \f
3068 /* Return a tree for the case when the result of an expression is RESULT
3069 converted to TYPE and OMITTED was previously an operand of the expression
3070 but is now not needed (e.g., we folded OMITTED * 0).
3071
3072 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3073 the conversion of RESULT to TYPE. */
3074
3075 tree
3076 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3077 {
3078 tree t = fold_convert_loc (loc, type, result);
3079
3080 /* If the resulting operand is an empty statement, just return the omitted
3081 statement cast to void. */
3082 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3083 return build1_loc (loc, NOP_EXPR, void_type_node,
3084 fold_ignored_result (omitted));
3085
3086 if (TREE_SIDE_EFFECTS (omitted))
3087 return build2_loc (loc, COMPOUND_EXPR, type,
3088 fold_ignored_result (omitted), t);
3089
3090 return non_lvalue_loc (loc, t);
3091 }
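
/* For instance (editorial): when f () * 0 folds to 0, a caller can use
   omit_one_operand_loc (loc, type, integer_zero_node, <the call>),
   which yields COMPOUND_EXPR <f (), 0>: the call's side effects are
   preserved while the multiplication disappears. Without side effects
   the result is just the converted constant, wrapped by
   non_lvalue_loc. */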
3092
3093 /* Similar, but call pedantic_non_lvalue instead of non_lvalue. */
3094
3095 static tree
3096 pedantic_omit_one_operand_loc (location_t loc, tree type, tree result,
3097 tree omitted)
3098 {
3099 tree t = fold_convert_loc (loc, type, result);
3100
3101 /* If the resulting operand is an empty statement, just return the omitted
3102 statement cast to void. */
3103 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3104 return build1_loc (loc, NOP_EXPR, void_type_node,
3105 fold_ignored_result (omitted));
3106
3107 if (TREE_SIDE_EFFECTS (omitted))
3108 return build2_loc (loc, COMPOUND_EXPR, type,
3109 fold_ignored_result (omitted), t);
3110
3111 return pedantic_non_lvalue_loc (loc, t);
3112 }
3113
3114 /* Return a tree for the case when the result of an expression is RESULT
3115 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3116 of the expression but are now not needed.
3117
3118 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3119 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3120 evaluated before OMITTED2. Otherwise, if neither has side effects,
3121 just do the conversion of RESULT to TYPE. */
3122
3123 tree
3124 omit_two_operands_loc (location_t loc, tree type, tree result,
3125 tree omitted1, tree omitted2)
3126 {
3127 tree t = fold_convert_loc (loc, type, result);
3128
3129 if (TREE_SIDE_EFFECTS (omitted2))
3130 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3131 if (TREE_SIDE_EFFECTS (omitted1))
3132 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3133
3134 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3135 }
3136
3137 \f
3138 /* Return a simplified tree node for the truth-negation of ARG. This
3139 never alters ARG itself. We assume that ARG is an operation that
3140 returns a truth value (0 or 1).
3141
3142 FIXME: one would think we would fold the result, but it causes
3143 problems with the dominator optimizer. */
3144
3145 static tree
3146 fold_truth_not_expr (location_t loc, tree arg)
3147 {
3148 tree type = TREE_TYPE (arg);
3149 enum tree_code code = TREE_CODE (arg);
3150 location_t loc1, loc2;
3151
3152 /* If this is a comparison, we can simply invert it, except for
3153 floating-point non-equality comparisons, in which case we just
3154 enclose a TRUTH_NOT_EXPR around what we have. */
3155
3156 if (TREE_CODE_CLASS (code) == tcc_comparison)
3157 {
3158 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3159 if (FLOAT_TYPE_P (op_type)
3160 && flag_trapping_math
3161 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3162 && code != NE_EXPR && code != EQ_EXPR)
3163 return NULL_TREE;
3164
3165 code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (op_type)));
3166 if (code == ERROR_MARK)
3167 return NULL_TREE;
3168
3169 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3170 TREE_OPERAND (arg, 1));
3171 }
3172
3173 switch (code)
3174 {
3175 case INTEGER_CST:
3176 return constant_boolean_node (integer_zerop (arg), type);
3177
3178 case TRUTH_AND_EXPR:
3179 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3180 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3181 return build2_loc (loc, TRUTH_OR_EXPR, type,
3182 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3183 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3184
3185 case TRUTH_OR_EXPR:
3186 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3187 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3188 return build2_loc (loc, TRUTH_AND_EXPR, type,
3189 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3190 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3191
3192 case TRUTH_XOR_EXPR:
3193 /* Here we can invert either operand. We invert the first operand
3194 unless the second operand is a TRUTH_NOT_EXPR in which case our
3195 result is the XOR of the first operand with the inside of the
3196 negation of the second operand. */
3197
3198 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3199 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3200 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3201 else
3202 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3203 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3204 TREE_OPERAND (arg, 1));
3205
3206 case TRUTH_ANDIF_EXPR:
3207 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3208 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3209 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3210 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3211 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3212
3213 case TRUTH_ORIF_EXPR:
3214 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3215 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3216 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3217 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3218 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3219
3220 case TRUTH_NOT_EXPR:
3221 return TREE_OPERAND (arg, 0);
3222
3223 case COND_EXPR:
3224 {
3225 tree arg1 = TREE_OPERAND (arg, 1);
3226 tree arg2 = TREE_OPERAND (arg, 2);
3227
3228 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3229 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3230
3231 /* A COND_EXPR may have a throw as one operand, which
3232 then has void type. Just leave void operands
3233 as they are. */
3234 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3235 VOID_TYPE_P (TREE_TYPE (arg1))
3236 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3237 VOID_TYPE_P (TREE_TYPE (arg2))
3238 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3239 }
3240
3241 case COMPOUND_EXPR:
3242 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3243 return build2_loc (loc, COMPOUND_EXPR, type,
3244 TREE_OPERAND (arg, 0),
3245 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3246
3247 case NON_LVALUE_EXPR:
3248 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3249 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3250
3251 CASE_CONVERT:
3252 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3253 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3254
3255 /* ... fall through ... */
3256
3257 case FLOAT_EXPR:
3258 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3259 return build1_loc (loc, TREE_CODE (arg), type,
3260 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3261
3262 case BIT_AND_EXPR:
3263 if (!integer_onep (TREE_OPERAND (arg, 1)))
3264 return NULL_TREE;
3265 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3266
3267 case SAVE_EXPR:
3268 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3269
3270 case CLEANUP_POINT_EXPR:
3271 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3272 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3273 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3274
3275 default:
3276 return NULL_TREE;
3277 }
3278 }
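
/* Worked example (editorial): negating a && !b via the TRUTH_AND_EXPR
   case yields !a || b, since inverting the inner TRUTH_NOT_EXPR just
   strips it; negating a comparison such as x == y goes through
   invert_tree_comparison and yields x != y directly. */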
3279
3280 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3281 assume that ARG is an operation that returns a truth value (0 or 1
3282 for scalars, 0 or -1 for vectors). Return the folded expression if
3283 folding is successful. Otherwise, return NULL_TREE. */
3284
3285 static tree
3286 fold_invert_truthvalue (location_t loc, tree arg)
3287 {
3288 tree type = TREE_TYPE (arg);
3289 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3290 ? BIT_NOT_EXPR
3291 : TRUTH_NOT_EXPR,
3292 type, arg);
3293 }
3294
3295 /* Return a simplified tree node for the truth-negation of ARG. This
3296 never alters ARG itself. We assume that ARG is an operation that
3297 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3298
3299 tree
3300 invert_truthvalue_loc (location_t loc, tree arg)
3301 {
3302 if (TREE_CODE (arg) == ERROR_MARK)
3303 return arg;
3304
3305 tree type = TREE_TYPE (arg);
3306 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3307 ? BIT_NOT_EXPR
3308 : TRUTH_NOT_EXPR,
3309 type, arg);
3310 }
3311
3312 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3313 operands are another bit-wise operation with a common input. If so,
3314 distribute the bit operations to save an operation and possibly two if
3315 constants are involved. For example, convert
3316 (A | B) & (A | C) into A | (B & C)
3317 Further simplification will occur if B and C are constants.
3318
3319 If this optimization cannot be done, 0 will be returned. */
3320
3321 static tree
3322 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3323 tree arg0, tree arg1)
3324 {
3325 tree common;
3326 tree left, right;
3327
3328 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3329 || TREE_CODE (arg0) == code
3330 || (TREE_CODE (arg0) != BIT_AND_EXPR
3331 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3332 return 0;
3333
3334 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3335 {
3336 common = TREE_OPERAND (arg0, 0);
3337 left = TREE_OPERAND (arg0, 1);
3338 right = TREE_OPERAND (arg1, 1);
3339 }
3340 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3341 {
3342 common = TREE_OPERAND (arg0, 0);
3343 left = TREE_OPERAND (arg0, 1);
3344 right = TREE_OPERAND (arg1, 0);
3345 }
3346 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3347 {
3348 common = TREE_OPERAND (arg0, 1);
3349 left = TREE_OPERAND (arg0, 0);
3350 right = TREE_OPERAND (arg1, 1);
3351 }
3352 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3353 {
3354 common = TREE_OPERAND (arg0, 1);
3355 left = TREE_OPERAND (arg0, 0);
3356 right = TREE_OPERAND (arg1, 0);
3357 }
3358 else
3359 return 0;
3360
3361 common = fold_convert_loc (loc, type, common);
3362 left = fold_convert_loc (loc, type, left);
3363 right = fold_convert_loc (loc, type, right);
3364 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3365 fold_build2_loc (loc, code, type, left, right));
3366 }
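
/* Concrete instance (editorial): (x | 0xf0) & (x | 0x0f) matches the
   first operand pairing above with common = x, left = 0xf0 and
   right = 0x0f, and becomes x | (0xf0 & 0x0f), i.e. x | 0, which
   subsequent folding reduces to plain x. */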
3367
3368 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3369 with code CODE. This optimization is unsafe. */
3370 static tree
3371 distribute_real_division (location_t loc, enum tree_code code, tree type,
3372 tree arg0, tree arg1)
3373 {
3374 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3375 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3376
3377 /* (A / C) +- (B / C) -> (A +- B) / C. */
3378 if (mul0 == mul1
3379 && operand_equal_p (TREE_OPERAND (arg0, 1),
3380 TREE_OPERAND (arg1, 1), 0))
3381 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3382 fold_build2_loc (loc, code, type,
3383 TREE_OPERAND (arg0, 0),
3384 TREE_OPERAND (arg1, 0)),
3385 TREE_OPERAND (arg0, 1));
3386
3387 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3388 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3389 TREE_OPERAND (arg1, 0), 0)
3390 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3391 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3392 {
3393 REAL_VALUE_TYPE r0, r1;
3394 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3395 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3396 if (!mul0)
3397 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3398 if (!mul1)
3399 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3400 real_arithmetic (&r0, code, &r0, &r1);
3401 return fold_build2_loc (loc, MULT_EXPR, type,
3402 TREE_OPERAND (arg0, 0),
3403 build_real (type, r0));
3404 }
3405
3406 return NULL_TREE;
3407 }
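
/* Example of the unsafe transform (editorial): x/4.0 + x/4.0 matches
   the first pattern and becomes (x + x) / 4.0, while x/4.0 + x/8.0
   matches the second and becomes x * (0.25 + 0.125) == x * 0.375.
   Both can change rounding and exception behaviour, hence the
   "unsafe" caveat in the comment above. */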
3408 \f
3409 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3410 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3411
3412 static tree
3413 make_bit_field_ref (location_t loc, tree inner, tree type,
3414 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3415 {
3416 tree result, bftype;
3417
3418 if (bitpos == 0)
3419 {
3420 tree size = TYPE_SIZE (TREE_TYPE (inner));
3421 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3422 || POINTER_TYPE_P (TREE_TYPE (inner)))
3423 && host_integerp (size, 0)
3424 && tree_low_cst (size, 0) == bitsize)
3425 return fold_convert_loc (loc, type, inner);
3426 }
3427
3428 bftype = type;
3429 if (TYPE_PRECISION (bftype) != bitsize
3430 || TYPE_UNSIGNED (bftype) == !unsignedp)
3431 bftype = build_nonstandard_integer_type (bitsize, 0);
3432
3433 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3434 size_int (bitsize), bitsize_int (bitpos));
3435
3436 if (bftype != type)
3437 result = fold_convert_loc (loc, type, result);
3438
3439 return result;
3440 }
3441
3442 /* Optimize a bit-field compare.
3443
3444 There are two cases: the first is a compare against a constant and the
3445 second is a comparison of two items where the fields are at the same
3446 bit position relative to the start of a chunk (byte, halfword, word)
3447 large enough to contain it. In these cases we can avoid the shift
3448 implicit in bitfield extractions.
3449
3450 For constants, we emit a compare of the shifted constant with the
3451 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3452 compared. For two fields at the same position, we do the ANDs with the
3453 similar mask and compare the result of the ANDs.
3454
3455 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3456 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3457 are the left and right operands of the comparison, respectively.
3458
3459 If the optimization described above can be done, we return the resulting
3460 tree. Otherwise we return zero. */
3461
3462 static tree
3463 optimize_bit_field_compare (location_t loc, enum tree_code code,
3464 tree compare_type, tree lhs, tree rhs)
3465 {
3466 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3467 tree type = TREE_TYPE (lhs);
3468 tree signed_type, unsigned_type;
3469 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3470 enum machine_mode lmode, rmode, nmode;
3471 int lunsignedp, runsignedp;
3472 int lvolatilep = 0, rvolatilep = 0;
3473 tree linner, rinner = NULL_TREE;
3474 tree mask;
3475 tree offset;
3476
3477 /* Get all the information about the extractions being done. If the bit size
3478 is the same as the size of the underlying object, we aren't doing an
3479 extraction at all and so can do nothing. We also don't want to
3480 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3481 then will no longer be able to replace it. */
3482 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3483 &lunsignedp, &lvolatilep, false);
3484 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3485 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3486 return 0;
3487
3488 if (!const_p)
3489 {
3490 /* If this is not a constant, we can only do something if bit positions,
3491 sizes, and signedness are the same. */
3492 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3493 &runsignedp, &rvolatilep, false);
3494
3495 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3496 || lunsignedp != runsignedp || offset != 0
3497 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3498 return 0;
3499 }
3500
3501 /* See if we can find a mode to refer to this field. We should be able to,
3502 but fail if we can't. */
3503 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3504 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3505 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3506 TYPE_ALIGN (TREE_TYPE (rinner))),
3507 word_mode, false);
3508 if (nmode == VOIDmode)
3509 return 0;
3510
3511 /* Set signed and unsigned types of the precision of this mode for the
3512 shifts below. */
3513 signed_type = lang_hooks.types.type_for_mode (nmode, 0);
3514 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3515
3516 /* Compute the bit position and size for the new reference and our offset
3517 within it. If the new reference is the same size as the original, we
3518 won't optimize anything, so return zero. */
3519 nbitsize = GET_MODE_BITSIZE (nmode);
3520 nbitpos = lbitpos & ~ (nbitsize - 1);
3521 lbitpos -= nbitpos;
3522 if (nbitsize == lbitsize)
3523 return 0;
3524
3525 if (BYTES_BIG_ENDIAN)
3526 lbitpos = nbitsize - lbitsize - lbitpos;
3527
3528 /* Make the mask to be used against the extracted field. */
3529 mask = build_int_cst_type (unsigned_type, -1);
3530 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3531 mask = const_binop (RSHIFT_EXPR, mask,
3532 size_int (nbitsize - lbitsize - lbitpos));
3533
3534 if (! const_p)
3535 /* If not comparing with a constant, just rework the comparison
3536 and return. */
3537 return fold_build2_loc (loc, code, compare_type,
3538 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3539 make_bit_field_ref (loc, linner,
3540 unsigned_type,
3541 nbitsize, nbitpos,
3542 1),
3543 mask),
3544 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3545 make_bit_field_ref (loc, rinner,
3546 unsigned_type,
3547 nbitsize, nbitpos,
3548 1),
3549 mask));
3550
3551 /* Otherwise, we are handling the constant case. See if the constant is too
3552 big for the field. Warn and return a tree for 0 (false) if so. We do
3553 this not only for its own sake, but to avoid having to test for this
3554 error case below. If we didn't, we might generate wrong code.
3555
3556 For unsigned fields, the constant shifted right by the field length should
3557 be all zero. For signed fields, the high-order bits should agree with
3558 the sign bit. */
3559
3560 if (lunsignedp)
3561 {
3562 if (! integer_zerop (const_binop (RSHIFT_EXPR,
3563 fold_convert_loc (loc,
3564 unsigned_type, rhs),
3565 size_int (lbitsize))))
3566 {
3567 warning (0, "comparison is always %d due to width of bit-field",
3568 code == NE_EXPR);
3569 return constant_boolean_node (code == NE_EXPR, compare_type);
3570 }
3571 }
3572 else
3573 {
3574 tree tem = const_binop (RSHIFT_EXPR,
3575 fold_convert_loc (loc, signed_type, rhs),
3576 size_int (lbitsize - 1));
3577 if (! integer_zerop (tem) && ! integer_all_onesp (tem))
3578 {
3579 warning (0, "comparison is always %d due to width of bit-field",
3580 code == NE_EXPR);
3581 return constant_boolean_node (code == NE_EXPR, compare_type);
3582 }
3583 }
3584
3585 /* Single-bit compares should always be against zero. */
3586 if (lbitsize == 1 && ! integer_zerop (rhs))
3587 {
3588 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3589 rhs = build_int_cst (type, 0);
3590 }
3591
3592 /* Make a new bitfield reference, shift the constant over the
3593 appropriate number of bits and mask it with the computed mask
3594      (in case this was a signed field).  If RHS was changed above, use the new constant.  */
3595 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3596
3597 rhs = const_binop (BIT_AND_EXPR,
3598 const_binop (LSHIFT_EXPR,
3599 fold_convert_loc (loc, unsigned_type, rhs),
3600 size_int (lbitpos)),
3601 mask);
3602
3603 lhs = build2_loc (loc, code, compare_type,
3604 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3605 return lhs;
3606 }
3607 \f
3608 /* Subroutine for fold_truth_andor_1: decode a field reference.
3609
3610 If EXP is a comparison reference, we return the innermost reference.
3611
3612 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3613 set to the starting bit number.
3614
3615 If the innermost field can be completely contained in a mode-sized
3616 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3617
3618    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3619 otherwise it is not changed.
3620
3621 *PUNSIGNEDP is set to the signedness of the field.
3622
3623 *PMASK is set to the mask used. This is either contained in a
3624 BIT_AND_EXPR or derived from the width of the field.
3625
3626 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3627
3628 Return 0 if this is not a component reference or is one that we can't
3629 do anything with. */
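/* Illustrative sketch with a hypothetical field: if EXP is "s.f & 6",
   where s.f is an unsigned 8-bit bit-field, we return the reference to
   s, set *PBITSIZE to 8, *PAND_MASK to 6, and *PMASK to the 8-bit
   all-ones mask ANDed with 6, i.e. 6.  */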
3630
3631 static tree
3632 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3633 HOST_WIDE_INT *pbitpos, enum machine_mode *pmode,
3634 int *punsignedp, int *pvolatilep,
3635 tree *pmask, tree *pand_mask)
3636 {
3637 tree outer_type = 0;
3638 tree and_mask = 0;
3639 tree mask, inner, offset;
3640 tree unsigned_type;
3641 unsigned int precision;
3642
3643 /* All the optimizations using this function assume integer fields.
3644 There are problems with FP fields since the type_for_size call
3645 below can fail for, e.g., XFmode. */
3646 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3647 return 0;
3648
3649 /* We are interested in the bare arrangement of bits, so strip everything
3650 that doesn't affect the machine mode. However, record the type of the
3651 outermost expression if it may matter below. */
3652 if (CONVERT_EXPR_P (exp)
3653 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3654 outer_type = TREE_TYPE (exp);
3655 STRIP_NOPS (exp);
3656
3657 if (TREE_CODE (exp) == BIT_AND_EXPR)
3658 {
3659 and_mask = TREE_OPERAND (exp, 1);
3660 exp = TREE_OPERAND (exp, 0);
3661 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3662 if (TREE_CODE (and_mask) != INTEGER_CST)
3663 return 0;
3664 }
3665
3666 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3667 punsignedp, pvolatilep, false);
3668 if ((inner == exp && and_mask == 0)
3669 || *pbitsize < 0 || offset != 0
3670 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3671 return 0;
3672
3673 /* If the number of bits in the reference is the same as the bitsize of
3674 the outer type, then the outer type gives the signedness. Otherwise
3675 (in case of a small bitfield) the signedness is unchanged. */
3676 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3677 *punsignedp = TYPE_UNSIGNED (outer_type);
3678
3679 /* Compute the mask to access the bitfield. */
3680 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3681 precision = TYPE_PRECISION (unsigned_type);
3682
3683 mask = build_int_cst_type (unsigned_type, -1);
3684
3685 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3686 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3687
3688 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3689 if (and_mask != 0)
3690 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3691 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3692
3693 *pmask = mask;
3694 *pand_mask = and_mask;
3695 return inner;
3696 }
3697
3698 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3699 bit positions. */
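/* E.g., for an 8-bit MASK type and SIZE == 4, this tests whether MASK
   is 0x0f, the value with exactly the four low-order bits set.  */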
3700
3701 static int
3702 all_ones_mask_p (const_tree mask, int size)
3703 {
3704 tree type = TREE_TYPE (mask);
3705 unsigned int precision = TYPE_PRECISION (type);
3706 tree tmask;
3707
3708 tmask = build_int_cst_type (signed_type_for (type), -1);
3709
3710 return
3711 tree_int_cst_equal (mask,
3712 const_binop (RSHIFT_EXPR,
3713 const_binop (LSHIFT_EXPR, tmask,
3714 size_int (precision - size)),
3715 size_int (precision - size)));
3716 }
3717
3718 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3719 represents the sign bit of EXP's type. If EXP represents a sign
3720 or zero extension, also test VAL against the unextended type.
3721 The return value is the (sub)expression whose sign bit is VAL,
3722 or NULL_TREE otherwise. */
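/* E.g., for a 32-bit signed EXP, VAL must be 0x80000000 (only bit 31
   set); if EXP is an extension such as (int) (short) X, VAL == 0x8000
   also qualifies, tested against the narrower type by the recursion
   below.  */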
3723
3724 static tree
3725 sign_bit_p (tree exp, const_tree val)
3726 {
3727 unsigned HOST_WIDE_INT mask_lo, lo;
3728 HOST_WIDE_INT mask_hi, hi;
3729 int width;
3730 tree t;
3731
3732 /* Tree EXP must have an integral type. */
3733 t = TREE_TYPE (exp);
3734 if (! INTEGRAL_TYPE_P (t))
3735 return NULL_TREE;
3736
3737 /* Tree VAL must be an integer constant. */
3738 if (TREE_CODE (val) != INTEGER_CST
3739 || TREE_OVERFLOW (val))
3740 return NULL_TREE;
3741
3742 width = TYPE_PRECISION (t);
3743 if (width > HOST_BITS_PER_WIDE_INT)
3744 {
3745 hi = (unsigned HOST_WIDE_INT) 1 << (width - HOST_BITS_PER_WIDE_INT - 1);
3746 lo = 0;
3747
3748 mask_hi = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_DOUBLE_INT - width));
3749 mask_lo = -1;
3750 }
3751 else
3752 {
3753 hi = 0;
3754 lo = (unsigned HOST_WIDE_INT) 1 << (width - 1);
3755
3756 mask_hi = 0;
3757 mask_lo = (HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - width));
3758 }
3759
3760 /* We mask off those bits beyond TREE_TYPE (exp) so that we can
3761 treat VAL as if it were unsigned. */
3762 if ((TREE_INT_CST_HIGH (val) & mask_hi) == hi
3763 && (TREE_INT_CST_LOW (val) & mask_lo) == lo)
3764 return exp;
3765
3766 /* Handle extension from a narrower type. */
3767 if (TREE_CODE (exp) == NOP_EXPR
3768 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3769 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3770
3771 return NULL_TREE;
3772 }
3773
3774 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3775 to be evaluated unconditionally. */
3776
3777 static int
3778 simple_operand_p (const_tree exp)
3779 {
3780 /* Strip any conversions that don't change the machine mode. */
3781 STRIP_NOPS (exp);
3782
3783 return (CONSTANT_CLASS_P (exp)
3784 || TREE_CODE (exp) == SSA_NAME
3785 || (DECL_P (exp)
3786 && ! TREE_ADDRESSABLE (exp)
3787 && ! TREE_THIS_VOLATILE (exp)
3788 && ! DECL_NONLOCAL (exp)
3789 /* Don't regard global variables as simple. They may be
3790 allocated in ways unknown to the compiler (shared memory,
3791 #pragma weak, etc). */
3792 && ! TREE_PUBLIC (exp)
3793 && ! DECL_EXTERNAL (exp)
3794 /* Weakrefs are not safe to be read, since they can be NULL.
3795 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
3796 have DECL_WEAK flag set. */
3797 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
3798 /* Loading a static variable is unduly expensive, but global
3799 registers aren't expensive. */
3800 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
3801 }
3802
3803 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
3804 to be evaluated unconditionally.
3805    In addition to simple_operand_p, we assume that comparisons, conversions,
3806 and logic-not operations are simple, if their operands are simple, too. */
3807
3808 static bool
3809 simple_operand_p_2 (tree exp)
3810 {
3811 enum tree_code code;
3812
3813 if (TREE_SIDE_EFFECTS (exp)
3814 || tree_could_trap_p (exp))
3815 return false;
3816
3817 while (CONVERT_EXPR_P (exp))
3818 exp = TREE_OPERAND (exp, 0);
3819
3820 code = TREE_CODE (exp);
3821
3822 if (TREE_CODE_CLASS (code) == tcc_comparison)
3823 return (simple_operand_p (TREE_OPERAND (exp, 0))
3824 && simple_operand_p (TREE_OPERAND (exp, 1)));
3825
3826 if (code == TRUTH_NOT_EXPR)
3827 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
3828
3829 return simple_operand_p (exp);
3830 }
3831
3832 \f
3833 /* The following functions are subroutines to fold_range_test and allow it to
3834 try to change a logical combination of comparisons into a range test.
3835
3836 For example, both
3837 X == 2 || X == 3 || X == 4 || X == 5
3838 and
3839 X >= 2 && X <= 5
3840 are converted to
3841 (unsigned) (X - 2) <= 3
3842
3843 We describe each set of comparisons as being either inside or outside
3844 a range, using a variable named like IN_P, and then describe the
3845 range with a lower and upper bound. If one of the bounds is omitted,
3846 it represents either the highest or lowest value of the type.
3847
3848 In the comments below, we represent a range by two numbers in brackets
3849 preceded by a "+" to designate being inside that range, or a "-" to
3850 designate being outside that range, so the condition can be inverted by
3851 flipping the prefix. An omitted bound is represented by a "-". For
3852 example, "- [-, 10]" means being outside the range starting at the lowest
3853 possible value and ending at 10, in other words, being greater than 10.
3854 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
3855 always false.
3856
3857 We set up things so that the missing bounds are handled in a consistent
3858 manner so neither a missing bound nor "true" and "false" need to be
3859 handled using a special case. */
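/* A worked instance of the notation: X >= 2 && X <= 5 is "+ [2, 5]",
   X < 2 || X > 5 is "- [2, 5]", and X > 10 is "- [-, 10]".  The range
   "+ [2, 5]" is tested as (unsigned) (X - 2) <= 3, since the
   subtraction maps 2..5 onto 0..3 while everything outside the range
   wraps around to a large unsigned value.  */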
3860
3861 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
3862 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
3863 and UPPER1_P are nonzero if the respective argument is an upper bound
3864 and zero for a lower. TYPE, if nonzero, is the type of the result; it
3865 must be specified for a comparison. ARG1 will be converted to ARG0's
3866 type if both are specified. */
3867
3868 static tree
3869 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
3870 tree arg1, int upper1_p)
3871 {
3872 tree tem;
3873 int result;
3874 int sgn0, sgn1;
3875
3876 /* If neither arg represents infinity, do the normal operation.
3877 Else, if not a comparison, return infinity. Else handle the special
3878 comparison rules. Note that most of the cases below won't occur, but
3879 are handled for consistency. */
3880
3881 if (arg0 != 0 && arg1 != 0)
3882 {
3883 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
3884 arg0, fold_convert (TREE_TYPE (arg0), arg1));
3885 STRIP_NOPS (tem);
3886 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
3887 }
3888
3889 if (TREE_CODE_CLASS (code) != tcc_comparison)
3890 return 0;
3891
3892 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
3893      for neither.  In real maths we cannot assume open-ended ranges are
3894      the same.  But this is computer arithmetic, where numbers are finite.
3895      We can therefore represent any unbounded bound by a value Z whose
3896      magnitude is greater than that of any representable number.  This
3897      permits us to treat unbounded ranges as equal.  */
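  /* For example, a missing lower bound (sgn -1, i.e. -Z) compared with a
     missing upper bound (sgn 1, i.e. +Z) makes LT_EXPR true and EQ_EXPR
     false, while two missing upper bounds compare equal.  */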
3898 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
3899 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
3900 switch (code)
3901 {
3902 case EQ_EXPR:
3903 result = sgn0 == sgn1;
3904 break;
3905 case NE_EXPR:
3906 result = sgn0 != sgn1;
3907 break;
3908 case LT_EXPR:
3909 result = sgn0 < sgn1;
3910 break;
3911 case LE_EXPR:
3912 result = sgn0 <= sgn1;
3913 break;
3914 case GT_EXPR:
3915 result = sgn0 > sgn1;
3916 break;
3917 case GE_EXPR:
3918 result = sgn0 >= sgn1;
3919 break;
3920 default:
3921 gcc_unreachable ();
3922 }
3923
3924 return constant_boolean_node (result, type);
3925 }
3926 \f
3927 /* Helper routine for make_range. Perform one step for it, return
3928 new expression if the loop should continue or NULL_TREE if it should
3929 stop. */
3930
3931 tree
3932 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
3933 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
3934 bool *strict_overflow_p)
3935 {
3936 tree arg0_type = TREE_TYPE (arg0);
3937 tree n_low, n_high, low = *p_low, high = *p_high;
3938 int in_p = *p_in_p, n_in_p;
3939
3940 switch (code)
3941 {
3942 case TRUTH_NOT_EXPR:
3943 /* We can only do something if the range is testing for zero. */
3944 if (low == NULL_TREE || high == NULL_TREE
3945 || ! integer_zerop (low) || ! integer_zerop (high))
3946 return NULL_TREE;
3947 *p_in_p = ! in_p;
3948 return arg0;
3949
3950 case EQ_EXPR: case NE_EXPR:
3951 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
3952 /* We can only do something if the range is testing for zero
3953 and if the second operand is an integer constant. Note that
3954 saying something is "in" the range we make is done by
3955 complementing IN_P since it will set in the initial case of
3956 being not equal to zero; "out" is leaving it alone. */
3957 if (low == NULL_TREE || high == NULL_TREE
3958 || ! integer_zerop (low) || ! integer_zerop (high)
3959 || TREE_CODE (arg1) != INTEGER_CST)
3960 return NULL_TREE;
3961
3962 switch (code)
3963 {
3964 case NE_EXPR: /* - [c, c] */
3965 low = high = arg1;
3966 break;
3967 case EQ_EXPR: /* + [c, c] */
3968 in_p = ! in_p, low = high = arg1;
3969 break;
3970 case GT_EXPR: /* - [-, c] */
3971 low = 0, high = arg1;
3972 break;
3973 case GE_EXPR: /* + [c, -] */
3974 in_p = ! in_p, low = arg1, high = 0;
3975 break;
3976 case LT_EXPR: /* - [c, -] */
3977 low = arg1, high = 0;
3978 break;
3979 case LE_EXPR: /* + [-, c] */
3980 in_p = ! in_p, low = 0, high = arg1;
3981 break;
3982 default:
3983 gcc_unreachable ();
3984 }
3985
3986 /* If this is an unsigned comparison, we also know that EXP is
3987 greater than or equal to zero. We base the range tests we make
3988 on that fact, so we record it here so we can parse existing
3989 range tests. We test arg0_type since often the return type
3990 of, e.g. EQ_EXPR, is boolean. */
3991 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
3992 {
3993 if (! merge_ranges (&n_in_p, &n_low, &n_high,
3994 in_p, low, high, 1,
3995 build_int_cst (arg0_type, 0),
3996 NULL_TREE))
3997 return NULL_TREE;
3998
3999 in_p = n_in_p, low = n_low, high = n_high;
4000
4001 /* If the high bound is missing, but we have a nonzero low
4002 bound, reverse the range so it goes from zero to the low bound
4003 minus 1. */
4004 if (high == 0 && low && ! integer_zerop (low))
4005 {
4006 in_p = ! in_p;
4007 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4008 integer_one_node, 0);
4009 low = build_int_cst (arg0_type, 0);
4010 }
4011 }
4012
4013 *p_low = low;
4014 *p_high = high;
4015 *p_in_p = in_p;
4016 return arg0;
4017
4018 case NEGATE_EXPR:
4019 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4020 low and high are non-NULL, then normalize will DTRT. */
4021 if (!TYPE_UNSIGNED (arg0_type)
4022 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4023 {
4024 if (low == NULL_TREE)
4025 low = TYPE_MIN_VALUE (arg0_type);
4026 if (high == NULL_TREE)
4027 high = TYPE_MAX_VALUE (arg0_type);
4028 }
4029
4030 /* (-x) IN [a,b] -> x in [-b, -a] */
4031 n_low = range_binop (MINUS_EXPR, exp_type,
4032 build_int_cst (exp_type, 0),
4033 0, high, 1);
4034 n_high = range_binop (MINUS_EXPR, exp_type,
4035 build_int_cst (exp_type, 0),
4036 0, low, 0);
4037 if (n_high != 0 && TREE_OVERFLOW (n_high))
4038 return NULL_TREE;
4039 goto normalize;
4040
4041 case BIT_NOT_EXPR:
4042 /* ~ X -> -X - 1 */
4043 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4044 build_int_cst (exp_type, 1));
4045
4046 case PLUS_EXPR:
4047 case MINUS_EXPR:
4048 if (TREE_CODE (arg1) != INTEGER_CST)
4049 return NULL_TREE;
4050
4051 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4052 move a constant to the other side. */
4053 if (!TYPE_UNSIGNED (arg0_type)
4054 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4055 return NULL_TREE;
4056
4057 /* If EXP is signed, any overflow in the computation is undefined,
4058 so we don't worry about it so long as our computations on
4059 the bounds don't overflow. For unsigned, overflow is defined
4060 and this is exactly the right thing. */
4061 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4062 arg0_type, low, 0, arg1, 0);
4063 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4064 arg0_type, high, 1, arg1, 0);
4065 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4066 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4067 return NULL_TREE;
4068
4069 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4070 *strict_overflow_p = true;
4071
4072 normalize:
4073 /* Check for an unsigned range which has wrapped around the maximum
4074 value thus making n_high < n_low, and normalize it. */
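      /* E.g., for unsigned char, + [250, 5] has n_high 5 < n_low 250 and
	 is normalized to - [6, 249], i.e. X >= 250 || X <= 5.  */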
4075 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4076 {
4077 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4078 integer_one_node, 0);
4079 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4080 integer_one_node, 0);
4081
4082 /* If the range is of the form +/- [ x+1, x ], we won't
4083 be able to normalize it. But then, it represents the
4084 whole range or the empty set, so make it
4085 +/- [ -, - ]. */
4086 if (tree_int_cst_equal (n_low, low)
4087 && tree_int_cst_equal (n_high, high))
4088 low = high = 0;
4089 else
4090 in_p = ! in_p;
4091 }
4092 else
4093 low = n_low, high = n_high;
4094
4095 *p_low = low;
4096 *p_high = high;
4097 *p_in_p = in_p;
4098 return arg0;
4099
4100 CASE_CONVERT:
4101 case NON_LVALUE_EXPR:
4102 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4103 return NULL_TREE;
4104
4105 if (! INTEGRAL_TYPE_P (arg0_type)
4106 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4107 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4108 return NULL_TREE;
4109
4110 n_low = low, n_high = high;
4111
4112 if (n_low != 0)
4113 n_low = fold_convert_loc (loc, arg0_type, n_low);
4114
4115 if (n_high != 0)
4116 n_high = fold_convert_loc (loc, arg0_type, n_high);
4117
4118       /* If we're converting arg0 from an unsigned type to exp's
4119 	 signed type, we will be doing the comparison as unsigned.
4120 The tests above have already verified that LOW and HIGH
4121 are both positive.
4122
4123 So we have to ensure that we will handle large unsigned
4124 values the same way that the current signed bounds treat
4125 negative values. */
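      /* For instance (hypothetical 32-bit widths), converting the signed
	 test + [5, -] on (int) U, with U unsigned: values of U from
	 0x80000000 up are negative as int, so below we intersect with
	 [0, 0x7fffffff] and obtain + [5, 0x7fffffff].  */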
4126
4127 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4128 {
4129 tree high_positive;
4130 tree equiv_type;
4131 /* For fixed-point modes, we need to pass the saturating flag
4132 as the 2nd parameter. */
4133 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4134 equiv_type
4135 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4136 TYPE_SATURATING (arg0_type));
4137 else
4138 equiv_type
4139 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4140
4141 /* A range without an upper bound is, naturally, unbounded.
4142 Since convert would have cropped a very large value, use
4143 the max value for the destination type. */
4144 high_positive
4145 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4146 : TYPE_MAX_VALUE (arg0_type);
4147
4148 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4149 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4150 fold_convert_loc (loc, arg0_type,
4151 high_positive),
4152 build_int_cst (arg0_type, 1));
4153
4154 /* If the low bound is specified, "and" the range with the
4155 range for which the original unsigned value will be
4156 positive. */
4157 if (low != 0)
4158 {
4159 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4160 1, fold_convert_loc (loc, arg0_type,
4161 integer_zero_node),
4162 high_positive))
4163 return NULL_TREE;
4164
4165 in_p = (n_in_p == in_p);
4166 }
4167 else
4168 {
4169 /* Otherwise, "or" the range with the range of the input
4170 that will be interpreted as negative. */
4171 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4172 1, fold_convert_loc (loc, arg0_type,
4173 integer_zero_node),
4174 high_positive))
4175 return NULL_TREE;
4176
4177 in_p = (in_p != n_in_p);
4178 }
4179 }
4180
4181 *p_low = n_low;
4182 *p_high = n_high;
4183 *p_in_p = in_p;
4184 return arg0;
4185
4186 default:
4187 return NULL_TREE;
4188 }
4189 }
4190
4191 /* Given EXP, a logical expression, set the range it is testing into
4192 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4193 actually being tested. *PLOW and *PHIGH will be made of the same
4194 type as the returned expression. If EXP is not a comparison, we
4195 will most likely not be returning a useful value and range. Set
4196 *STRICT_OVERFLOW_P to true if the return value is only valid
4197 because signed overflow is undefined; otherwise, do not change
4198 *STRICT_OVERFLOW_P. */
4199
4200 tree
4201 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4202 bool *strict_overflow_p)
4203 {
4204 enum tree_code code;
4205 tree arg0, arg1 = NULL_TREE;
4206 tree exp_type, nexp;
4207 int in_p;
4208 tree low, high;
4209 location_t loc = EXPR_LOCATION (exp);
4210
4211 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4212      and see if we can refine the range.  Some of the cases handled by
4213      make_range_step may not happen, but it doesn't seem worth worrying
4214      about this.  We keep looping as long as make_range_step refines the
4215      range, and stop as soon as it can make no further progress.  */
4216
4217 in_p = 0;
4218 low = high = build_int_cst (TREE_TYPE (exp), 0);
4219
4220 while (1)
4221 {
4222 code = TREE_CODE (exp);
4223 exp_type = TREE_TYPE (exp);
4224 arg0 = NULL_TREE;
4225
4226 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4227 {
4228 if (TREE_OPERAND_LENGTH (exp) > 0)
4229 arg0 = TREE_OPERAND (exp, 0);
4230 if (TREE_CODE_CLASS (code) == tcc_binary
4231 || TREE_CODE_CLASS (code) == tcc_comparison
4232 || (TREE_CODE_CLASS (code) == tcc_expression
4233 && TREE_OPERAND_LENGTH (exp) > 1))
4234 arg1 = TREE_OPERAND (exp, 1);
4235 }
4236 if (arg0 == NULL_TREE)
4237 break;
4238
4239 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4240 &high, &in_p, strict_overflow_p);
4241 if (nexp == NULL_TREE)
4242 break;
4243 exp = nexp;
4244 }
4245
4246 /* If EXP is a constant, we can evaluate whether this is true or false. */
4247 if (TREE_CODE (exp) == INTEGER_CST)
4248 {
4249 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4250 exp, 0, low, 0))
4251 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4252 exp, 1, high, 1)));
4253 low = high = 0;
4254 exp = 0;
4255 }
4256
4257 *pin_p = in_p, *plow = low, *phigh = high;
4258 return exp;
4259 }
4260 \f
4261 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4262 type, TYPE, return an expression to test if EXP is in (or out of, depending
4263 on IN_P) the range. Return 0 if the test couldn't be created. */
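/* E.g. (hypothetical call), build_range_check (loc, boolean_type_node,
   X, 1, 2, 5) for a signed integer X folds through the steps below to
   (unsigned) (X - 2) <= 3; IN_P == 0 would yield the inverted test.  */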
4264
4265 tree
4266 build_range_check (location_t loc, tree type, tree exp, int in_p,
4267 tree low, tree high)
4268 {
4269 tree etype = TREE_TYPE (exp), value;
4270
4271 #ifdef HAVE_canonicalize_funcptr_for_compare
4272 /* Disable this optimization for function pointer expressions
4273 on targets that require function pointer canonicalization. */
4274 if (HAVE_canonicalize_funcptr_for_compare
4275 && TREE_CODE (etype) == POINTER_TYPE
4276 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4277 return NULL_TREE;
4278 #endif
4279
4280 if (! in_p)
4281 {
4282 value = build_range_check (loc, type, exp, 1, low, high);
4283 if (value != 0)
4284 return invert_truthvalue_loc (loc, value);
4285
4286 return 0;
4287 }
4288
4289 if (low == 0 && high == 0)
4290 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4291
4292 if (low == 0)
4293 return fold_build2_loc (loc, LE_EXPR, type, exp,
4294 fold_convert_loc (loc, etype, high));
4295
4296 if (high == 0)
4297 return fold_build2_loc (loc, GE_EXPR, type, exp,
4298 fold_convert_loc (loc, etype, low));
4299
4300 if (operand_equal_p (low, high, 0))
4301 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4302 fold_convert_loc (loc, etype, low));
4303
4304 if (integer_zerop (low))
4305 {
4306 if (! TYPE_UNSIGNED (etype))
4307 {
4308 etype = unsigned_type_for (etype);
4309 high = fold_convert_loc (loc, etype, high);
4310 exp = fold_convert_loc (loc, etype, exp);
4311 }
4312 return build_range_check (loc, type, exp, 1, 0, high);
4313 }
4314
4315 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4316 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4317 {
4318 unsigned HOST_WIDE_INT lo;
4319 HOST_WIDE_INT hi;
4320 int prec;
4321
4322 prec = TYPE_PRECISION (etype);
4323 if (prec <= HOST_BITS_PER_WIDE_INT)
4324 {
4325 hi = 0;
4326 lo = ((unsigned HOST_WIDE_INT) 1 << (prec - 1)) - 1;
4327 }
4328 else
4329 {
4330 hi = ((HOST_WIDE_INT) 1 << (prec - HOST_BITS_PER_WIDE_INT - 1)) - 1;
4331 lo = HOST_WIDE_INT_M1U;
4332 }
4333
4334 if (TREE_INT_CST_HIGH (high) == hi && TREE_INT_CST_LOW (high) == lo)
4335 {
4336 if (TYPE_UNSIGNED (etype))
4337 {
4338 tree signed_etype = signed_type_for (etype);
4339 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4340 etype
4341 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4342 else
4343 etype = signed_etype;
4344 exp = fold_convert_loc (loc, etype, exp);
4345 }
4346 return fold_build2_loc (loc, GT_EXPR, type, exp,
4347 build_int_cst (etype, 0));
4348 }
4349 }
4350
4351 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4352      This requires wrap-around arithmetic for the type of the expression.
4353      First make sure that arithmetic in this type is valid, then make sure
4354 that it wraps around. */
4355 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4356 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4357 TYPE_UNSIGNED (etype));
4358
4359 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4360 {
4361 tree utype, minv, maxv;
4362
4363 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4364 for the type in question, as we rely on this here. */
4365 utype = unsigned_type_for (etype);
4366 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4367 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4368 integer_one_node, 1);
4369 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4370
4371 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4372 minv, 1, maxv, 1)))
4373 etype = utype;
4374 else
4375 return 0;
4376 }
4377
4378 high = fold_convert_loc (loc, etype, high);
4379 low = fold_convert_loc (loc, etype, low);
4380 exp = fold_convert_loc (loc, etype, exp);
4381
4382 value = const_binop (MINUS_EXPR, high, low);
4383
4385 if (POINTER_TYPE_P (etype))
4386 {
4387 if (value != 0 && !TREE_OVERFLOW (value))
4388 {
4389 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4390 return build_range_check (loc, type,
4391 fold_build_pointer_plus_loc (loc, exp, low),
4392 1, build_int_cst (etype, 0), value);
4393 }
4394 return 0;
4395 }
4396
4397 if (value != 0 && !TREE_OVERFLOW (value))
4398 return build_range_check (loc, type,
4399 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4400 1, build_int_cst (etype, 0), value);
4401
4402 return 0;
4403 }
4404 \f
4405 /* Return the predecessor of VAL in its type, handling the infinite case. */
4406
4407 static tree
4408 range_predecessor (tree val)
4409 {
4410 tree type = TREE_TYPE (val);
4411
4412 if (INTEGRAL_TYPE_P (type)
4413 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4414 return 0;
4415 else
4416 return range_binop (MINUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4417 }
4418
4419 /* Return the successor of VAL in its type, handling the infinite case. */
4420
4421 static tree
4422 range_successor (tree val)
4423 {
4424 tree type = TREE_TYPE (val);
4425
4426 if (INTEGRAL_TYPE_P (type)
4427 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4428 return 0;
4429 else
4430 return range_binop (PLUS_EXPR, NULL_TREE, val, 0, integer_one_node, 0);
4431 }
4432
4433 /* Given two ranges, see if we can merge them into one. Return 1 if we
4434 can, 0 if we can't. Set the output range into the specified parameters. */
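/* Two worked examples: merging + [2, 5] with + [4, 9] yields + [4, 5],
   while merging + [2, 5] with - [4, 9] yields + [2, 3].  */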
4435
4436 bool
4437 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4438 tree high0, int in1_p, tree low1, tree high1)
4439 {
4440 int no_overlap;
4441 int subset;
4442 int temp;
4443 tree tem;
4444 int in_p;
4445 tree low, high;
4446 int lowequal = ((low0 == 0 && low1 == 0)
4447 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4448 low0, 0, low1, 0)));
4449 int highequal = ((high0 == 0 && high1 == 0)
4450 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4451 high0, 1, high1, 1)));
4452
4453 /* Make range 0 be the range that starts first, or ends last if they
4454      start at the same value.  Swap them if necessary.  */
4455 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4456 low0, 0, low1, 0))
4457 || (lowequal
4458 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4459 high1, 1, high0, 1))))
4460 {
4461 temp = in0_p, in0_p = in1_p, in1_p = temp;
4462 tem = low0, low0 = low1, low1 = tem;
4463 tem = high0, high0 = high1, high1 = tem;
4464 }
4465
4466 /* Now flag two cases, whether the ranges are disjoint or whether the
4467 second range is totally subsumed in the first. Note that the tests
4468 below are simplified by the ones above. */
4469 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4470 high0, 1, low1, 0));
4471 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4472 high1, 1, high0, 1));
4473
4474 /* We now have four cases, depending on whether we are including or
4475 excluding the two ranges. */
4476 if (in0_p && in1_p)
4477 {
4478 /* If they don't overlap, the result is false. If the second range
4479 is a subset it is the result. Otherwise, the range is from the start
4480 of the second to the end of the first. */
4481 if (no_overlap)
4482 in_p = 0, low = high = 0;
4483 else if (subset)
4484 in_p = 1, low = low1, high = high1;
4485 else
4486 in_p = 1, low = low1, high = high0;
4487 }
4488
4489 else if (in0_p && ! in1_p)
4490 {
4491 /* If they don't overlap, the result is the first range. If they are
4492 equal, the result is false. If the second range is a subset of the
4493 first, and the ranges begin at the same place, we go from just after
4494 the end of the second range to the end of the first. If the second
4495 range is not a subset of the first, or if it is a subset and both
4496 ranges end at the same place, the range starts at the start of the
4497 first range and ends just before the second range.
4498 Otherwise, we can't describe this as a single range. */
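      /* E.g., + [2, 9] combined with - [2, 5]: the ranges share their low
	 bound and [2, 5] is a subset, so the result runs from just after 5
	 to the end of the first range, i.e. + [6, 9].  */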
4499 if (no_overlap)
4500 in_p = 1, low = low0, high = high0;
4501 else if (lowequal && highequal)
4502 in_p = 0, low = high = 0;
4503 else if (subset && lowequal)
4504 {
4505 low = range_successor (high1);
4506 high = high0;
4507 in_p = 1;
4508 if (low == 0)
4509 {
4510 /* We are in the weird situation where high0 > high1 but
4511 high1 has no successor. Punt. */
4512 return 0;
4513 }
4514 }
4515 else if (! subset || highequal)
4516 {
4517 low = low0;
4518 high = range_predecessor (low1);
4519 in_p = 1;
4520 if (high == 0)
4521 {
4522 /* low0 < low1 but low1 has no predecessor. Punt. */
4523 return 0;
4524 }
4525 }
4526 else
4527 return 0;
4528 }
4529
4530 else if (! in0_p && in1_p)
4531 {
4532 /* If they don't overlap, the result is the second range. If the second
4533 is a subset of the first, the result is false. Otherwise,
4534 the range starts just after the first range and ends at the
4535 end of the second. */
4536 if (no_overlap)
4537 in_p = 1, low = low1, high = high1;
4538 else if (subset || highequal)
4539 in_p = 0, low = high = 0;
4540 else
4541 {
4542 low = range_successor (high0);
4543 high = high1;
4544 in_p = 1;
4545 if (low == 0)
4546 {
4547 /* high1 > high0 but high0 has no successor. Punt. */
4548 return 0;
4549 }
4550 }
4551 }
4552
4553 else
4554 {
4555 /* The case where we are excluding both ranges. Here the complex case
4556 is if they don't overlap. In that case, the only time we have a
4557 range is if they are adjacent. If the second is a subset of the
4558 first, the result is the first. Otherwise, the range to exclude
4559 starts at the beginning of the first range and ends at the end of the
4560 second. */
4561 if (no_overlap)
4562 {
4563 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4564 range_successor (high0),
4565 1, low1, 0)))
4566 in_p = 0, low = low0, high = high1;
4567 else
4568 {
4569 /* Canonicalize - [min, x] into - [-, x]. */
4570 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4571 switch (TREE_CODE (TREE_TYPE (low0)))
4572 {
4573 case ENUMERAL_TYPE:
4574 if (TYPE_PRECISION (TREE_TYPE (low0))
4575 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4576 break;
4577 /* FALLTHROUGH */
4578 case INTEGER_TYPE:
4579 if (tree_int_cst_equal (low0,
4580 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4581 low0 = 0;
4582 break;
4583 case POINTER_TYPE:
4584 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4585 && integer_zerop (low0))
4586 low0 = 0;
4587 break;
4588 default:
4589 break;
4590 }
4591
4592 /* Canonicalize - [x, max] into - [x, -]. */
4593 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4594 switch (TREE_CODE (TREE_TYPE (high1)))
4595 {
4596 case ENUMERAL_TYPE:
4597 if (TYPE_PRECISION (TREE_TYPE (high1))
4598 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4599 break;
4600 /* FALLTHROUGH */
4601 case INTEGER_TYPE:
4602 if (tree_int_cst_equal (high1,
4603 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4604 high1 = 0;
4605 break;
4606 case POINTER_TYPE:
4607 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4608 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4609 high1, 1,
4610 integer_one_node, 1)))
4611 high1 = 0;
4612 break;
4613 default:
4614 break;
4615 }
4616
4617 	  /* The ranges might also be adjacent between the maximum and
4618 minimum values of the given type. For
4619 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4620 return + [x + 1, y - 1]. */
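	  /* E.g., for unsigned char, - [-, 3] together with - [8, -]
	     excludes 0..3 and 8..255, which is exactly + [4, 7].  */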
4621 if (low0 == 0 && high1 == 0)
4622 {
4623 low = range_successor (high0);
4624 high = range_predecessor (low1);
4625 if (low == 0 || high == 0)
4626 return 0;
4627
4628 in_p = 1;
4629 }
4630 else
4631 return 0;
4632 }
4633 }
4634 else if (subset)
4635 in_p = 0, low = low0, high = high0;
4636 else
4637 in_p = 0, low = low0, high = high1;
4638 }
4639
4640 *pin_p = in_p, *plow = low, *phigh = high;
4641 return 1;
4642 }
4643 \f
4644
4645 /* Subroutine of fold, looking inside expressions of the form
4646 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4647 of the COND_EXPR. This function is being used also to optimize
4648 A op B ? C : A, by reversing the comparison first.
4649
4650 Return a folded expression whose code is not a COND_EXPR
4651 anymore, or NULL_TREE if no folding opportunity is found. */
4652
4653 static tree
4654 fold_cond_expr_with_comparison (location_t loc, tree type,
4655 tree arg0, tree arg1, tree arg2)
4656 {
4657 enum tree_code comp_code = TREE_CODE (arg0);
4658 tree arg00 = TREE_OPERAND (arg0, 0);
4659 tree arg01 = TREE_OPERAND (arg0, 1);
4660 tree arg1_type = TREE_TYPE (arg1);
4661 tree tem;
4662
4663 STRIP_NOPS (arg1);
4664 STRIP_NOPS (arg2);
4665
4666 /* If we have A op 0 ? A : -A, consider applying the following
4667 transformations:
4668
4669 A == 0? A : -A same as -A
4670 A != 0? A : -A same as A
4671 A >= 0? A : -A same as abs (A)
4672 A > 0? A : -A same as abs (A)
4673 A <= 0? A : -A same as -abs (A)
4674 A < 0? A : -A same as -abs (A)
4675
4676 None of these transformations work for modes with signed
4677 zeros. If A is +/-0, the first two transformations will
4678 change the sign of the result (from +0 to -0, or vice
4679 versa). The last four will fix the sign of the result,
4680 even though the original expressions could be positive or
4681 negative, depending on the sign of A.
4682
4683 Note that all these transformations are correct if A is
4684 NaN, since the two alternatives (A and -A) are also NaNs. */
4685 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4686 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4687 ? real_zerop (arg01)
4688 : integer_zerop (arg01))
4689 && ((TREE_CODE (arg2) == NEGATE_EXPR
4690 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4691 /* In the case that A is of the form X-Y, '-A' (arg2) may
4692 have already been folded to Y-X, check for that. */
4693 || (TREE_CODE (arg1) == MINUS_EXPR
4694 && TREE_CODE (arg2) == MINUS_EXPR
4695 && operand_equal_p (TREE_OPERAND (arg1, 0),
4696 TREE_OPERAND (arg2, 1), 0)
4697 && operand_equal_p (TREE_OPERAND (arg1, 1),
4698 TREE_OPERAND (arg2, 0), 0))))
4699 switch (comp_code)
4700 {
4701 case EQ_EXPR:
4702 case UNEQ_EXPR:
4703 tem = fold_convert_loc (loc, arg1_type, arg1);
4704 return pedantic_non_lvalue_loc (loc,
4705 fold_convert_loc (loc, type,
4706 negate_expr (tem)));
4707 case NE_EXPR:
4708 case LTGT_EXPR:
4709 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4710 case UNGE_EXPR:
4711 case UNGT_EXPR:
4712 if (flag_trapping_math)
4713 break;
4714 /* Fall through. */
4715 case GE_EXPR:
4716 case GT_EXPR:
4717 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4718 arg1 = fold_convert_loc (loc, signed_type_for
4719 (TREE_TYPE (arg1)), arg1);
4720 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4721 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4722 case UNLE_EXPR:
4723 case UNLT_EXPR:
4724 if (flag_trapping_math)
4725 break;
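      /* Fall through.  */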
4726 case LE_EXPR:
4727 case LT_EXPR:
4728 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4729 arg1 = fold_convert_loc (loc, signed_type_for
4730 (TREE_TYPE (arg1)), arg1);
4731 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4732 return negate_expr (fold_convert_loc (loc, type, tem));
4733 default:
4734 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4735 break;
4736 }
4737
4738 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4739 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4740 both transformations are correct when A is NaN: A != 0
4741 is then true, and A == 0 is false. */
4742
4743 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4744 && integer_zerop (arg01) && integer_zerop (arg2))
4745 {
4746 if (comp_code == NE_EXPR)
4747 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4748 else if (comp_code == EQ_EXPR)
4749 return build_zero_cst (type);
4750 }
4751
4752 /* Try some transformations of A op B ? A : B.
4753
4754 A == B? A : B same as B
4755 A != B? A : B same as A
4756 A >= B? A : B same as max (A, B)
4757 A > B? A : B same as max (B, A)
4758 A <= B? A : B same as min (A, B)
4759 A < B? A : B same as min (B, A)
4760
4761 As above, these transformations don't work in the presence
4762 of signed zeros. For example, if A and B are zeros of
4763 opposite sign, the first two transformations will change
4764 the sign of the result. In the last four, the original
4765 expressions give different results for (A=+0, B=-0) and
4766 (A=-0, B=+0), but the transformed expressions do not.
4767
4768 The first two transformations are correct if either A or B
4769 is a NaN. In the first transformation, the condition will
4770 be false, and B will indeed be chosen. In the case of the
4771 second transformation, the condition A != B will be true,
4772 and A will be chosen.
4773
4774 The conversions to max() and min() are not correct if B is
4775 a number and A is not. The conditions in the original
4776 expressions will be false, so all four give B. The min()
4777 and max() versions would give a NaN instead. */
4778 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type))
4779 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4780 /* Avoid these transformations if the COND_EXPR may be used
4781 as an lvalue in the C++ front-end. PR c++/19199. */
4782 && (in_gimple_form
4783 || VECTOR_TYPE_P (type)
4784 || (strcmp (lang_hooks.name, "GNU C++") != 0
4785 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4786 || ! maybe_lvalue_p (arg1)
4787 || ! maybe_lvalue_p (arg2)))
4788 {
4789 tree comp_op0 = arg00;
4790 tree comp_op1 = arg01;
4791 tree comp_type = TREE_TYPE (comp_op0);
4792
4793 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
4794 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
4795 {
4796 comp_type = type;
4797 comp_op0 = arg1;
4798 comp_op1 = arg2;
4799 }
4800
4801 switch (comp_code)
4802 {
4803 case EQ_EXPR:
4804 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
4805 case NE_EXPR:
4806 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4807 case LE_EXPR:
4808 case LT_EXPR:
4809 case UNLE_EXPR:
4810 case UNLT_EXPR:
4811 /* In C++ a ?: expression can be an lvalue, so put the
4812 operand which will be used if they are equal first
4813 so that we can convert this back to the
4814 corresponding COND_EXPR. */
4815 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4816 {
4817 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4818 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4819 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
4820 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
4821 : fold_build2_loc (loc, MIN_EXPR, comp_type,
4822 comp_op1, comp_op0);
4823 return pedantic_non_lvalue_loc (loc,
4824 fold_convert_loc (loc, type, tem));
4825 }
4826 break;
4827 case GE_EXPR:
4828 case GT_EXPR:
4829 case UNGE_EXPR:
4830 case UNGT_EXPR:
4831 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4832 {
4833 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
4834 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
4835 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
4836 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
4837 : fold_build2_loc (loc, MAX_EXPR, comp_type,
4838 comp_op1, comp_op0);
4839 return pedantic_non_lvalue_loc (loc,
4840 fold_convert_loc (loc, type, tem));
4841 }
4842 break;
4843 case UNEQ_EXPR:
4844 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4845 return pedantic_non_lvalue_loc (loc,
4846 fold_convert_loc (loc, type, arg2));
4847 break;
4848 case LTGT_EXPR:
4849 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1))))
4850 return pedantic_non_lvalue_loc (loc,
4851 fold_convert_loc (loc, type, arg1));
4852 break;
4853 default:
4854 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4855 break;
4856 }
4857 }
4858
4859 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
4860 we might still be able to simplify this. For example,
4861 if C1 is one less or one more than C2, this might have started
4862 out as a MIN or MAX and been transformed by this function.
4863 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
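  /* E.g., X < 11 ? X : 10 has C1 == 11 == C2 + 1 and is rebuilt by the
     LT_EXPR case below as MIN_EXPR (X, 10).  */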
4864
4865 if (INTEGRAL_TYPE_P (type)
4866 && TREE_CODE (arg01) == INTEGER_CST
4867 && TREE_CODE (arg2) == INTEGER_CST)
4868 switch (comp_code)
4869 {
4870 case EQ_EXPR:
4871 if (TREE_CODE (arg1) == INTEGER_CST)
4872 break;
4873 /* We can replace A with C1 in this case. */
4874 arg1 = fold_convert_loc (loc, type, arg01);
4875 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
4876
4877 case LT_EXPR:
4878 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
4879 MIN_EXPR, to preserve the signedness of the comparison. */
4880 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4881 OEP_ONLY_CONST)
4882 && operand_equal_p (arg01,
4883 const_binop (PLUS_EXPR, arg2,
4884 build_int_cst (type, 1)),
4885 OEP_ONLY_CONST))
4886 {
4887 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4888 fold_convert_loc (loc, TREE_TYPE (arg00),
4889 arg2));
4890 return pedantic_non_lvalue_loc (loc,
4891 fold_convert_loc (loc, type, tem));
4892 }
4893 break;
4894
4895 case LE_EXPR:
4896 /* If C1 is C2 - 1, this is min(A, C2), with the same care
4897 as above. */
4898 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4899 OEP_ONLY_CONST)
4900 && operand_equal_p (arg01,
4901 const_binop (MINUS_EXPR, arg2,
4902 build_int_cst (type, 1)),
4903 OEP_ONLY_CONST))
4904 {
4905 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
4906 fold_convert_loc (loc, TREE_TYPE (arg00),
4907 arg2));
4908 return pedantic_non_lvalue_loc (loc,
4909 fold_convert_loc (loc, type, tem));
4910 }
4911 break;
4912
4913 case GT_EXPR:
4914 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
4915 MAX_EXPR, to preserve the signedness of the comparison. */
4916 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
4917 OEP_ONLY_CONST)
4918 && operand_equal_p (arg01,
4919 const_binop (MINUS_EXPR, arg2,
4920 build_int_cst (type, 1)),
4921 OEP_ONLY_CONST))
4922 {
4923 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4924 fold_convert_loc (loc, TREE_TYPE (arg00),
4925 arg2));
4926 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4927 }
4928 break;
4929
4930 case GE_EXPR:
4931 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
4932 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
4933 OEP_ONLY_CONST)
4934 && operand_equal_p (arg01,
4935 const_binop (PLUS_EXPR, arg2,
4936 build_int_cst (type, 1)),
4937 OEP_ONLY_CONST))
4938 {
4939 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
4940 fold_convert_loc (loc, TREE_TYPE (arg00),
4941 arg2));
4942 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4943 }
4944 break;
4945 case NE_EXPR:
4946 break;
4947 default:
4948 gcc_unreachable ();
4949 }
4950
4951 return NULL_TREE;
4952 }
4953
4954
4955 \f
4956 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
4957 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
4958 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
4959 false) >= 2)
4960 #endif
4961
4962 /* EXP is some logical combination of boolean tests. See if we can
4963 merge it into some range test. Return the new tree if so. */
4964
4965 static tree
4966 fold_range_test (location_t loc, enum tree_code code, tree type,
4967 tree op0, tree op1)
4968 {
4969 int or_op = (code == TRUTH_ORIF_EXPR
4970 || code == TRUTH_OR_EXPR);
4971 int in0_p, in1_p, in_p;
4972 tree low0, low1, low, high0, high1, high;
4973 bool strict_overflow_p = false;
4974 tree tem, lhs, rhs;
4975 const char * const warnmsg = G_("assuming signed overflow does not occur "
4976 "when simplifying range test");
4977
4978 if (!INTEGRAL_TYPE_P (type))
4979 return 0;
4980
4981 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
4982 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
4983
4984 /* If this is an OR operation, invert both sides; we will invert
4985 again at the end. */
4986 if (or_op)
4987 in0_p = ! in0_p, in1_p = ! in1_p;
4988
4989 /* If both expressions are the same, if we can merge the ranges, and we
4990      can build the range test, return it or its inversion.  If one of the
4991 ranges is always true or always false, consider it to be the same
4992 expression as the other. */
4993 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
4994 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
4995 in1_p, low1, high1)
4996 && 0 != (tem = (build_range_check (loc, type,
4997 lhs != 0 ? lhs
4998 : rhs != 0 ? rhs : integer_zero_node,
4999 in_p, low, high))))
5000 {
5001 if (strict_overflow_p)
5002 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5003 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5004 }
5005
5006 /* On machines where the branch cost is expensive, if this is a
5007 short-circuited branch and the underlying object on both sides
5008 is the same, make a non-short-circuit operation. */
5009 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5010 && lhs != 0 && rhs != 0
5011 && (code == TRUTH_ANDIF_EXPR
5012 || code == TRUTH_ORIF_EXPR)
5013 && operand_equal_p (lhs, rhs, 0))
5014 {
5015 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5016 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5017 which cases we can't do this. */
5018 if (simple_operand_p (lhs))
5019 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5020 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5021 type, op0, op1);
5022
5023 else if (!lang_hooks.decls.global_bindings_p ()
5024 && !CONTAINS_PLACEHOLDER_P (lhs))
5025 {
5026 tree common = save_expr (lhs);
5027
5028 if (0 != (lhs = build_range_check (loc, type, common,
5029 or_op ? ! in0_p : in0_p,
5030 low0, high0))
5031 && (0 != (rhs = build_range_check (loc, type, common,
5032 or_op ? ! in1_p : in1_p,
5033 low1, high1))))
5034 {
5035 if (strict_overflow_p)
5036 fold_overflow_warning (warnmsg,
5037 WARN_STRICT_OVERFLOW_COMPARISON);
5038 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5039 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5040 type, lhs, rhs);
5041 }
5042 }
5043 }
5044
5045 return 0;
5046 }
5047 \f
5048 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5049 bit value. Arrange things so the extra bits will be set to zero if and
5050    only if C is sign-extended to its full width.  If MASK is nonzero,
5051 it is an INTEGER_CST that should be AND'ed with the extra bits. */
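/* Worked example with hypothetical sizes (P == 4, an 8-bit mode,
   UNSIGNEDP == 0): for C == 0xfa, the sign-extended 4-bit value -6, the
   sign bit 1 is shifted up to 0x80, arithmetic-shifted back down to
   0xf0, and 0xfa ^ 0xf0 == 0x0a leaves the extra bits zero; a C that
   was not already sign-extended ends up with nonzero extra bits
   instead.  */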
5052
5053 static tree
5054 unextend (tree c, int p, int unsignedp, tree mask)
5055 {
5056 tree type = TREE_TYPE (c);
5057 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5058 tree temp;
5059
5060 if (p == modesize || unsignedp)
5061 return c;
5062
5063 /* We work by getting just the sign bit into the low-order bit, then
5064 into the high-order bit, then sign-extend. We then XOR that value
5065 with C. */
5066 temp = const_binop (RSHIFT_EXPR, c, size_int (p - 1));
5067 temp = const_binop (BIT_AND_EXPR, temp, size_int (1));
5068
5069 /* We must use a signed type in order to get an arithmetic right shift.
5070 However, we must also avoid introducing accidental overflows, so that
5071 a subsequent call to integer_zerop will work. Hence we must
5072 do the type conversion here. At this point, the constant is either
5073 zero or one, and the conversion to a signed type can never overflow.
5074 We could get an overflow if this conversion is done anywhere else. */
5075 if (TYPE_UNSIGNED (type))
5076 temp = fold_convert (signed_type_for (type), temp);
5077
5078 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5079 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5080 if (mask != 0)
5081 temp = const_binop (BIT_AND_EXPR, temp,
5082 fold_convert (TREE_TYPE (c), mask));
5083 /* If necessary, convert the type back to match the type of C. */
5084 if (TYPE_UNSIGNED (type))
5085 temp = fold_convert (type, temp);
5086
5087 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5088 }
5089 \f
5090 /* For an expression that has the form
5091 (A && B) || ~B
5092 or
5093 (A || B) && ~B,
5094 we can drop one of the inner expressions and simplify to
5095 A || ~B
5096 or
5097 A && ~B
5098 LOC is the location of the resulting expression. OP is the inner
5099 logical operation; the left-hand side in the examples above, while CMPOP
5100 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5101 removing a condition that guards another, as in
5102 (A != NULL && A->...) || A == NULL
5103 which we must not transform. If RHS_ONLY is true, only eliminate the
5104 right-most operand of the inner logical operation. */
5105
5106 static tree
5107 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5108 bool rhs_only)
5109 {
5110 tree type = TREE_TYPE (cmpop);
5111 enum tree_code code = TREE_CODE (cmpop);
5112 enum tree_code truthop_code = TREE_CODE (op);
5113 tree lhs = TREE_OPERAND (op, 0);
5114 tree rhs = TREE_OPERAND (op, 1);
5115 tree orig_lhs = lhs, orig_rhs = rhs;
5116 enum tree_code rhs_code = TREE_CODE (rhs);
5117 enum tree_code lhs_code = TREE_CODE (lhs);
5118 enum tree_code inv_code;
5119
5120 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5121 return NULL_TREE;
5122
5123 if (TREE_CODE_CLASS (code) != tcc_comparison)
5124 return NULL_TREE;
5125
5126 if (rhs_code == truthop_code)
5127 {
5128 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5129 if (newrhs != NULL_TREE)
5130 {
5131 rhs = newrhs;
5132 rhs_code = TREE_CODE (rhs);
5133 }
5134 }
5135 if (lhs_code == truthop_code && !rhs_only)
5136 {
5137 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5138 if (newlhs != NULL_TREE)
5139 {
5140 lhs = newlhs;
5141 lhs_code = TREE_CODE (lhs);
5142 }
5143 }
5144
5145 inv_code = invert_tree_comparison (code, HONOR_NANS (TYPE_MODE (type)));
5146 if (inv_code == rhs_code
5147 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5148 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5149 return lhs;
5150 if (!rhs_only && inv_code == lhs_code
5151 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5152 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5153 return rhs;
5154 if (rhs != orig_rhs || lhs != orig_lhs)
5155 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5156 lhs, rhs);
5157 return NULL_TREE;
5158 }
5159
5160 /* Find ways of folding logical expressions of LHS and RHS:
5161 Try to merge two comparisons to the same innermost item.
5162 Look for range tests like "ch >= '0' && ch <= '9'".
5163 Look for combinations of simple terms on machines with expensive branches
5164 and evaluate the RHS unconditionally.
5165
5166 For example, if we have p->a == 2 && p->b == 4 and we can make an
5167 object large enough to span both A and B, we can do this with a comparison
5168    against the object ANDed with a mask.
5169
5170 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5171 operations to do this with one comparison.
5172
5173    We check for both normal comparisons and the BIT_AND_EXPRs made by this
5174    function and the one above.
5175
5176 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5177 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5178
5179 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5180 two operands.
5181
5182 We return the simplified tree or 0 if no optimization is possible. */
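/* For instance (hypothetical little-endian layout), with two adjacent
   8-bit fields A and B sharing one 16-bit word W, A == 2 && B == 4 can
   be folded to (W & 0xffff) == 0x0402, built from the masks and shifts
   computed below.  */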
5183
5184 static tree
5185 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5186 tree lhs, tree rhs)
5187 {
5188 /* If this is the "or" of two comparisons, we can do something if
5189 the comparisons are NE_EXPR. If this is the "and", we can do something
5190 if the comparisons are EQ_EXPR. I.e.,
5191 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5192
5193 WANTED_CODE is this operation code. For single bit fields, we can
5194 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5195 comparison for one-bit fields. */
5196
5197 enum tree_code wanted_code;
5198 enum tree_code lcode, rcode;
5199 tree ll_arg, lr_arg, rl_arg, rr_arg;
5200 tree ll_inner, lr_inner, rl_inner, rr_inner;
5201 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5202 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5203 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5204 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5205 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5206 enum machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5207 enum machine_mode lnmode, rnmode;
5208 tree ll_mask, lr_mask, rl_mask, rr_mask;
5209 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5210 tree l_const, r_const;
5211 tree lntype, rntype, result;
5212 HOST_WIDE_INT first_bit, end_bit;
5213 int volatilep;
5214
5215 /* Start by getting the comparison codes. Fail if anything is volatile.
5216 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5217 it were surrounded with a NE_EXPR. */
5218
5219 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5220 return 0;
5221
5222 lcode = TREE_CODE (lhs);
5223 rcode = TREE_CODE (rhs);
5224
5225 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5226 {
5227 lhs = build2 (NE_EXPR, truth_type, lhs,
5228 build_int_cst (TREE_TYPE (lhs), 0));
5229 lcode = NE_EXPR;
5230 }
5231
5232 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5233 {
5234 rhs = build2 (NE_EXPR, truth_type, rhs,
5235 build_int_cst (TREE_TYPE (rhs), 0));
5236 rcode = NE_EXPR;
5237 }
5238
5239 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5240 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5241 return 0;
5242
5243 ll_arg = TREE_OPERAND (lhs, 0);
5244 lr_arg = TREE_OPERAND (lhs, 1);
5245 rl_arg = TREE_OPERAND (rhs, 0);
5246 rr_arg = TREE_OPERAND (rhs, 1);
5247
5248 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5249 if (simple_operand_p (ll_arg)
5250 && simple_operand_p (lr_arg))
5251 {
5252 if (operand_equal_p (ll_arg, rl_arg, 0)
5253 && operand_equal_p (lr_arg, rr_arg, 0))
5254 {
5255 result = combine_comparisons (loc, code, lcode, rcode,
5256 truth_type, ll_arg, lr_arg);
5257 if (result)
5258 return result;
5259 }
5260 else if (operand_equal_p (ll_arg, rr_arg, 0)
5261 && operand_equal_p (lr_arg, rl_arg, 0))
5262 {
5263 result = combine_comparisons (loc, code, lcode,
5264 swap_tree_comparison (rcode),
5265 truth_type, ll_arg, lr_arg);
5266 if (result)
5267 return result;
5268 }
5269 }
5270
5271 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5272 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5273
5274 /* If the RHS can be evaluated unconditionally and its operands are
5275 simple, it wins to evaluate the RHS unconditionally on machines
5276 with expensive branches. In this case, this isn't a comparison
5277 that can be merged. */
5278
5279 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5280 false) >= 2
5281 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5282 && simple_operand_p (rl_arg)
5283 && simple_operand_p (rr_arg))
5284 {
5285 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5286 if (code == TRUTH_OR_EXPR
5287 && lcode == NE_EXPR && integer_zerop (lr_arg)
5288 && rcode == NE_EXPR && integer_zerop (rr_arg)
5289 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5290 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5291 return build2_loc (loc, NE_EXPR, truth_type,
5292 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5293 ll_arg, rl_arg),
5294 build_int_cst (TREE_TYPE (ll_arg), 0));
5295
5296 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5297 if (code == TRUTH_AND_EXPR
5298 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5299 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5300 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5301 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5302 return build2_loc (loc, EQ_EXPR, truth_type,
5303 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5304 ll_arg, rl_arg),
5305 build_int_cst (TREE_TYPE (ll_arg), 0));
5306 }
5307
5308 /* See if the comparisons can be merged. Then get all the parameters for
5309 each side. */
5310
5311 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5312 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5313 return 0;
5314
5315 volatilep = 0;
5316 ll_inner = decode_field_reference (loc, ll_arg,
5317 &ll_bitsize, &ll_bitpos, &ll_mode,
5318 &ll_unsignedp, &volatilep, &ll_mask,
5319 &ll_and_mask);
5320 lr_inner = decode_field_reference (loc, lr_arg,
5321 &lr_bitsize, &lr_bitpos, &lr_mode,
5322 &lr_unsignedp, &volatilep, &lr_mask,
5323 &lr_and_mask);
5324 rl_inner = decode_field_reference (loc, rl_arg,
5325 &rl_bitsize, &rl_bitpos, &rl_mode,
5326 &rl_unsignedp, &volatilep, &rl_mask,
5327 &rl_and_mask);
5328 rr_inner = decode_field_reference (loc, rr_arg,
5329 &rr_bitsize, &rr_bitpos, &rr_mode,
5330 &rr_unsignedp, &volatilep, &rr_mask,
5331 &rr_and_mask);
5332
5333 /* The inner operation on the lhs of each comparison must be the
5334 same if we are to be able to do anything. Then see if we have
5335 constants. If not, the same must be true for
5336 the rhs's. */
5337 if (volatilep || ll_inner == 0 || rl_inner == 0
5338 || ! operand_equal_p (ll_inner, rl_inner, 0))
5339 return 0;
5340
5341 if (TREE_CODE (lr_arg) == INTEGER_CST
5342 && TREE_CODE (rr_arg) == INTEGER_CST)
5343 l_const = lr_arg, r_const = rr_arg;
5344 else if (lr_inner == 0 || rr_inner == 0
5345 || ! operand_equal_p (lr_inner, rr_inner, 0))
5346 return 0;
5347 else
5348 l_const = r_const = 0;
5349
5350 /* If either comparison code is not correct for our logical operation,
5351 fail. However, we can convert a one-bit comparison against zero into
5352 the opposite comparison against that bit being set in the field. */
5353
5354 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5355 if (lcode != wanted_code)
5356 {
5357 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5358 {
5359 /* Make the left operand unsigned, since we are only interested
5360 in the value of one bit. Otherwise we are doing the wrong
5361 thing below. */
5362 ll_unsignedp = 1;
5363 l_const = ll_mask;
5364 }
5365 else
5366 return 0;
5367 }
5368
5369 /* This is analogous to the code for l_const above. */
5370 if (rcode != wanted_code)
5371 {
5372 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5373 {
5374 rl_unsignedp = 1;
5375 r_const = rl_mask;
5376 }
5377 else
5378 return 0;
5379 }
5380
5381 /* See if we can find a mode that contains both fields being compared on
5382 the left. If we can't, fail. Otherwise, update all constants and masks
5383 to be relative to a field of that size. */
5384 first_bit = MIN (ll_bitpos, rl_bitpos);
5385 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5386 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5387 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5388 volatilep);
5389 if (lnmode == VOIDmode)
5390 return 0;
5391
5392 lnbitsize = GET_MODE_BITSIZE (lnmode);
5393 lnbitpos = first_bit & ~ (lnbitsize - 1);
5394 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5395 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5396
5397 if (BYTES_BIG_ENDIAN)
5398 {
5399 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5400 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5401 }
5402
5403 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5404 size_int (xll_bitpos));
5405 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5406 size_int (xrl_bitpos));
5407
5408 if (l_const)
5409 {
5410 l_const = fold_convert_loc (loc, lntype, l_const);
5411 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5412 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5413 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5414 fold_build1_loc (loc, BIT_NOT_EXPR,
5415 lntype, ll_mask))))
5416 {
5417 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5418
5419 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5420 }
5421 }
5422 if (r_const)
5423 {
5424 r_const = fold_convert_loc (loc, lntype, r_const);
5425 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5426 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5427 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5428 fold_build1_loc (loc, BIT_NOT_EXPR,
5429 lntype, rl_mask))))
5430 {
5431 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5432
5433 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5434 }
5435 }
5436
5437 /* If the right sides are not constant, do the same for them. Also,
5438 disallow this optimization if a size or signedness mismatch occurs
5439 between the left and right sides. */
5440 if (l_const == 0)
5441 {
5442 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5443 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5444 /* Make sure the two fields on the right
5445 correspond to the left without being swapped. */
5446 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5447 return 0;
5448
5449 first_bit = MIN (lr_bitpos, rr_bitpos);
5450 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5451 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5452 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5453 volatilep);
5454 if (rnmode == VOIDmode)
5455 return 0;
5456
5457 rnbitsize = GET_MODE_BITSIZE (rnmode);
5458 rnbitpos = first_bit & ~ (rnbitsize - 1);
5459 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5460 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5461
5462 if (BYTES_BIG_ENDIAN)
5463 {
5464 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5465 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5466 }
5467
5468 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5469 rntype, lr_mask),
5470 size_int (xlr_bitpos));
5471 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5472 rntype, rr_mask),
5473 size_int (xrr_bitpos));
5474
5475 /* Make a mask that corresponds to both fields being compared.
5476 Do this for both items being compared. If the operands are the
5477 same size and the bits being compared are in the same position
5478 then we can do this by masking both and comparing the masked
5479 results. */
5480 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5481 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5482 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5483 {
5484 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5485 ll_unsignedp || rl_unsignedp);
5486 if (! all_ones_mask_p (ll_mask, lnbitsize))
5487 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5488
5489 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5490 lr_unsignedp || rr_unsignedp);
5491 if (! all_ones_mask_p (lr_mask, rnbitsize))
5492 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5493
5494 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5495 }
5496
5497 /* There is still another way we can do something: If both pairs of
5498 fields being compared are adjacent, we may be able to make a wider
5499 field containing them both.
5500
5501 Note that we still must mask the lhs/rhs expressions. Furthermore,
5502 the mask must be shifted to account for the shift done by
5503 make_bit_field_ref. */
5504 if ((ll_bitsize + ll_bitpos == rl_bitpos
5505 && lr_bitsize + lr_bitpos == rr_bitpos)
5506 || (ll_bitpos == rl_bitpos + rl_bitsize
5507 && lr_bitpos == rr_bitpos + rr_bitsize))
5508 {
5509 tree type;
5510
5511 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5512 ll_bitsize + rl_bitsize,
5513 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5514 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5515 lr_bitsize + rr_bitsize,
5516 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5517
5518 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5519 size_int (MIN (xll_bitpos, xrl_bitpos)));
5520 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5521 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5522
5523 /* Convert to the smaller type before masking out unwanted bits. */
5524 type = lntype;
5525 if (lntype != rntype)
5526 {
5527 if (lnbitsize > rnbitsize)
5528 {
5529 lhs = fold_convert_loc (loc, rntype, lhs);
5530 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5531 type = rntype;
5532 }
5533 else if (lnbitsize < rnbitsize)
5534 {
5535 rhs = fold_convert_loc (loc, lntype, rhs);
5536 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5537 type = lntype;
5538 }
5539 }
5540
5541 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5542 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5543
5544 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5545 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5546
5547 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5548 }
5549
5550 return 0;
5551 }
5552
5553 /* Handle the case of comparisons with constants. If there is something in
5554 common between the masks, those bits of the constants must be the same.
5555 If not, the condition is always false. Test for this to avoid generating
5556 incorrect code below. */
5557 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5558 if (! integer_zerop (result)
5559 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5560 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5561 {
5562 if (wanted_code == NE_EXPR)
5563 {
5564 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5565 return constant_boolean_node (true, truth_type);
5566 }
5567 else
5568 {
5569 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5570 return constant_boolean_node (false, truth_type);
5571 }
5572 }
5573
5574 /* Construct the expression we will return. First get the component
5575 reference we will make. Unless the mask is all ones the width of
5576 that field, perform the mask operation. Then compare with the
5577 merged constant. */
5578 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5579 ll_unsignedp || rl_unsignedp);
5580
5581 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5582 if (! all_ones_mask_p (ll_mask, lnbitsize))
5583 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5584
5585 return build2_loc (loc, wanted_code, truth_type, result,
5586 const_binop (BIT_IOR_EXPR, l_const, r_const));
5587 }
5588 \f
5589 /* Optimize a comparison (CODE, with result in TYPE) of a MIN_EXPR or
5590 MAX_EXPR operand OP0 against the constant OP1. */
5591
5592 static tree
5593 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5594 tree op0, tree op1)
5595 {
5596 tree arg0 = op0;
5597 enum tree_code op_code;
5598 tree comp_const;
5599 tree minmax_const;
5600 int consts_equal, consts_lt;
5601 tree inner;
5602
5603 STRIP_SIGN_NOPS (arg0);
5604
5605 op_code = TREE_CODE (arg0);
5606 minmax_const = TREE_OPERAND (arg0, 1);
5607 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5608 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5609 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5610 inner = TREE_OPERAND (arg0, 0);
5611
5612 /* If something does not permit us to optimize, return NULL_TREE. */
5613 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5614 || TREE_CODE (comp_const) != INTEGER_CST
5615 || TREE_OVERFLOW (comp_const)
5616 || TREE_CODE (minmax_const) != INTEGER_CST
5617 || TREE_OVERFLOW (minmax_const))
5618 return NULL_TREE;
5619
5620 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5621 and GT_EXPR, doing the rest with recursive calls using logical
5622 simplifications. */
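/* For instance (an illustrative walk through the rules below):
   "MIN (x, 4) <= 2" is inverted to "MIN (x, 4) > 2", which the GT_EXPR
   case folds to "x > 2", and the result is inverted back to "x <= 2". */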
5623 switch (code)
5624 {
5625 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5626 {
5627 tree tem
5628 = optimize_minmax_comparison (loc,
5629 invert_tree_comparison (code, false),
5630 type, op0, op1);
5631 if (tem)
5632 return invert_truthvalue_loc (loc, tem);
5633 return NULL_TREE;
5634 }
5635
5636 case GE_EXPR:
5637 return
5638 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5639 optimize_minmax_comparison
5640 (loc, EQ_EXPR, type, arg0, comp_const),
5641 optimize_minmax_comparison
5642 (loc, GT_EXPR, type, arg0, comp_const));
5643
5644 case EQ_EXPR:
5645 if (op_code == MAX_EXPR && consts_equal)
5646 /* MAX (X, 0) == 0 -> X <= 0 */
5647 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5648
5649 else if (op_code == MAX_EXPR && consts_lt)
5650 /* MAX (X, 0) == 5 -> X == 5 */
5651 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5652
5653 else if (op_code == MAX_EXPR)
5654 /* MAX (X, 0) == -1 -> false */
5655 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5656
5657 else if (consts_equal)
5658 /* MIN (X, 0) == 0 -> X >= 0 */
5659 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5660
5661 else if (consts_lt)
5662 /* MIN (X, 0) == 5 -> false */
5663 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5664
5665 else
5666 /* MIN (X, 0) == -1 -> X == -1 */
5667 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5668
5669 case GT_EXPR:
5670 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5671 /* MAX (X, 0) > 0 -> X > 0
5672 MAX (X, 0) > 5 -> X > 5 */
5673 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5674
5675 else if (op_code == MAX_EXPR)
5676 /* MAX (X, 0) > -1 -> true */
5677 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5678
5679 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5680 /* MIN (X, 0) > 0 -> false
5681 MIN (X, 0) > 5 -> false */
5682 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5683
5684 else
5685 /* MIN (X, 0) > -1 -> X > -1 */
5686 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5687
5688 default:
5689 return NULL_TREE;
5690 }
5691 }
5692 \f
5693 /* T is an integer expression that is being multiplied or divided by, or
5694 reduced modulo, a constant C (CODE says which operation and what kind of
5695 division or modulus). See if we can eliminate that operation by folding it with
5696 other operations already in T. WIDE_TYPE, if non-null, is a type that
5697 should be used for the computation if wider than our type.
5698
5699 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5700 (X * 2) + (Y * 4). We must, however, be assured that either the original
5701 expression would not overflow or that overflow is undefined for the type
5702 in the language in question.
5703
5704 If we return a non-null expression, it is an equivalent form of the
5705 original computation, but need not be in the original type.
5706
5707 We set *STRICT_OVERFLOW_P to true if the return value depends on
5708 signed overflow being undefined. Otherwise we do not change
5709 *STRICT_OVERFLOW_P. */
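/* Two concrete instances (illustrative; the modulus case assumes signed
   overflow is undefined, i.e. TYPE_OVERFLOW_UNDEFINED holds):

     (x * 8 + y * 16) / 4   ==>   x * 2 + y * 4
     (x * 6) % 3            ==>   0, with *STRICT_OVERFLOW_P set  */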
5710
5711 static tree
5712 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5713 bool *strict_overflow_p)
5714 {
5715 /* To avoid exponential search depth, refuse to allow recursion past
5716 three levels. Beyond that (1) it's highly unlikely that we'll find
5717 something interesting and (2) we've probably processed it before
5718 when we built the inner expression. */
5719
5720 static int depth;
5721 tree ret;
5722
5723 if (depth > 3)
5724 return NULL;
5725
5726 depth++;
5727 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5728 depth--;
5729
5730 return ret;
5731 }
5732
5733 static tree
5734 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5735 bool *strict_overflow_p)
5736 {
5737 tree type = TREE_TYPE (t);
5738 enum tree_code tcode = TREE_CODE (t);
5739 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5740 > GET_MODE_SIZE (TYPE_MODE (type)))
5741 ? wide_type : type);
5742 tree t1, t2;
5743 int same_p = tcode == code;
5744 tree op0 = NULL_TREE, op1 = NULL_TREE;
5745 bool sub_strict_overflow_p;
5746
5747 /* Don't deal with constants of zero here; they confuse the code below. */
5748 if (integer_zerop (c))
5749 return NULL_TREE;
5750
5751 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5752 op0 = TREE_OPERAND (t, 0);
5753
5754 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5755 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5756
5757 /* Note that we need not handle conditional operations here since fold
5758 already handles those cases. So just do arithmetic here. */
5759 switch (tcode)
5760 {
5761 case INTEGER_CST:
5762 /* For a constant, we can always simplify if we are a multiply
5763 or (for divide and modulus) if it is a multiple of our constant. */
5764 if (code == MULT_EXPR
5765 || integer_zerop (const_binop (TRUNC_MOD_EXPR, t, c)))
5766 return const_binop (code, fold_convert (ctype, t),
5767 fold_convert (ctype, c));
5768 break;
5769
5770 CASE_CONVERT: case NON_LVALUE_EXPR:
5771 /* If op0 is an expression ... */
5772 if ((COMPARISON_CLASS_P (op0)
5773 || UNARY_CLASS_P (op0)
5774 || BINARY_CLASS_P (op0)
5775 || VL_EXP_CLASS_P (op0)
5776 || EXPRESSION_CLASS_P (op0))
5777 /* ... and has wrapping overflow, and its type is smaller
5778 than ctype, then we cannot pass through as widening. */
5779 && ((TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0))
5780 && (TYPE_PRECISION (ctype)
5781 > TYPE_PRECISION (TREE_TYPE (op0))))
5782 /* ... or this is a truncation (t is narrower than op0),
5783 then we cannot pass through this narrowing. */
5784 || (TYPE_PRECISION (type)
5785 < TYPE_PRECISION (TREE_TYPE (op0)))
5786 /* ... or signedness changes for division or modulus,
5787 then we cannot pass through this conversion. */
5788 || (code != MULT_EXPR
5789 && (TYPE_UNSIGNED (ctype)
5790 != TYPE_UNSIGNED (TREE_TYPE (op0))))
5791 /* ... or has undefined overflow while the converted to
5792 type has not, we cannot do the operation in the inner type
5793 as that would introduce undefined overflow. */
5794 || (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0))
5795 && !TYPE_OVERFLOW_UNDEFINED (type))))
5796 break;
5797
5798 /* Pass the constant down and see if we can make a simplification. If
5799 we can, replace this expression with the inner simplification for
5800 possible later conversion to our or some other type. */
5801 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
5802 && TREE_CODE (t2) == INTEGER_CST
5803 && !TREE_OVERFLOW (t2)
5804 && (0 != (t1 = extract_muldiv (op0, t2, code,
5805 code == MULT_EXPR
5806 ? ctype : NULL_TREE,
5807 strict_overflow_p))))
5808 return t1;
5809 break;
5810
5811 case ABS_EXPR:
5812 /* If widening the type changes it from signed to unsigned, then we
5813 must avoid building ABS_EXPR itself as unsigned. */
5814 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
5815 {
5816 tree cstype = (*signed_type_for) (ctype);
5817 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
5818 != 0)
5819 {
5820 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
5821 return fold_convert (ctype, t1);
5822 }
5823 break;
5824 }
5825 /* If the constant is negative, we cannot simplify this. */
5826 if (tree_int_cst_sgn (c) == -1)
5827 break;
5828 /* FALLTHROUGH */
5829 case NEGATE_EXPR:
5830 /* For division and modulus, type can't be unsigned, as e.g.
5831 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
5832 For signed types, even with wrapping overflow, this is fine. */
5833 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
5834 break;
5835 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
5836 != 0)
5837 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
5838 break;
5839
5840 case MIN_EXPR: case MAX_EXPR:
5841 /* If widening the type changes the signedness, then we can't perform
5842 this optimization as that changes the result. */
5843 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
5844 break;
5845
5846 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
5847 sub_strict_overflow_p = false;
5848 if ((t1 = extract_muldiv (op0, c, code, wide_type,
5849 &sub_strict_overflow_p)) != 0
5850 && (t2 = extract_muldiv (op1, c, code, wide_type,
5851 &sub_strict_overflow_p)) != 0)
5852 {
5853 if (tree_int_cst_sgn (c) < 0)
5854 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
5855 if (sub_strict_overflow_p)
5856 *strict_overflow_p = true;
5857 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5858 fold_convert (ctype, t2));
5859 }
5860 break;
5861
5862 case LSHIFT_EXPR: case RSHIFT_EXPR:
5863 /* If the second operand is constant, this is a multiplication
5864 or floor division, by a power of two, so we can treat it that
5865 way unless the multiplier or divisor overflows. Signed
5866 left-shift overflow is implementation-defined rather than
5867 undefined in C90, so do not convert signed left shift into
5868 multiplication. */
5869 if (TREE_CODE (op1) == INTEGER_CST
5870 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
5871 /* const_binop may not detect overflow correctly,
5872 so check for it explicitly here. */
5873 && TYPE_PRECISION (TREE_TYPE (size_one_node)) > TREE_INT_CST_LOW (op1)
5874 && TREE_INT_CST_HIGH (op1) == 0
5875 && 0 != (t1 = fold_convert (ctype,
5876 const_binop (LSHIFT_EXPR,
5877 size_one_node,
5878 op1)))
5879 && !TREE_OVERFLOW (t1))
5880 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
5881 ? MULT_EXPR : FLOOR_DIV_EXPR,
5882 ctype,
5883 fold_convert (ctype, op0),
5884 t1),
5885 c, code, wide_type, strict_overflow_p);
5886 break;
5887
5888 case PLUS_EXPR: case MINUS_EXPR:
5889 /* See if we can eliminate the operation on both sides. If we can, we
5890 can return a new PLUS or MINUS. If we can't, the only remaining
5891 cases where we can do anything are if the second operand is a
5892 constant. */
5893 sub_strict_overflow_p = false;
5894 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
5895 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
5896 if (t1 != 0 && t2 != 0
5897 && (code == MULT_EXPR
5898 /* If not multiplication, we can only do this if both operands
5899 are divisible by c. */
5900 || (multiple_of_p (ctype, op0, c)
5901 && multiple_of_p (ctype, op1, c))))
5902 {
5903 if (sub_strict_overflow_p)
5904 *strict_overflow_p = true;
5905 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
5906 fold_convert (ctype, t2));
5907 }
5908
5909 /* If this was a subtraction, negate OP1 and set it to be an addition.
5910 This simplifies the logic below. */
5911 if (tcode == MINUS_EXPR)
5912 {
5913 tcode = PLUS_EXPR, op1 = negate_expr (op1);
5914 /* If OP1 was not easily negatable, the constant may be OP0. */
5915 if (TREE_CODE (op0) == INTEGER_CST)
5916 {
5917 tree tem = op0;
5918 op0 = op1;
5919 op1 = tem;
5920 tem = t1;
5921 t1 = t2;
5922 t2 = tem;
5923 }
5924 }
5925
5926 if (TREE_CODE (op1) != INTEGER_CST)
5927 break;
5928
5929 /* If either OP1 or C are negative, this optimization is not safe for
5930 some of the division and remainder types while for others we need
5931 to change the code. */
5932 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
5933 {
5934 if (code == CEIL_DIV_EXPR)
5935 code = FLOOR_DIV_EXPR;
5936 else if (code == FLOOR_DIV_EXPR)
5937 code = CEIL_DIV_EXPR;
5938 else if (code != MULT_EXPR
5939 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
5940 break;
5941 }
5942
5943 /* If it's a multiply or a division/modulus operation of a multiple
5944 of our constant, do the operation and verify it doesn't overflow. */
5945 if (code == MULT_EXPR
5946 || integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5947 {
5948 op1 = const_binop (code, fold_convert (ctype, op1),
5949 fold_convert (ctype, c));
5950 /* We allow the constant to overflow with wrapping semantics. */
5951 if (op1 == 0
5952 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
5953 break;
5954 }
5955 else
5956 break;
5957
5958 /* If we have an unsigned type, we cannot widen the operation since it
5959 will change the result if the original computation overflowed. */
5960 if (TYPE_UNSIGNED (ctype) && ctype != type)
5961 break;
5962
5963 /* If we were able to eliminate our operation from the first side,
5964 apply our operation to the second side and reform the PLUS. */
5965 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
5966 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
5967
5968 /* The last case is if we are a multiply. In that case, we can
5969 apply the distributive law to commute the multiply and addition
5970 if the multiplication of the constants doesn't overflow
5971 and overflow is defined. With undefined overflow
5972 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
5973 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
5974 return fold_build2 (tcode, ctype,
5975 fold_build2 (code, ctype,
5976 fold_convert (ctype, op0),
5977 fold_convert (ctype, c)),
5978 op1);
5979
5980 break;
5981
5982 case MULT_EXPR:
5983 /* We have a special case here if we are doing something like
5984 (C * 8) % 4 since we know that's zero. */
5985 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
5986 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
5987 /* If the multiplication can overflow we cannot optimize this. */
5988 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
5989 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
5990 && integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
5991 {
5992 *strict_overflow_p = true;
5993 return omit_one_operand (type, integer_zero_node, op0);
5994 }
5995
5996 /* ... fall through ... */
5997
5998 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
5999 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6000 /* If we can extract our operation from the LHS, do so and return a
6001 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6002 do something only if the second operand is a constant. */
6003 if (same_p
6004 && (t1 = extract_muldiv (op0, c, code, wide_type,
6005 strict_overflow_p)) != 0)
6006 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6007 fold_convert (ctype, op1));
6008 else if (tcode == MULT_EXPR && code == MULT_EXPR
6009 && (t1 = extract_muldiv (op1, c, code, wide_type,
6010 strict_overflow_p)) != 0)
6011 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6012 fold_convert (ctype, t1));
6013 else if (TREE_CODE (op1) != INTEGER_CST)
6014 return 0;
6015
6016 /* If these are the same operation types, we can associate them
6017 assuming no overflow. */
6018 if (tcode == code)
6019 {
6020 double_int mul;
6021 bool overflow_p;
6022 unsigned prec = TYPE_PRECISION (ctype);
6023 bool uns = TYPE_UNSIGNED (ctype);
6024 double_int diop1 = tree_to_double_int (op1).ext (prec, uns);
6025 double_int dic = tree_to_double_int (c).ext (prec, uns);
6026 mul = diop1.mul_with_sign (dic, false, &overflow_p);
6027 overflow_p = ((!uns && overflow_p)
6028 | TREE_OVERFLOW (c) | TREE_OVERFLOW (op1));
6029 if (!double_int_fits_to_tree_p (ctype, mul)
6030 && ((uns && tcode != MULT_EXPR) || !uns))
6031 overflow_p = 1;
6032 if (!overflow_p)
6033 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6034 double_int_to_tree (ctype, mul));
6035 }
6036
6037 /* If these operations "cancel" each other, we have the main
6038 optimizations of this pass, which occur when either constant is a
6039 multiple of the other, in which case we replace this with either an
6040 operation of CODE or TCODE.
6041
6042 If we have an unsigned type, we cannot do this since it will change
6043 the result if the original computation overflowed. */
6044 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6045 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6046 || (tcode == MULT_EXPR
6047 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6048 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6049 && code != MULT_EXPR)))
6050 {
6051 if (integer_zerop (const_binop (TRUNC_MOD_EXPR, op1, c)))
6052 {
6053 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6054 *strict_overflow_p = true;
6055 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6056 fold_convert (ctype,
6057 const_binop (TRUNC_DIV_EXPR,
6058 op1, c)));
6059 }
6060 else if (integer_zerop (const_binop (TRUNC_MOD_EXPR, c, op1)))
6061 {
6062 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6063 *strict_overflow_p = true;
6064 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6065 fold_convert (ctype,
6066 const_binop (TRUNC_DIV_EXPR,
6067 c, op1)));
6068 }
6069 }
6070 break;
6071
6072 default:
6073 break;
6074 }
6075
6076 return 0;
6077 }
6078 \f
6079 /* Return a node which has the indicated constant VALUE (either 0 or
6080 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6081 and is of the indicated TYPE. */
6082
6083 tree
6084 constant_boolean_node (bool value, tree type)
6085 {
6086 if (type == integer_type_node)
6087 return value ? integer_one_node : integer_zero_node;
6088 else if (type == boolean_type_node)
6089 return value ? boolean_true_node : boolean_false_node;
6090 else if (TREE_CODE (type) == VECTOR_TYPE)
6091 return build_vector_from_val (type,
6092 build_int_cst (TREE_TYPE (type),
6093 value ? -1 : 0));
6094 else
6095 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6096 }
6097
6098
6099 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6100 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6101 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6102 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6103 COND is the first argument to CODE; otherwise (as in the example
6104 given here), it is the second argument. TYPE is the type of the
6105 original expression. Return NULL_TREE if no simplification is
6106 possible. */
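/* For example (illustrative): with a constant ARG, "2 + (b ? x : 3)"
   becomes "b ? 2 + x : 5"; the false arm folds to a constant, which is
   what makes the transformation worthwhile. "f () + (b ? x : 3)" is
   left alone, since duplicating the call would require a SAVE_EXPR. */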
6107
6108 static tree
6109 fold_binary_op_with_conditional_arg (location_t loc,
6110 enum tree_code code,
6111 tree type, tree op0, tree op1,
6112 tree cond, tree arg, int cond_first_p)
6113 {
6114 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6115 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6116 tree test, true_value, false_value;
6117 tree lhs = NULL_TREE;
6118 tree rhs = NULL_TREE;
6119 enum tree_code cond_code = COND_EXPR;
6120
6121 if (TREE_CODE (cond) == COND_EXPR
6122 || TREE_CODE (cond) == VEC_COND_EXPR)
6123 {
6124 test = TREE_OPERAND (cond, 0);
6125 true_value = TREE_OPERAND (cond, 1);
6126 false_value = TREE_OPERAND (cond, 2);
6127 /* If this operand throws an exception, then it does not make
6128 sense to try to perform a logical or arithmetic operation
6129 involving it. */
6130 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6131 lhs = true_value;
6132 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6133 rhs = false_value;
6134 }
6135 else
6136 {
6137 tree testtype = TREE_TYPE (cond);
6138 test = cond;
6139 true_value = constant_boolean_node (true, testtype);
6140 false_value = constant_boolean_node (false, testtype);
6141 }
6142
6143 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6144 cond_code = VEC_COND_EXPR;
6145
6146 /* This transformation is only worthwhile if we don't have to wrap ARG
6147 in a SAVE_EXPR and the operation can be simplified without recursing
6148 on at least one of the branches once it's pushed inside the COND_EXPR. */
6149 if (!TREE_CONSTANT (arg)
6150 && (TREE_SIDE_EFFECTS (arg)
6151 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6152 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6153 return NULL_TREE;
6154
6155 arg = fold_convert_loc (loc, arg_type, arg);
6156 if (lhs == 0)
6157 {
6158 true_value = fold_convert_loc (loc, cond_type, true_value);
6159 if (cond_first_p)
6160 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6161 else
6162 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6163 }
6164 if (rhs == 0)
6165 {
6166 false_value = fold_convert_loc (loc, cond_type, false_value);
6167 if (cond_first_p)
6168 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6169 else
6170 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6171 }
6172
6173 /* Check that we have simplified at least one of the branches. */
6174 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6175 return NULL_TREE;
6176
6177 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6178 }
6179
6180 \f
6181 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6182
6183 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6184 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6185 ADDEND is the same as X.
6186
6187 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6188 and finite. The problematic cases are when X is zero, and its mode
6189 has signed zeros. In the case of rounding towards -infinity,
6190 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6191 modes, X + 0 is not the same as X because -0 + 0 is 0. */
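/* Illustration of the problem cases (IEEE 754 semantics assumed):

     -0.0 + 0.0 == +0.0   so "x + 0.0" must not be folded to "x" when
                          x may be -0.0;
     +0.0 - 0.0 == -0.0   under round-towards-minus-infinity, so
                          "x - 0.0" is only safe when sign-dependent
                          rounding is not honored.  */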
6192
6193 bool
6194 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6195 {
6196 if (!real_zerop (addend))
6197 return false;
6198
6199 /* Don't allow the fold with -fsignaling-nans. */
6200 if (HONOR_SNANS (TYPE_MODE (type)))
6201 return false;
6202
6203 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6204 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
6205 return true;
6206
6207 /* In a vector or complex, we would need to check the sign of all zeros. */
6208 if (TREE_CODE (addend) != REAL_CST)
6209 return false;
6210
6211 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6212 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6213 negate = !negate;
6214
6215 /* The mode has signed zeros, and we have to honor their sign.
6216 In this situation, there is only one case we can return true for.
6217 X - 0 is the same as X unless rounding towards -infinity is
6218 supported. */
6219 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (type));
6220 }
6221
6222 /* Subroutine of fold() that checks comparisons of built-in math
6223 functions against real constants.
6224
6225 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6226 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6227 is the type of the result and ARG0 and ARG1 are the operands of the
6228 comparison. ARG1 must be a TREE_REAL_CST.
6229
6230 The function returns the constant folded tree if a simplification
6231 can be made, and NULL_TREE otherwise. */
6232
6233 static tree
6234 fold_mathfn_compare (location_t loc,
6235 enum built_in_function fcode, enum tree_code code,
6236 tree type, tree arg0, tree arg1)
6237 {
6238 REAL_VALUE_TYPE c;
6239
6240 if (BUILTIN_SQRT_P (fcode))
6241 {
6242 tree arg = CALL_EXPR_ARG (arg0, 0);
6243 enum machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6244
6245 c = TREE_REAL_CST (arg1);
6246 if (REAL_VALUE_NEGATIVE (c))
6247 {
6248 /* sqrt(x) < y (likewise <= and ==) is always false, if y is negative. */
6249 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6250 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6251
6252 /* sqrt(x) > y is always true, if y is negative and we
6253 don't care about NaNs, i.e. negative values of x. */
6254 if (code == NE_EXPR || !HONOR_NANS (mode))
6255 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6256
6257 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6258 return fold_build2_loc (loc, GE_EXPR, type, arg,
6259 build_real (TREE_TYPE (arg), dconst0));
6260 }
6261 else if (code == GT_EXPR || code == GE_EXPR)
6262 {
6263 REAL_VALUE_TYPE c2;
6264
6265 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6266 real_convert (&c2, mode, &c2);
6267
6268 if (REAL_VALUE_ISINF (c2))
6269 {
6270 /* sqrt(x) > y is x == +Inf, when y is very large. */
6271 if (HONOR_INFINITIES (mode))
6272 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6273 build_real (TREE_TYPE (arg), c2));
6274
6275 /* sqrt(x) > y is always false, when y is very large
6276 and we don't care about infinities. */
6277 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6278 }
6279
6280 /* sqrt(x) > c is the same as x > c*c. */
6281 return fold_build2_loc (loc, code, type, arg,
6282 build_real (TREE_TYPE (arg), c2));
6283 }
6284 else if (code == LT_EXPR || code == LE_EXPR)
6285 {
6286 REAL_VALUE_TYPE c2;
6287
6288 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6289 real_convert (&c2, mode, &c2);
6290
6291 if (REAL_VALUE_ISINF (c2))
6292 {
6293 /* sqrt(x) < y is always true, when y is a very large
6294 value and we don't care about NaNs or Infinities. */
6295 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6296 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6297
6298 /* sqrt(x) < y is x != +Inf when y is very large and we
6299 don't care about NaNs. */
6300 if (! HONOR_NANS (mode))
6301 return fold_build2_loc (loc, NE_EXPR, type, arg,
6302 build_real (TREE_TYPE (arg), c2));
6303
6304 /* sqrt(x) < y is x >= 0 when y is very large and we
6305 don't care about Infinities. */
6306 if (! HONOR_INFINITIES (mode))
6307 return fold_build2_loc (loc, GE_EXPR, type, arg,
6308 build_real (TREE_TYPE (arg), dconst0));
6309
6310 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6311 arg = save_expr (arg);
6312 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6313 fold_build2_loc (loc, GE_EXPR, type, arg,
6314 build_real (TREE_TYPE (arg),
6315 dconst0)),
6316 fold_build2_loc (loc, NE_EXPR, type, arg,
6317 build_real (TREE_TYPE (arg),
6318 c2)));
6319 }
6320
6321 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6322 if (! HONOR_NANS (mode))
6323 return fold_build2_loc (loc, code, type, arg,
6324 build_real (TREE_TYPE (arg), c2));
6325
6326 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6327 arg = save_expr (arg);
6328 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6329 fold_build2_loc (loc, GE_EXPR, type, arg,
6330 build_real (TREE_TYPE (arg),
6331 dconst0)),
6332 fold_build2_loc (loc, code, type, arg,
6333 build_real (TREE_TYPE (arg),
6334 c2)));
6335 }
6336 }
6337
6338 return NULL_TREE;
6339 }
6340
6341 /* Subroutine of fold() that optimizes comparisons against Infinities,
6342 either +Inf or -Inf.
6343
6344 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6345 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6346 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6347
6348 The function returns the constant folded tree if a simplification
6349 can be made, and NULL_TREE otherwise. */
6350
6351 static tree
6352 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6353 tree arg0, tree arg1)
6354 {
6355 enum machine_mode mode;
6356 REAL_VALUE_TYPE max;
6357 tree temp;
6358 bool neg;
6359
6360 mode = TYPE_MODE (TREE_TYPE (arg0));
6361
6362 /* For negative infinity swap the sense of the comparison. */
6363 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6364 if (neg)
6365 code = swap_tree_comparison (code);
6366
6367 switch (code)
6368 {
6369 case GT_EXPR:
6370 /* x > +Inf is always false, if we ignore sNaNs. */
6371 if (HONOR_SNANS (mode))
6372 return NULL_TREE;
6373 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6374
6375 case LE_EXPR:
6376 /* x <= +Inf is always true, if we don't care about NaNs. */
6377 if (! HONOR_NANS (mode))
6378 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6379
6380 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6381 arg0 = save_expr (arg0);
6382 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6383
6384 case EQ_EXPR:
6385 case GE_EXPR:
6386 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6387 real_maxval (&max, neg, mode);
6388 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6389 arg0, build_real (TREE_TYPE (arg0), max));
6390
6391 case LT_EXPR:
6392 /* x < +Inf is always equal to x <= DBL_MAX. */
6393 real_maxval (&max, neg, mode);
6394 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6395 arg0, build_real (TREE_TYPE (arg0), max));
6396
6397 case NE_EXPR:
6398 /* x != +Inf is always equal to !(x > DBL_MAX). */
6399 real_maxval (&max, neg, mode);
6400 if (! HONOR_NANS (mode))
6401 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6402 arg0, build_real (TREE_TYPE (arg0), max));
6403
6404 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6405 arg0, build_real (TREE_TYPE (arg0), max));
6406 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6407
6408 default:
6409 break;
6410 }
6411
6412 return NULL_TREE;
6413 }
6414
6415 /* Subroutine of fold() that optimizes comparisons of a division by
6416 a nonzero integer constant against an integer constant, i.e.
6417 X/C1 op C2.
6418
6419 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6420 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6421 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6422
6423 The function returns the constant folded tree if a simplification
6424 can be made, and NULL_TREE otherwise. */
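/* Illustrative examples over signed int (truncating division):

     x / 4 == 3   becomes the range check   x >= 12 && x <= 15
     x / 4 > 3    becomes                   x > 15   */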
6425
6426 static tree
6427 fold_div_compare (location_t loc,
6428 enum tree_code code, tree type, tree arg0, tree arg1)
6429 {
6430 tree prod, tmp, hi, lo;
6431 tree arg00 = TREE_OPERAND (arg0, 0);
6432 tree arg01 = TREE_OPERAND (arg0, 1);
6433 double_int val;
6434 bool unsigned_p = TYPE_UNSIGNED (TREE_TYPE (arg0));
6435 bool neg_overflow;
6436 bool overflow;
6437
6438 /* We have to do this the hard way to detect unsigned overflow.
6439 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6440 val = TREE_INT_CST (arg01)
6441 .mul_with_sign (TREE_INT_CST (arg1), unsigned_p, &overflow);
6442 prod = force_fit_type_double (TREE_TYPE (arg00), val, -1, overflow);
6443 neg_overflow = false;
6444
6445 if (unsigned_p)
6446 {
6447 tmp = int_const_binop (MINUS_EXPR, arg01,
6448 build_int_cst (TREE_TYPE (arg01), 1));
6449 lo = prod;
6450
6451 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6452 val = TREE_INT_CST (prod)
6453 .add_with_sign (TREE_INT_CST (tmp), unsigned_p, &overflow);
6454 hi = force_fit_type_double (TREE_TYPE (arg00), val,
6455 -1, overflow | TREE_OVERFLOW (prod));
6456 }
6457 else if (tree_int_cst_sgn (arg01) >= 0)
6458 {
6459 tmp = int_const_binop (MINUS_EXPR, arg01,
6460 build_int_cst (TREE_TYPE (arg01), 1));
6461 switch (tree_int_cst_sgn (arg1))
6462 {
6463 case -1:
6464 neg_overflow = true;
6465 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6466 hi = prod;
6467 break;
6468
6469 case 0:
6470 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6471 hi = tmp;
6472 break;
6473
6474 case 1:
6475 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6476 lo = prod;
6477 break;
6478
6479 default:
6480 gcc_unreachable ();
6481 }
6482 }
6483 else
6484 {
6485 /* A negative divisor reverses the relational operators. */
6486 code = swap_tree_comparison (code);
6487
6488 tmp = int_const_binop (PLUS_EXPR, arg01,
6489 build_int_cst (TREE_TYPE (arg01), 1));
6490 switch (tree_int_cst_sgn (arg1))
6491 {
6492 case -1:
6493 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6494 lo = prod;
6495 break;
6496
6497 case 0:
6498 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6499 lo = tmp;
6500 break;
6501
6502 case 1:
6503 neg_overflow = true;
6504 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6505 hi = prod;
6506 break;
6507
6508 default:
6509 gcc_unreachable ();
6510 }
6511 }
6512
6513 switch (code)
6514 {
6515 case EQ_EXPR:
6516 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6517 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6518 if (TREE_OVERFLOW (hi))
6519 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6520 if (TREE_OVERFLOW (lo))
6521 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6522 return build_range_check (loc, type, arg00, 1, lo, hi);
6523
6524 case NE_EXPR:
6525 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6526 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6527 if (TREE_OVERFLOW (hi))
6528 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6529 if (TREE_OVERFLOW (lo))
6530 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6531 return build_range_check (loc, type, arg00, 0, lo, hi);
6532
6533 case LT_EXPR:
6534 if (TREE_OVERFLOW (lo))
6535 {
6536 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6537 return omit_one_operand_loc (loc, type, tmp, arg00);
6538 }
6539 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6540
6541 case LE_EXPR:
6542 if (TREE_OVERFLOW (hi))
6543 {
6544 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6545 return omit_one_operand_loc (loc, type, tmp, arg00);
6546 }
6547 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6548
6549 case GT_EXPR:
6550 if (TREE_OVERFLOW (hi))
6551 {
6552 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6553 return omit_one_operand_loc (loc, type, tmp, arg00);
6554 }
6555 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6556
6557 case GE_EXPR:
6558 if (TREE_OVERFLOW (lo))
6559 {
6560 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6561 return omit_one_operand_loc (loc, type, tmp, arg00);
6562 }
6563 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6564
6565 default:
6566 break;
6567 }
6568
6569 return NULL_TREE;
6570 }
6571
6572
6573 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6574 equality/inequality test, then return a simplified form of the test
6575 using a sign test. Otherwise return NULL. TYPE is the desired
6576 result type. */
6577
6578 static tree
6579 fold_single_bit_test_into_sign_test (location_t loc,
6580 enum tree_code code, tree arg0, tree arg1,
6581 tree result_type)
6582 {
6583 /* If this is testing a single bit, we can optimize the test. */
6584 if ((code == NE_EXPR || code == EQ_EXPR)
6585 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6586 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6587 {
6588 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6589 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6590 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6591
6592 if (arg00 != NULL_TREE
6593 /* This is only a win if casting to a signed type is cheap,
6594 i.e. when arg00's type is not a partial mode. */
6595 && TYPE_PRECISION (TREE_TYPE (arg00))
6596 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg00))))
6597 {
6598 tree stype = signed_type_for (TREE_TYPE (arg00));
6599 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6600 result_type,
6601 fold_convert_loc (loc, stype, arg00),
6602 build_int_cst (stype, 0));
6603 }
6604 }
6605
6606 return NULL_TREE;
6607 }
6608
6609 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6610 equality/inequality test, then return a simplified form of
6611 the test using shifts and logical operations. Otherwise return
6612 NULL. TYPE is the desired result type. */
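/* Illustrative example: "(x & 8) != 0" becomes "(x >> 3) & 1", and
   "(x & 8) == 0" becomes "((x >> 3) ^ 1) & 1"; when the tested bit is
   the sign bit, the sign-test form from the helper above is used
   instead.  */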
6613
6614 tree
6615 fold_single_bit_test (location_t loc, enum tree_code code,
6616 tree arg0, tree arg1, tree result_type)
6617 {
6618 /* If this is testing a single bit, we can optimize the test. */
6619 if ((code == NE_EXPR || code == EQ_EXPR)
6620 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6621 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6622 {
6623 tree inner = TREE_OPERAND (arg0, 0);
6624 tree type = TREE_TYPE (arg0);
6625 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6626 enum machine_mode operand_mode = TYPE_MODE (type);
6627 int ops_unsigned;
6628 tree signed_type, unsigned_type, intermediate_type;
6629 tree tem, one;
6630
6631 /* First, see if we can fold the single bit test into a sign-bit
6632 test. */
6633 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6634 result_type);
6635 if (tem)
6636 return tem;
6637
6638 /* Otherwise we have (A & C) != 0 where C is a single bit,
6639 convert that into ((A >> C2) & 1), where C2 = log2(C).
6640 Similarly for (A & C) == 0. */
6641
6642 /* If INNER is a right shift of a constant and it plus BITNUM does
6643 not overflow, adjust BITNUM and INNER. */
6644 if (TREE_CODE (inner) == RSHIFT_EXPR
6645 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6646 && host_integerp (TREE_OPERAND (inner, 1), 1)
6647 && bitnum < TYPE_PRECISION (type)
6648 && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
6649 < (unsigned) (TYPE_PRECISION (type) - bitnum)))
6650 {
6651 bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
6652 inner = TREE_OPERAND (inner, 0);
6653 }
6654
6655 /* If we are going to be able to omit the AND below, we must do our
6656 operations as unsigned. If we must use the AND, we have a choice.
6657 Normally unsigned is faster, but for some machines signed is. */
6658 #ifdef LOAD_EXTEND_OP
6659 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6660 && !flag_syntax_only) ? 0 : 1;
6661 #else
6662 ops_unsigned = 1;
6663 #endif
6664
6665 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6666 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6667 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6668 inner = fold_convert_loc (loc, intermediate_type, inner);
6669
6670 if (bitnum != 0)
6671 inner = build2 (RSHIFT_EXPR, intermediate_type,
6672 inner, size_int (bitnum));
6673
6674 one = build_int_cst (intermediate_type, 1);
6675
6676 if (code == EQ_EXPR)
6677 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6678
6679 /* Put the AND last so it can combine with more things. */
6680 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6681
6682 /* Make sure to return the proper type. */
6683 inner = fold_convert_loc (loc, result_type, inner);
6684
6685 return inner;
6686 }
6687 return NULL_TREE;
6688 }
6689
6690 /* Check whether we are allowed to reorder operands arg0 and arg1,
6691 such that the evaluation of arg1 occurs before arg0. */
6692
6693 static bool
6694 reorder_operands_p (const_tree arg0, const_tree arg1)
6695 {
6696 if (! flag_evaluation_order)
6697 return true;
6698 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6699 return true;
6700 return ! TREE_SIDE_EFFECTS (arg0)
6701 && ! TREE_SIDE_EFFECTS (arg1);
6702 }
6703
6704 /* Test whether it is preferable to swap two operands, ARG0 and
6705 ARG1, for example because ARG0 is an integer constant and ARG1
6706 isn't. If REORDER is true, only recommend swapping if we can
6707 evaluate the operands in reverse order. */
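/* Illustrative example: for "5 + x" this returns true, since ARG0 is an
   INTEGER_CST, and callers canonicalize to "x + 5"; comparisons are
   swapped the same way, with the comparison code adjusted by the
   caller (see swap_tree_comparison).  */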
6708
6709 bool
6710 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6711 {
6712 STRIP_SIGN_NOPS (arg0);
6713 STRIP_SIGN_NOPS (arg1);
6714
6715 if (TREE_CODE (arg1) == INTEGER_CST)
6716 return 0;
6717 if (TREE_CODE (arg0) == INTEGER_CST)
6718 return 1;
6719
6720 if (TREE_CODE (arg1) == REAL_CST)
6721 return 0;
6722 if (TREE_CODE (arg0) == REAL_CST)
6723 return 1;
6724
6725 if (TREE_CODE (arg1) == FIXED_CST)
6726 return 0;
6727 if (TREE_CODE (arg0) == FIXED_CST)
6728 return 1;
6729
6730 if (TREE_CODE (arg1) == COMPLEX_CST)
6731 return 0;
6732 if (TREE_CODE (arg0) == COMPLEX_CST)
6733 return 1;
6734
6735 if (TREE_CONSTANT (arg1))
6736 return 0;
6737 if (TREE_CONSTANT (arg0))
6738 return 1;
6739
6740 if (optimize_function_for_size_p (cfun))
6741 return 0;
6742
6743 if (reorder && flag_evaluation_order
6744 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6745 return 0;
6746
6747 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6748 for commutative and comparison operators. Ensuring a canonical
6749 form allows the optimizers to find additional redundancies without
6750 having to explicitly check for both orderings. */
6751 if (TREE_CODE (arg0) == SSA_NAME
6752 && TREE_CODE (arg1) == SSA_NAME
6753 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6754 return 1;
6755
6756 /* Put SSA_NAMEs last. */
6757 if (TREE_CODE (arg1) == SSA_NAME)
6758 return 0;
6759 if (TREE_CODE (arg0) == SSA_NAME)
6760 return 1;
6761
6762 /* Put variables last. */
6763 if (DECL_P (arg1))
6764 return 0;
6765 if (DECL_P (arg0))
6766 return 1;
6767
6768 return 0;
6769 }
6770
6771 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6772 ARG0 is extended to a wider type. */
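/* Illustrative examples, assuming "unsigned char c" widened to int:

     (int) c == 65    becomes   c == 65   (done in the narrower type)
     (int) c == 300   is known to be false, since 300 does not fit in
                      unsigned char.  */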
6773
6774 static tree
6775 fold_widened_comparison (location_t loc, enum tree_code code,
6776 tree type, tree arg0, tree arg1)
6777 {
6778 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6779 tree arg1_unw;
6780 tree shorter_type, outer_type;
6781 tree min, max;
6782 bool above, below;
6783
6784 if (arg0_unw == arg0)
6785 return NULL_TREE;
6786 shorter_type = TREE_TYPE (arg0_unw);
6787
6788 #ifdef HAVE_canonicalize_funcptr_for_compare
6789 /* Disable this optimization if we're casting a function pointer
6790 type on targets that require function pointer canonicalization. */
6791 if (HAVE_canonicalize_funcptr_for_compare
6792 && TREE_CODE (shorter_type) == POINTER_TYPE
6793 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6794 return NULL_TREE;
6795 #endif
6796
6797 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6798 return NULL_TREE;
6799
6800 arg1_unw = get_unwidened (arg1, NULL_TREE);
6801
6802 /* If possible, express the comparison in the shorter mode. */
6803 if ((code == EQ_EXPR || code == NE_EXPR
6804 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6805 && (TREE_TYPE (arg1_unw) == shorter_type
6806 || ((TYPE_PRECISION (shorter_type)
6807 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6808 && (TYPE_UNSIGNED (shorter_type)
6809 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6810 || (TREE_CODE (arg1_unw) == INTEGER_CST
6811 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6812 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6813 && int_fits_type_p (arg1_unw, shorter_type))))
6814 return fold_build2_loc (loc, code, type, arg0_unw,
6815 fold_convert_loc (loc, shorter_type, arg1_unw));
6816
6817 if (TREE_CODE (arg1_unw) != INTEGER_CST
6818 || TREE_CODE (shorter_type) != INTEGER_TYPE
6819 || !int_fits_type_p (arg1_unw, shorter_type))
6820 return NULL_TREE;
6821
6822 /* If we are comparing with an integer that does not fit into the range
6823 of the shorter type, the result is known. */
6824 outer_type = TREE_TYPE (arg1_unw);
6825 min = lower_bound_in_type (outer_type, shorter_type);
6826 max = upper_bound_in_type (outer_type, shorter_type);
6827
6828 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6829 max, arg1_unw));
6830 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
6831 arg1_unw, min));
6832
6833 switch (code)
6834 {
6835 case EQ_EXPR:
6836 if (above || below)
6837 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6838 break;
6839
6840 case NE_EXPR:
6841 if (above || below)
6842 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6843 break;
6844
6845 case LT_EXPR:
6846 case LE_EXPR:
6847 if (above)
6848 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6849 else if (below)
6850 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
break;

6852 case GT_EXPR:
6853 case GE_EXPR:
6854 if (above)
6855 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6856 else if (below)
6857 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
break;

6859 default:
6860 break;
6861 }
6862
6863 return NULL_TREE;
6864 }
6865
6866 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
6867 ARG0 just the signedness is changed. */
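/* Illustrative example, assuming 32-bit int: with "unsigned int u", the
   test "(int) u == -1" becomes "u == 4294967295u", the constant being
   refitted into the inner type.  */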
6868
6869 static tree
6870 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
6871 tree arg0, tree arg1)
6872 {
6873 tree arg0_inner;
6874 tree inner_type, outer_type;
6875
6876 if (!CONVERT_EXPR_P (arg0))
6877 return NULL_TREE;
6878
6879 outer_type = TREE_TYPE (arg0);
6880 arg0_inner = TREE_OPERAND (arg0, 0);
6881 inner_type = TREE_TYPE (arg0_inner);
6882
6883 #ifdef HAVE_canonicalize_funcptr_for_compare
6884 /* Disable this optimization if we're casting a function pointer
6885 type on targets that require function pointer canonicalization. */
6886 if (HAVE_canonicalize_funcptr_for_compare
6887 && TREE_CODE (inner_type) == POINTER_TYPE
6888 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
6889 return NULL_TREE;
6890 #endif
6891
6892 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
6893 return NULL_TREE;
6894
6895 if (TREE_CODE (arg1) != INTEGER_CST
6896 && !(CONVERT_EXPR_P (arg1)
6897 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
6898 return NULL_TREE;
6899
6900 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
6901 && code != NE_EXPR
6902 && code != EQ_EXPR)
6903 return NULL_TREE;
6904
6905 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
6906 return NULL_TREE;
6907
6908 if (TREE_CODE (arg1) == INTEGER_CST)
6909 arg1 = force_fit_type_double (inner_type, tree_to_double_int (arg1),
6910 0, TREE_OVERFLOW (arg1));
6911 else
6912 arg1 = fold_convert_loc (loc, inner_type, arg1);
6913
6914 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
6915 }
6916
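/* Worked example (illustrative, not from the original source): for
int x, the comparison (unsigned int) x == 5U has outer and inner
types of equal precision that differ only in signedness, so the code
above refits the constant 5 into the inner type and folds the
comparison to x == 5. The same is not done for e.g. LT_EXPR, where
changing the signedness would change the result. */
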
6917 /* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
6918 the step of the array. Reconstructs s and delta in the case of s *
6919 delta being an integer constant (and thus already folded). ADDR is
6920 the address. OP1 is the multiplicative expression. If the
6921 function succeeds, the new address expression is returned.
6922 Otherwise NULL_TREE is returned. LOC is the location of the
6923 resulting expression. */
6924
6925 static tree
6926 try_move_mult_to_index (location_t loc, tree addr, tree op1)
6927 {
6928 tree s, delta, step;
6929 tree ref = TREE_OPERAND (addr, 0), pref;
6930 tree ret, pos;
6931 tree itype;
6932 bool mdim = false;
6933
6934 /* Strip the nops that might be added when converting op1 to sizetype. */
6935 STRIP_NOPS (op1);
6936
6937 /* Canonicalize op1 into a possibly non-constant delta
6938 and an INTEGER_CST s. */
6939 if (TREE_CODE (op1) == MULT_EXPR)
6940 {
6941 tree arg0 = TREE_OPERAND (op1, 0), arg1 = TREE_OPERAND (op1, 1);
6942
6943 STRIP_NOPS (arg0);
6944 STRIP_NOPS (arg1);
6945
6946 if (TREE_CODE (arg0) == INTEGER_CST)
6947 {
6948 s = arg0;
6949 delta = arg1;
6950 }
6951 else if (TREE_CODE (arg1) == INTEGER_CST)
6952 {
6953 s = arg1;
6954 delta = arg0;
6955 }
6956 else
6957 return NULL_TREE;
6958 }
6959 else if (TREE_CODE (op1) == INTEGER_CST)
6960 {
6961 delta = op1;
6962 s = NULL_TREE;
6963 }
6964 else
6965 {
6966 /* Otherwise, treat op1 as delta * 1. */
6967 delta = op1;
6968 s = integer_one_node;
6969 }
6970
6971 /* Handle &x.array the same as we would handle &x.array[0]. */
6972 if (TREE_CODE (ref) == COMPONENT_REF
6973 && TREE_CODE (TREE_TYPE (ref)) == ARRAY_TYPE)
6974 {
6975 tree domain;
6976
6977 /* Remember if this was a multi-dimensional array. */
6978 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
6979 mdim = true;
6980
6981 domain = TYPE_DOMAIN (TREE_TYPE (ref));
6982 if (! domain)
6983 goto cont;
6984 itype = TREE_TYPE (domain);
6985
6986 step = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (ref)));
6987 if (TREE_CODE (step) != INTEGER_CST)
6988 goto cont;
6989
6990 if (s)
6991 {
6992 if (! tree_int_cst_equal (step, s))
6993 goto cont;
6994 }
6995 else
6996 {
6997 /* Check whether delta is a multiple of step. */
6998 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
6999 if (! tmp)
7000 goto cont;
7001 delta = tmp;
7002 }
7003
7004 /* Only fold here if we can verify we do not overflow one
7005 dimension of a multi-dimensional array. */
7006 if (mdim)
7007 {
7008 tree tmp;
7009
7010 if (!TYPE_MIN_VALUE (domain)
7011 || !TYPE_MAX_VALUE (domain)
7012 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7013 goto cont;
7014
7015 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7016 fold_convert_loc (loc, itype,
7017 TYPE_MIN_VALUE (domain)),
7018 fold_convert_loc (loc, itype, delta));
7019 if (TREE_CODE (tmp) != INTEGER_CST
7020 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7021 goto cont;
7022 }
7023
7024 /* We found a suitable component reference. */
7025
7026 pref = TREE_OPERAND (addr, 0);
7027 ret = copy_node (pref);
7028 SET_EXPR_LOCATION (ret, loc);
7029
7030 ret = build4_loc (loc, ARRAY_REF, TREE_TYPE (TREE_TYPE (ref)), ret,
7031 fold_build2_loc
7032 (loc, PLUS_EXPR, itype,
7033 fold_convert_loc (loc, itype,
7034 TYPE_MIN_VALUE
7035 (TYPE_DOMAIN (TREE_TYPE (ref)))),
7036 fold_convert_loc (loc, itype, delta)),
7037 NULL_TREE, NULL_TREE);
7038 return build_fold_addr_expr_loc (loc, ret);
7039 }
7040
7041 cont:
7042
7043 for (;; ref = TREE_OPERAND (ref, 0))
7044 {
7045 if (TREE_CODE (ref) == ARRAY_REF)
7046 {
7047 tree domain;
7048
7049 /* Remember if this was a multi-dimensional array. */
7050 if (TREE_CODE (TREE_OPERAND (ref, 0)) == ARRAY_REF)
7051 mdim = true;
7052
7053 domain = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (ref, 0)));
7054 if (! domain)
7055 continue;
7056 itype = TREE_TYPE (domain);
7057
7058 step = array_ref_element_size (ref);
7059 if (TREE_CODE (step) != INTEGER_CST)
7060 continue;
7061
7062 if (s)
7063 {
7064 if (! tree_int_cst_equal (step, s))
7065 continue;
7066 }
7067 else
7068 {
7069 /* Check whether delta is a multiple of step. */
7070 tree tmp = div_if_zero_remainder (EXACT_DIV_EXPR, op1, step);
7071 if (! tmp)
7072 continue;
7073 delta = tmp;
7074 }
7075
7076 /* Only fold here if we can verify we do not overflow one
7077 dimension of a multi-dimensional array. */
7078 if (mdim)
7079 {
7080 tree tmp;
7081
7082 if (TREE_CODE (TREE_OPERAND (ref, 1)) != INTEGER_CST
7083 || !TYPE_MAX_VALUE (domain)
7084 || TREE_CODE (TYPE_MAX_VALUE (domain)) != INTEGER_CST)
7085 continue;
7086
7087 tmp = fold_binary_loc (loc, PLUS_EXPR, itype,
7088 fold_convert_loc (loc, itype,
7089 TREE_OPERAND (ref, 1)),
7090 fold_convert_loc (loc, itype, delta));
7091 if (!tmp
7092 || TREE_CODE (tmp) != INTEGER_CST
7093 || tree_int_cst_lt (TYPE_MAX_VALUE (domain), tmp))
7094 continue;
7095 }
7096
7097 break;
7098 }
7099 else
7100 mdim = false;
7101
7102 if (!handled_component_p (ref))
7103 return NULL_TREE;
7104 }
7105
7106 /* We found a suitable array reference. So copy everything up to it,
7107 and replace the index. */
7108
7109 pref = TREE_OPERAND (addr, 0);
7110 ret = copy_node (pref);
7111 SET_EXPR_LOCATION (ret, loc);
7112 pos = ret;
7113
7114 while (pref != ref)
7115 {
7116 pref = TREE_OPERAND (pref, 0);
7117 TREE_OPERAND (pos, 0) = copy_node (pref);
7118 pos = TREE_OPERAND (pos, 0);
7119 }
7120
7121 TREE_OPERAND (pos, 1)
7122 = fold_build2_loc (loc, PLUS_EXPR, itype,
7123 fold_convert_loc (loc, itype, TREE_OPERAND (pos, 1)),
7124 fold_convert_loc (loc, itype, delta));
7125 return fold_build1_loc (loc, ADDR_EXPR, TREE_TYPE (addr), ret);
7126 }
7127
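/* Worked example (illustrative, not from the original source,
assuming 4-byte int): for int a[10], the expression &a[1] p+ 4 * d
arrives here with addr = &a[1] and op1 = 4 * d. The element size
(step) of a is 4, which matches s = 4, so the multiplication is
moved into the index and the result is &a[1 + d]. A plain constant
offset such as &a[1] p+ 8 takes the delta-is-a-multiple-of-step
path and folds to &a[3]. */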
7128
7129 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7130 means A >= Y && A != MAX, but in this case we know that
7131 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7132
7133 static tree
7134 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7135 {
7136 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7137
7138 if (TREE_CODE (bound) == LT_EXPR)
7139 a = TREE_OPERAND (bound, 0);
7140 else if (TREE_CODE (bound) == GT_EXPR)
7141 a = TREE_OPERAND (bound, 1);
7142 else
7143 return NULL_TREE;
7144
7145 typea = TREE_TYPE (a);
7146 if (!INTEGRAL_TYPE_P (typea)
7147 && !POINTER_TYPE_P (typea))
7148 return NULL_TREE;
7149
7150 if (TREE_CODE (ineq) == LT_EXPR)
7151 {
7152 a1 = TREE_OPERAND (ineq, 1);
7153 y = TREE_OPERAND (ineq, 0);
7154 }
7155 else if (TREE_CODE (ineq) == GT_EXPR)
7156 {
7157 a1 = TREE_OPERAND (ineq, 0);
7158 y = TREE_OPERAND (ineq, 1);
7159 }
7160 else
7161 return NULL_TREE;
7162
7163 if (TREE_TYPE (a1) != typea)
7164 return NULL_TREE;
7165
7166 if (POINTER_TYPE_P (typea))
7167 {
7168 /* Convert the pointers to a signed integer type before taking the difference. */
7169 tree ta = fold_convert_loc (loc, ssizetype, a);
7170 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7171 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7172 }
7173 else
7174 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7175
7176 if (!diff || !integer_onep (diff))
7177 return NULL_TREE;
7178
7179 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7180 }
7181
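/* Worked example (illustrative, not from the original source): for
the expression a < x && a + 1 > y, BOUND is a < x and INEQ is
a + 1 > y. The difference (a + 1) - a folds to 1, so the function
rewrites the second inequality as a >= y; this is safe because the
first conjunct already guarantees that a + 1 cannot have wrapped
past the maximum value. */
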
7182 /* Fold a sum or difference of at least one multiplication.
7183 Returns the folded tree or NULL if no simplification could be made. */
7184
7185 static tree
7186 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7187 tree arg0, tree arg1)
7188 {
7189 tree arg00, arg01, arg10, arg11;
7190 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7191
7192 /* (A * C) +- (B * C) -> (A+-B) * C.
7193 (A * C) +- A -> A * (C+-1).
7194 We are most concerned about the case where C is a constant,
7195 but other combinations show up during loop reduction. Since
7196 it is not difficult, try all four possibilities. */
7197
7198 if (TREE_CODE (arg0) == MULT_EXPR)
7199 {
7200 arg00 = TREE_OPERAND (arg0, 0);
7201 arg01 = TREE_OPERAND (arg0, 1);
7202 }
7203 else if (TREE_CODE (arg0) == INTEGER_CST)
7204 {
7205 arg00 = build_one_cst (type);
7206 arg01 = arg0;
7207 }
7208 else
7209 {
7210 /* We cannot generate constant 1 for fract. */
7211 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7212 return NULL_TREE;
7213 arg00 = arg0;
7214 arg01 = build_one_cst (type);
7215 }
7216 if (TREE_CODE (arg1) == MULT_EXPR)
7217 {
7218 arg10 = TREE_OPERAND (arg1, 0);
7219 arg11 = TREE_OPERAND (arg1, 1);
7220 }
7221 else if (TREE_CODE (arg1) == INTEGER_CST)
7222 {
7223 arg10 = build_one_cst (type);
7224 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7225 the purpose of this canonicalization. */
7226 if (TREE_INT_CST_HIGH (arg1) == -1
7227 && negate_expr_p (arg1)
7228 && code == PLUS_EXPR)
7229 {
7230 arg11 = negate_expr (arg1);
7231 code = MINUS_EXPR;
7232 }
7233 else
7234 arg11 = arg1;
7235 }
7236 else
7237 {
7238 /* We cannot generate constant 1 for fract. */
7239 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7240 return NULL_TREE;
7241 arg10 = arg1;
7242 arg11 = build_one_cst (type);
7243 }
7244 same = NULL_TREE;
7245
7246 if (operand_equal_p (arg01, arg11, 0))
7247 same = arg01, alt0 = arg00, alt1 = arg10;
7248 else if (operand_equal_p (arg00, arg10, 0))
7249 same = arg00, alt0 = arg01, alt1 = arg11;
7250 else if (operand_equal_p (arg00, arg11, 0))
7251 same = arg00, alt0 = arg01, alt1 = arg10;
7252 else if (operand_equal_p (arg01, arg10, 0))
7253 same = arg01, alt0 = arg00, alt1 = arg11;
7254
7255 /* No identical multiplicands; see if we can find a common
7256 power-of-two factor in non-power-of-two multiplies. This
7257 can help in multi-dimensional array access. */
7258 else if (host_integerp (arg01, 0)
7259 && host_integerp (arg11, 0))
7260 {
7261 HOST_WIDE_INT int01, int11, tmp;
7262 bool swap = false;
7263 tree maybe_same;
7264 int01 = TREE_INT_CST_LOW (arg01);
7265 int11 = TREE_INT_CST_LOW (arg11);
7266
7267 /* Move min of absolute values to int11. */
7268 if (absu_hwi (int01) < absu_hwi (int11))
7269 {
7270 tmp = int01, int01 = int11, int11 = tmp;
7271 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7272 maybe_same = arg01;
7273 swap = true;
7274 }
7275 else
7276 maybe_same = arg11;
7277
7278 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7279 /* The remainder should not be a constant, otherwise we
7280 would end up folding i * 4 + 2 to (i * 2 + 1) * 2, which
7281 increases the number of multiplications needed. */
7282 && TREE_CODE (arg10) != INTEGER_CST)
7283 {
7284 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7285 build_int_cst (TREE_TYPE (arg00),
7286 int01 / int11));
7287 alt1 = arg10;
7288 same = maybe_same;
7289 if (swap)
7290 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7291 }
7292 }
7293
7294 if (same)
7295 return fold_build2_loc (loc, MULT_EXPR, type,
7296 fold_build2_loc (loc, code, type,
7297 fold_convert_loc (loc, type, alt0),
7298 fold_convert_loc (loc, type, alt1)),
7299 fold_convert_loc (loc, type, same));
7300
7301 return NULL_TREE;
7302 }
7303
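/* Worked example (illustrative, not from the original source): for
x * 12 + y * 4 the multiplicands are not equal, but both constants
fit in a HOST_WIDE_INT, 4 is a power of two, and 12 % 4 == 0, so the
power-of-two path above rewrites the sum as (x * 3 + y) * 4. The
simpler identical-multiplicand case turns x * c + y * c directly
into (x + y) * c. */
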
7304 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7305 specified by EXPR into the buffer PTR of length LEN bytes.
7306 Return the number of bytes placed in the buffer, or zero
7307 upon failure. */
7308
7309 static int
7310 native_encode_int (const_tree expr, unsigned char *ptr, int len)
7311 {
7312 tree type = TREE_TYPE (expr);
7313 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7314 int byte, offset, word, words;
7315 unsigned char value;
7316
7317 if (total_bytes > len)
7318 return 0;
7319 words = total_bytes / UNITS_PER_WORD;
7320
7321 for (byte = 0; byte < total_bytes; byte++)
7322 {
7323 int bitpos = byte * BITS_PER_UNIT;
7324 if (bitpos < HOST_BITS_PER_WIDE_INT)
7325 value = (unsigned char) (TREE_INT_CST_LOW (expr) >> bitpos);
7326 else
7327 value = (unsigned char) (TREE_INT_CST_HIGH (expr)
7328 >> (bitpos - HOST_BITS_PER_WIDE_INT));
7329
7330 if (total_bytes > UNITS_PER_WORD)
7331 {
7332 word = byte / UNITS_PER_WORD;
7333 if (WORDS_BIG_ENDIAN)
7334 word = (words - 1) - word;
7335 offset = word * UNITS_PER_WORD;
7336 if (BYTES_BIG_ENDIAN)
7337 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7338 else
7339 offset += byte % UNITS_PER_WORD;
7340 }
7341 else
7342 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7343 ptr[offset] = value;
7344 }
7345 return total_bytes;
7346 }
7347
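/* Illustrative sketch (not part of the original file): a stand-alone
model of the byte extraction loop above for the common case of a
little-endian host and target where total_bytes <= UNITS_PER_WORD,
so OFFSET is simply BYTE. Assumes an 8-bit char. */
#if 0
#include <stdio.h>

static void
encode_le_int (unsigned long long v, unsigned char *ptr, int total_bytes)
{
  int byte;

  /* Least significant byte first, mirroring the bitpos shifts.  */
  for (byte = 0; byte < total_bytes; byte++)
    ptr[byte] = (unsigned char) (v >> (byte * 8));
}

int
main (void)
{
  unsigned char buf[4];
  int i;

  encode_le_int (0x11223344u, buf, 4);
  for (i = 0; i < 4; i++)
    printf ("%02x ", buf[i]); /* prints "44 33 22 11 " */
  printf ("\n");
  return 0;
}
#endif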
7348
7349 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7350 specified by EXPR into the buffer PTR of length LEN bytes.
7351 Return the number of bytes placed in the buffer, or zero
7352 upon failure. */
7353
7354 static int
7355 native_encode_fixed (const_tree expr, unsigned char *ptr, int len)
7356 {
7357 tree type = TREE_TYPE (expr);
7358 enum machine_mode mode = TYPE_MODE (type);
7359 int total_bytes = GET_MODE_SIZE (mode);
7360 FIXED_VALUE_TYPE value;
7361 tree i_value, i_type;
7362
7363 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7364 return 0;
7365
7366 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7367
7368 if (NULL_TREE == i_type
7369 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7370 return 0;
7371
7372 value = TREE_FIXED_CST (expr);
7373 i_value = double_int_to_tree (i_type, value.data);
7374
7375 return native_encode_int (i_value, ptr, len);
7376 }
7377
7378
7379 /* Subroutine of native_encode_expr. Encode the REAL_CST
7380 specified by EXPR into the buffer PTR of length LEN bytes.
7381 Return the number of bytes placed in the buffer, or zero
7382 upon failure. */
7383
7384 static int
7385 native_encode_real (const_tree expr, unsigned char *ptr, int len)
7386 {
7387 tree type = TREE_TYPE (expr);
7388 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7389 int byte, offset, word, words, bitpos;
7390 unsigned char value;
7391
7392 /* There are always 32 bits in each long, no matter the size of
7393 the host's long. We handle floating point representations with
7394 up to 192 bits. */
7395 long tmp[6];
7396
7397 if (total_bytes > len)
7398 return 0;
7399 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7400
7401 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7402
7403 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7404 bitpos += BITS_PER_UNIT)
7405 {
7406 byte = (bitpos / BITS_PER_UNIT) & 3;
7407 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7408
7409 if (UNITS_PER_WORD < 4)
7410 {
7411 word = byte / UNITS_PER_WORD;
7412 if (WORDS_BIG_ENDIAN)
7413 word = (words - 1) - word;
7414 offset = word * UNITS_PER_WORD;
7415 if (BYTES_BIG_ENDIAN)
7416 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7417 else
7418 offset += byte % UNITS_PER_WORD;
7419 }
7420 else
7421 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7422 ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)] = value;
7423 }
7424 return total_bytes;
7425 }
7426
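/* Worked example (illustrative, not from the original source): for a
4-byte IEEE single such as 1.0f, real_to_target above fills tmp[0]
with 0x3f800000 and the loop then scatters its four bytes into PTR;
on a little-endian target the buffer ends up as 00 00 80 3f. The
tmp[6] array of 32-bit chunks is what limits this routine to
192-bit floating point representations. */
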
7427 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7428 specified by EXPR into the buffer PTR of length LEN bytes.
7429 Return the number of bytes placed in the buffer, or zero
7430 upon failure. */
7431
7432 static int
7433 native_encode_complex (const_tree expr, unsigned char *ptr, int len)
7434 {
7435 int rsize, isize;
7436 tree part;
7437
7438 part = TREE_REALPART (expr);
7439 rsize = native_encode_expr (part, ptr, len);
7440 if (rsize == 0)
7441 return 0;
7442 part = TREE_IMAGPART (expr);
7443 isize = native_encode_expr (part, ptr+rsize, len-rsize);
7444 if (isize != rsize)
7445 return 0;
7446 return rsize + isize;
7447 }
7448
7449
7450 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7451 specified by EXPR into the buffer PTR of length LEN bytes.
7452 Return the number of bytes placed in the buffer, or zero
7453 upon failure. */
7454
7455 static int
7456 native_encode_vector (const_tree expr, unsigned char *ptr, int len)
7457 {
7458 unsigned i, count;
7459 int size, offset;
7460 tree itype, elem;
7461
7462 offset = 0;
7463 count = VECTOR_CST_NELTS (expr);
7464 itype = TREE_TYPE (TREE_TYPE (expr));
7465 size = GET_MODE_SIZE (TYPE_MODE (itype));
7466 for (i = 0; i < count; i++)
7467 {
7468 elem = VECTOR_CST_ELT (expr, i);
7469 if (native_encode_expr (elem, ptr+offset, len-offset) != size)
7470 return 0;
7471 offset += size;
7472 }
7473 return offset;
7474 }
7475
7476
7477 /* Subroutine of native_encode_expr. Encode the STRING_CST
7478 specified by EXPR into the buffer PTR of length LEN bytes.
7479 Return the number of bytes placed in the buffer, or zero
7480 upon failure. */
7481
7482 static int
7483 native_encode_string (const_tree expr, unsigned char *ptr, int len)
7484 {
7485 tree type = TREE_TYPE (expr);
7486 HOST_WIDE_INT total_bytes;
7487
7488 if (TREE_CODE (type) != ARRAY_TYPE
7489 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7490 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7491 || !host_integerp (TYPE_SIZE_UNIT (type), 0))
7492 return 0;
7493 total_bytes = tree_low_cst (TYPE_SIZE_UNIT (type), 0);
7494 if (total_bytes > len)
7495 return 0;
7496 if (TREE_STRING_LENGTH (expr) < total_bytes)
7497 {
7498 memcpy (ptr, TREE_STRING_POINTER (expr), TREE_STRING_LENGTH (expr));
7499 memset (ptr + TREE_STRING_LENGTH (expr), 0,
7500 total_bytes - TREE_STRING_LENGTH (expr));
7501 }
7502 else
7503 memcpy (ptr, TREE_STRING_POINTER (expr), total_bytes);
7504 return total_bytes;
7505 }
7506
7507
7508 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7509 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST
7510 specified by EXPR into the buffer PTR of length LEN bytes. Return
7511 the number of bytes placed in the buffer, or zero upon failure. */
7512
7513 int
7514 native_encode_expr (const_tree expr, unsigned char *ptr, int len)
7515 {
7516 switch (TREE_CODE (expr))
7517 {
7518 case INTEGER_CST:
7519 return native_encode_int (expr, ptr, len);
7520
7521 case REAL_CST:
7522 return native_encode_real (expr, ptr, len);
7523
7524 case FIXED_CST:
7525 return native_encode_fixed (expr, ptr, len);
7526
7527 case COMPLEX_CST:
7528 return native_encode_complex (expr, ptr, len);
7529
7530 case VECTOR_CST:
7531 return native_encode_vector (expr, ptr, len);
7532
7533 case STRING_CST:
7534 return native_encode_string (expr, ptr, len);
7535
7536 default:
7537 return 0;
7538 }
7539 }
7540
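/* Usage note (illustrative, not from the original source): callers
such as fold_view_convert_expr below pair this routine with
native_interpret_expr to reinterpret a constant's bytes in another
type, e.g. encoding an INTEGER_CST into a buffer and reading it back
as a REAL_CST. A zero return means the constant could not be
encoded and the caller must give up on the fold. */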
7541
7542 /* Subroutine of native_interpret_expr. Interpret the contents of
7543 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7544 If the buffer cannot be interpreted, return NULL_TREE. */
7545
7546 static tree
7547 native_interpret_int (tree type, const unsigned char *ptr, int len)
7548 {
7549 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7550 double_int result;
7551
7552 if (total_bytes > len
7553 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7554 return NULL_TREE;
7555
7556 result = double_int::from_buffer (ptr, total_bytes);
7557
7558 return double_int_to_tree (type, result);
7559 }
7560
7561
7562 /* Subroutine of native_interpret_expr. Interpret the contents of
7563 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7564 If the buffer cannot be interpreted, return NULL_TREE. */
7565
7566 static tree
7567 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7568 {
7569 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7570 double_int result;
7571 FIXED_VALUE_TYPE fixed_value;
7572
7573 if (total_bytes > len
7574 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7575 return NULL_TREE;
7576
7577 result = double_int::from_buffer (ptr, total_bytes);
7578 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7579
7580 return build_fixed (type, fixed_value);
7581 }
7582
7583
7584 /* Subroutine of native_interpret_expr. Interpret the contents of
7585 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7586 If the buffer cannot be interpreted, return NULL_TREE. */
7587
7588 static tree
7589 native_interpret_real (tree type, const unsigned char *ptr, int len)
7590 {
7591 enum machine_mode mode = TYPE_MODE (type);
7592 int total_bytes = GET_MODE_SIZE (mode);
7593 int byte, offset, word, words, bitpos;
7594 unsigned char value;
7595 /* There are always 32 bits in each long, no matter the size of
7596 the host's long. We handle floating point representations with
7597 up to 192 bits. */
7598 REAL_VALUE_TYPE r;
7599 long tmp[6];
7600
7602 if (total_bytes > len || total_bytes > 24)
7603 return NULL_TREE;
7604 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7605
7606 memset (tmp, 0, sizeof (tmp));
7607 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7608 bitpos += BITS_PER_UNIT)
7609 {
7610 byte = (bitpos / BITS_PER_UNIT) & 3;
7611 if (UNITS_PER_WORD < 4)
7612 {
7613 word = byte / UNITS_PER_WORD;
7614 if (WORDS_BIG_ENDIAN)
7615 word = (words - 1) - word;
7616 offset = word * UNITS_PER_WORD;
7617 if (BYTES_BIG_ENDIAN)
7618 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7619 else
7620 offset += byte % UNITS_PER_WORD;
7621 }
7622 else
7623 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7624 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7625
7626 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7627 }
7628
7629 real_from_target (&r, tmp, mode);
7630 return build_real (type, r);
7631 }
7632
7633
7634 /* Subroutine of native_interpret_expr. Interpret the contents of
7635 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7636 If the buffer cannot be interpreted, return NULL_TREE. */
7637
7638 static tree
7639 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7640 {
7641 tree etype, rpart, ipart;
7642 int size;
7643
7644 etype = TREE_TYPE (type);
7645 size = GET_MODE_SIZE (TYPE_MODE (etype));
7646 if (size * 2 > len)
7647 return NULL_TREE;
7648 rpart = native_interpret_expr (etype, ptr, size);
7649 if (!rpart)
7650 return NULL_TREE;
7651 ipart = native_interpret_expr (etype, ptr+size, size);
7652 if (!ipart)
7653 return NULL_TREE;
7654 return build_complex (type, rpart, ipart);
7655 }
7656
7657
7658 /* Subroutine of native_interpret_expr. Interpret the contents of
7659 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7660 If the buffer cannot be interpreted, return NULL_TREE. */
7661
7662 static tree
7663 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7664 {
7665 tree etype, elem;
7666 int i, size, count;
7667 tree *elements;
7668
7669 etype = TREE_TYPE (type);
7670 size = GET_MODE_SIZE (TYPE_MODE (etype));
7671 count = TYPE_VECTOR_SUBPARTS (type);
7672 if (size * count > len)
7673 return NULL_TREE;
7674
7675 elements = XALLOCAVEC (tree, count);
7676 for (i = count - 1; i >= 0; i--)
7677 {
7678 elem = native_interpret_expr (etype, ptr+(i*size), size);
7679 if (!elem)
7680 return NULL_TREE;
7681 elements[i] = elem;
7682 }
7683 return build_vector (type, elements);
7684 }
7685
7686
7687 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7688 the buffer PTR of length LEN as a constant of type TYPE. For
7689 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7690 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7691 return NULL_TREE. */
7692
7693 tree
7694 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7695 {
7696 switch (TREE_CODE (type))
7697 {
7698 case INTEGER_TYPE:
7699 case ENUMERAL_TYPE:
7700 case BOOLEAN_TYPE:
7701 case POINTER_TYPE:
7702 case REFERENCE_TYPE:
7703 return native_interpret_int (type, ptr, len);
7704
7705 case REAL_TYPE:
7706 return native_interpret_real (type, ptr, len);
7707
7708 case FIXED_POINT_TYPE:
7709 return native_interpret_fixed (type, ptr, len);
7710
7711 case COMPLEX_TYPE:
7712 return native_interpret_complex (type, ptr, len);
7713
7714 case VECTOR_TYPE:
7715 return native_interpret_vector (type, ptr, len);
7716
7717 default:
7718 return NULL_TREE;
7719 }
7720 }
7721
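/* Worked example (illustrative, not from the original source): on a
little-endian target the 4-byte buffer 00 00 80 3f interpreted with
a REAL_TYPE of SFmode yields the REAL_CST 1.0, the inverse of the
native_encode_real example above; the same buffer interpreted as a
32-bit INTEGER_TYPE yields 0x3f800000 (1065353216). */
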
7722 /* Returns true if we can interpret the contents of a native encoding
7723 as TYPE. */
7724
7725 static bool
7726 can_native_interpret_type_p (tree type)
7727 {
7728 switch (TREE_CODE (type))
7729 {
7730 case INTEGER_TYPE:
7731 case ENUMERAL_TYPE:
7732 case BOOLEAN_TYPE:
7733 case POINTER_TYPE:
7734 case REFERENCE_TYPE:
7735 case FIXED_POINT_TYPE:
7736 case REAL_TYPE:
7737 case COMPLEX_TYPE:
7738 case VECTOR_TYPE:
7739 return true;
7740 default:
7741 return false;
7742 }
7743 }
7744
7745 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7746 TYPE at compile-time. If we're unable to perform the conversion
7747 return NULL_TREE. */
7748
7749 static tree
7750 fold_view_convert_expr (tree type, tree expr)
7751 {
7752 /* We support up to 512-bit values (for V8DFmode). */
7753 unsigned char buffer[64];
7754 int len;
7755
7756 /* Check that the host and target are sane. */
7757 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7758 return NULL_TREE;
7759
7760 len = native_encode_expr (expr, buffer, sizeof (buffer));
7761 if (len == 0)
7762 return NULL_TREE;
7763
7764 return native_interpret_expr (type, buffer, len);
7765 }
7766
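/* Illustrative sketch (not part of the original file): the run-time
analogue of the compile-time reinterpretation performed above is a
memcpy between objects of the two types. Assumes a 32-bit unsigned
int and IEEE single precision float. */
#if 0
#include <stdio.h>
#include <string.h>

int
main (void)
{
  /* VIEW_CONVERT_EXPR <float> applied to the constant 0x3f800000
     produces 1.0f, just as this type pun does at run time.  */
  unsigned int bits = 0x3f800000u;
  float f;

  memcpy (&f, &bits, sizeof f);
  printf ("%f\n", f); /* prints 1.000000 */
  return 0;
}
#endif
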
7767 /* Build an expression for the address of T. Folds away INDIRECT_REF
7768 to avoid confusing the gimplify process. */
7769
7770 tree
7771 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7772 {
7773 /* The size of the object is not relevant when talking about its address. */
7774 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7775 t = TREE_OPERAND (t, 0);
7776
7777 if (TREE_CODE (t) == INDIRECT_REF)
7778 {
7779 t = TREE_OPERAND (t, 0);
7780
7781 if (TREE_TYPE (t) != ptrtype)
7782 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7783 }
7784 else if (TREE_CODE (t) == MEM_REF
7785 && integer_zerop (TREE_OPERAND (t, 1)))
7786 return TREE_OPERAND (t, 0);
7787 else if (TREE_CODE (t) == MEM_REF
7788 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7789 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7790 TREE_OPERAND (t, 0),
7791 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7792 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7793 {
7794 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7795
7796 if (TREE_TYPE (t) != ptrtype)
7797 t = fold_convert_loc (loc, ptrtype, t);
7798 }
7799 else
7800 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7801
7802 return t;
7803 }
7804
7805 /* Build an expression for the address of T. */
7806
7807 tree
7808 build_fold_addr_expr_loc (location_t loc, tree t)
7809 {
7810 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7811
7812 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7813 }
7814
7815 static bool vec_cst_ctor_to_array (tree, tree *);
7816
7817 /* Fold a unary expression of code CODE and type TYPE with operand
7818 OP0. Return the folded expression if folding is successful.
7819 Otherwise, return NULL_TREE. */
7820
7821 tree
7822 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7823 {
7824 tree tem;
7825 tree arg0;
7826 enum tree_code_class kind = TREE_CODE_CLASS (code);
7827
7828 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7829 && TREE_CODE_LENGTH (code) == 1);
7830
7831 arg0 = op0;
7832 if (arg0)
7833 {
7834 if (CONVERT_EXPR_CODE_P (code)
7835 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7836 {
7837 /* Don't use STRIP_NOPS, because signedness of argument type
7838 matters. */
7839 STRIP_SIGN_NOPS (arg0);
7840 }
7841 else
7842 {
7843 /* Strip any conversions that don't change the mode. This
7844 is safe for every expression, except for a comparison
7845 expression because its signedness is derived from its
7846 operands.
7847
7848 Note that this is done as an internal manipulation within
7849 the constant folder, in order to find the simplest
7850 representation of the arguments so that their form can be
7851 studied. In any case, the appropriate type conversions
7852 should be put back in the tree that will get out of the
7853 constant folder. */
7854 STRIP_NOPS (arg0);
7855 }
7856 }
7857
7858 if (TREE_CODE_CLASS (code) == tcc_unary)
7859 {
7860 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7861 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7862 fold_build1_loc (loc, code, type,
7863 fold_convert_loc (loc, TREE_TYPE (op0),
7864 TREE_OPERAND (arg0, 1))));
7865 else if (TREE_CODE (arg0) == COND_EXPR)
7866 {
7867 tree arg01 = TREE_OPERAND (arg0, 1);
7868 tree arg02 = TREE_OPERAND (arg0, 2);
7869 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7870 arg01 = fold_build1_loc (loc, code, type,
7871 fold_convert_loc (loc,
7872 TREE_TYPE (op0), arg01));
7873 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7874 arg02 = fold_build1_loc (loc, code, type,
7875 fold_convert_loc (loc,
7876 TREE_TYPE (op0), arg02));
7877 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7878 arg01, arg02);
7879
7880 /* If this was a conversion, and all we did was to move it
7881 inside the COND_EXPR, bring it back out. But leave it if
7882 it is a conversion from integer to integer and the
7883 result precision is no wider than a word since such a
7884 conversion is cheap and may be optimized away by combine,
7885 while it couldn't if it were outside the COND_EXPR. Then return
7886 so we don't get into an infinite recursion loop taking the
7887 conversion out and then back in. */
7888
7889 if ((CONVERT_EXPR_CODE_P (code)
7890 || code == NON_LVALUE_EXPR)
7891 && TREE_CODE (tem) == COND_EXPR
7892 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7893 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7894 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7895 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7896 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7897 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7898 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7899 && (INTEGRAL_TYPE_P
7900 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7901 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7902 || flag_syntax_only))
7903 tem = build1_loc (loc, code, type,
7904 build3 (COND_EXPR,
7905 TREE_TYPE (TREE_OPERAND
7906 (TREE_OPERAND (tem, 1), 0)),
7907 TREE_OPERAND (tem, 0),
7908 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7909 TREE_OPERAND (TREE_OPERAND (tem, 2),
7910 0)));
7911 return tem;
7912 }
7913 }
7914
7915 switch (code)
7916 {
7917 case PAREN_EXPR:
7918 /* Re-association barriers around constants and other re-association
7919 barriers can be removed. */
7920 if (CONSTANT_CLASS_P (op0)
7921 || TREE_CODE (op0) == PAREN_EXPR)
7922 return fold_convert_loc (loc, type, op0);
7923 return NULL_TREE;
7924
7925 CASE_CONVERT:
7926 case FLOAT_EXPR:
7927 case FIX_TRUNC_EXPR:
7928 if (TREE_TYPE (op0) == type)
7929 return op0;
7930
7931 if (COMPARISON_CLASS_P (op0))
7932 {
7933 /* If we have (type) (a CMP b) and type is an integral type, return a
7934 new expression involving the new type. Canonicalize
7935 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for a
7936 non-integral type.
7937 Do not fold the result, as that would not simplify further and
7938 folding again would result in infinite recursion. */
7939 if (TREE_CODE (type) == BOOLEAN_TYPE)
7940 return build2_loc (loc, TREE_CODE (op0), type,
7941 TREE_OPERAND (op0, 0),
7942 TREE_OPERAND (op0, 1));
7943 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7944 && TREE_CODE (type) != VECTOR_TYPE)
7945 return build3_loc (loc, COND_EXPR, type, op0,
7946 constant_boolean_node (true, type),
7947 constant_boolean_node (false, type));
7948 }
7949
7950 /* Handle cases of two conversions in a row. */
7951 if (CONVERT_EXPR_P (op0))
7952 {
7953 tree inside_type = TREE_TYPE (TREE_OPERAND (op0, 0));
7954 tree inter_type = TREE_TYPE (op0);
7955 int inside_int = INTEGRAL_TYPE_P (inside_type);
7956 int inside_ptr = POINTER_TYPE_P (inside_type);
7957 int inside_float = FLOAT_TYPE_P (inside_type);
7958 int inside_vec = TREE_CODE (inside_type) == VECTOR_TYPE;
7959 unsigned int inside_prec = TYPE_PRECISION (inside_type);
7960 int inside_unsignedp = TYPE_UNSIGNED (inside_type);
7961 int inter_int = INTEGRAL_TYPE_P (inter_type);
7962 int inter_ptr = POINTER_TYPE_P (inter_type);
7963 int inter_float = FLOAT_TYPE_P (inter_type);
7964 int inter_vec = TREE_CODE (inter_type) == VECTOR_TYPE;
7965 unsigned int inter_prec = TYPE_PRECISION (inter_type);
7966 int inter_unsignedp = TYPE_UNSIGNED (inter_type);
7967 int final_int = INTEGRAL_TYPE_P (type);
7968 int final_ptr = POINTER_TYPE_P (type);
7969 int final_float = FLOAT_TYPE_P (type);
7970 int final_vec = TREE_CODE (type) == VECTOR_TYPE;
7971 unsigned int final_prec = TYPE_PRECISION (type);
7972 int final_unsignedp = TYPE_UNSIGNED (type);
7973
7974 /* In addition to the cases of two conversions in a row
7975 handled below, if we are converting something to its own
7976 type via an object of identical or wider precision, neither
7977 conversion is needed. */
7978 if (TYPE_MAIN_VARIANT (inside_type) == TYPE_MAIN_VARIANT (type)
7979 && (((inter_int || inter_ptr) && final_int)
7980 || (inter_float && final_float))
7981 && inter_prec >= final_prec)
7982 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
7983
7984 /* Likewise, if the intermediate and initial types are either both
7985 float or both integer, we don't need the middle conversion if the
7986 former is wider than the latter and doesn't change the signedness
7987 (for integers). Avoid this if the final type is a pointer since
7988 then we sometimes need the middle conversion. Likewise if the
7989 final type has a precision not equal to the size of its mode. */
7990 if (((inter_int && inside_int)
7991 || (inter_float && inside_float)
7992 || (inter_vec && inside_vec))
7993 && inter_prec >= inside_prec
7994 && (inter_float || inter_vec
7995 || inter_unsignedp == inside_unsignedp)
7996 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
7997 && TYPE_MODE (type) == TYPE_MODE (inter_type))
7998 && ! final_ptr
7999 && (! final_vec || inter_prec == inside_prec))
8000 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8001
8002 /* If we have a sign-extension of a zero-extended value, we can
8003 replace that by a single zero-extension. Likewise if the
8004 final conversion does not change precision we can drop the
8005 intermediate conversion. */
8006 if (inside_int && inter_int && final_int
8007 && ((inside_prec < inter_prec && inter_prec < final_prec
8008 && inside_unsignedp && !inter_unsignedp)
8009 || final_prec == inter_prec))
8010 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8011
8012 /* Two conversions in a row are not needed unless:
8013 - some conversion is floating-point (overstrict for now), or
8014 - some conversion is a vector (overstrict for now), or
8015 - the intermediate type is narrower than both initial and
8016 final, or
8017 - the intermediate type and innermost type differ in signedness,
8018 and the outermost type is wider than the intermediate, or
8019 - the initial type is a pointer type and the precisions of the
8020 intermediate and final types differ, or
8021 - the final type is a pointer type and the precisions of the
8022 initial and intermediate types differ. */
8023 if (! inside_float && ! inter_float && ! final_float
8024 && ! inside_vec && ! inter_vec && ! final_vec
8025 && (inter_prec >= inside_prec || inter_prec >= final_prec)
8026 && ! (inside_int && inter_int
8027 && inter_unsignedp != inside_unsignedp
8028 && inter_prec < final_prec)
8029 && ((inter_unsignedp && inter_prec > inside_prec)
8030 == (final_unsignedp && final_prec > inter_prec))
8031 && ! (inside_ptr && inter_prec != final_prec)
8032 && ! (final_ptr && inside_prec != inter_prec)
8033 && ! (final_prec != GET_MODE_PRECISION (TYPE_MODE (type))
8034 && TYPE_MODE (type) == TYPE_MODE (inter_type)))
8035 return fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 0));
8036 }
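
/* Worked example (illustrative, not from the original source):
assuming 32-bit int and 64-bit long, (int) (long) x with int x
converts a value back to its own type via a wider object, so the
first rule above applies and the expression folds back to plain x. */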
8037
8038 /* Handle (T *)&A.B.C for A being of type T and B and C
8039 living at offset zero. This occurs frequently in
8040 C++ upcasting and then accessing the base. */
8041 if (TREE_CODE (op0) == ADDR_EXPR
8042 && POINTER_TYPE_P (type)
8043 && handled_component_p (TREE_OPERAND (op0, 0)))
8044 {
8045 HOST_WIDE_INT bitsize, bitpos;
8046 tree offset;
8047 enum machine_mode mode;
8048 int unsignedp, volatilep;
8049 tree base = TREE_OPERAND (op0, 0);
8050 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
8051 &mode, &unsignedp, &volatilep, false);
8052 /* If the reference was to a (constant) zero offset, we can use
8053 the address of the base if it has the same base type
8054 as the result type and the pointer type is unqualified. */
8055 if (! offset && bitpos == 0
8056 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
8057 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
8058 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
8059 return fold_convert_loc (loc, type,
8060 build_fold_addr_expr_loc (loc, base));
8061 }
8062
8063 if (TREE_CODE (op0) == MODIFY_EXPR
8064 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8065 /* Detect assigning a bitfield. */
8066 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8067 && DECL_BIT_FIELD
8068 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8069 {
8070 /* Don't leave an assignment inside a conversion
8071 unless assigning a bitfield. */
8072 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8073 /* First do the assignment, then return converted constant. */
8074 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8075 TREE_NO_WARNING (tem) = 1;
8076 TREE_USED (tem) = 1;
8077 return tem;
8078 }
8079
8080 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8081 constant (if x has signed type, the sign bit cannot be set
8082 in c). This folds extension into the BIT_AND_EXPR.
8083 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8084 very likely don't have maximal range for their precision and this
8085 transformation effectively doesn't preserve non-maximal ranges. */
8086 if (TREE_CODE (type) == INTEGER_TYPE
8087 && TREE_CODE (op0) == BIT_AND_EXPR
8088 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8089 {
8090 tree and_expr = op0;
8091 tree and0 = TREE_OPERAND (and_expr, 0);
8092 tree and1 = TREE_OPERAND (and_expr, 1);
8093 int change = 0;
8094
8095 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8096 || (TYPE_PRECISION (type)
8097 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8098 change = 1;
8099 else if (TYPE_PRECISION (TREE_TYPE (and1))
8100 <= HOST_BITS_PER_WIDE_INT
8101 && host_integerp (and1, 1))
8102 {
8103 unsigned HOST_WIDE_INT cst;
8104
8105 cst = tree_low_cst (and1, 1);
8106 cst &= HOST_WIDE_INT_M1U
8107 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8108 change = (cst == 0);
8109 #ifdef LOAD_EXTEND_OP
8110 if (change
8111 && !flag_syntax_only
8112 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8113 == ZERO_EXTEND))
8114 {
8115 tree uns = unsigned_type_for (TREE_TYPE (and0));
8116 and0 = fold_convert_loc (loc, uns, and0);
8117 and1 = fold_convert_loc (loc, uns, and1);
8118 }
8119 #endif
8120 }
8121 if (change)
8122 {
8123 tem = force_fit_type_double (type, tree_to_double_int (and1),
8124 0, TREE_OVERFLOW (and1));
8125 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8126 fold_convert_loc (loc, type, and0), tem);
8127 }
8128 }
8129
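/* Worked example (illustrative, not from the original source): with
unsigned char x, (unsigned int) (x & 0x0f) is rewritten by the block
above as (unsigned int) x & 0x0f; the operand type is unsigned, so
CHANGE is set immediately and the constant 0x0f is simply refitted
to the wider type. */
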
8130 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8131 when one of the new casts will fold away. Conservatively we assume
8132 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8133 if (POINTER_TYPE_P (type)
8134 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8135 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8136 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8137 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8138 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8139 {
8140 tree arg00 = TREE_OPERAND (arg0, 0);
8141 tree arg01 = TREE_OPERAND (arg0, 1);
8142
8143 return fold_build_pointer_plus_loc
8144 (loc, fold_convert_loc (loc, type, arg00), arg01);
8145 }
8146
8147 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8148 of the same precision, and X has an integer type not narrower than
8149 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8150 if (INTEGRAL_TYPE_P (type)
8151 && TREE_CODE (op0) == BIT_NOT_EXPR
8152 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8153 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8154 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8155 {
8156 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8157 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8158 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8159 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8160 fold_convert_loc (loc, type, tem));
8161 }
8162
8163 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8164 type of X and Y (integer types only). */
8165 if (INTEGRAL_TYPE_P (type)
8166 && TREE_CODE (op0) == MULT_EXPR
8167 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8168 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8169 {
8170 /* Be careful not to introduce new overflows. */
8171 tree mult_type;
8172 if (TYPE_OVERFLOW_WRAPS (type))
8173 mult_type = type;
8174 else
8175 mult_type = unsigned_type_for (type);
8176
8177 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8178 {
8179 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8180 fold_convert_loc (loc, mult_type,
8181 TREE_OPERAND (op0, 0)),
8182 fold_convert_loc (loc, mult_type,
8183 TREE_OPERAND (op0, 1)));
8184 return fold_convert_loc (loc, type, tem);
8185 }
8186 }
8187
8188 tem = fold_convert_const (code, type, op0);
8189 return tem ? tem : NULL_TREE;
8190
8191 case ADDR_SPACE_CONVERT_EXPR:
8192 if (integer_zerop (arg0))
8193 return fold_convert_const (code, type, arg0);
8194 return NULL_TREE;
8195
8196 case FIXED_CONVERT_EXPR:
8197 tem = fold_convert_const (code, type, arg0);
8198 return tem ? tem : NULL_TREE;
8199
8200 case VIEW_CONVERT_EXPR:
8201 if (TREE_TYPE (op0) == type)
8202 return op0;
8203 if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
8204 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8205 type, TREE_OPERAND (op0, 0));
8206 if (TREE_CODE (op0) == MEM_REF)
8207 return fold_build2_loc (loc, MEM_REF, type,
8208 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8209
8210 /* For integral conversions with the same precision or pointer
8211 conversions use a NOP_EXPR instead. */
8212 if ((INTEGRAL_TYPE_P (type)
8213 || POINTER_TYPE_P (type))
8214 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8215 || POINTER_TYPE_P (TREE_TYPE (op0)))
8216 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8217 return fold_convert_loc (loc, type, op0);
8218
8219 /* Strip inner integral conversions that do not change the precision. */
8220 if (CONVERT_EXPR_P (op0)
8221 && (INTEGRAL_TYPE_P (TREE_TYPE (op0))
8222 || POINTER_TYPE_P (TREE_TYPE (op0)))
8223 && (INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0)))
8224 || POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (op0, 0))))
8225 && (TYPE_PRECISION (TREE_TYPE (op0))
8226 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (op0, 0)))))
8227 return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
8228 type, TREE_OPERAND (op0, 0));
8229
8230 return fold_view_convert_expr (type, op0);
8231
8232 case NEGATE_EXPR:
8233 tem = fold_negate_expr (loc, arg0);
8234 if (tem)
8235 return fold_convert_loc (loc, type, tem);
8236 return NULL_TREE;
8237
8238 case ABS_EXPR:
8239 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
8240 return fold_abs_const (arg0, type);
8241 else if (TREE_CODE (arg0) == NEGATE_EXPR)
8242 return fold_build1_loc (loc, ABS_EXPR, type, TREE_OPERAND (arg0, 0));
8243 /* Convert fabs((double)float) into (double)fabsf(float). */
8244 else if (TREE_CODE (arg0) == NOP_EXPR
8245 && TREE_CODE (type) == REAL_TYPE)
8246 {
8247 tree targ0 = strip_float_extensions (arg0);
8248 if (targ0 != arg0)
8249 return fold_convert_loc (loc, type,
8250 fold_build1_loc (loc, ABS_EXPR,
8251 TREE_TYPE (targ0),
8252 targ0));
8253 }
8254 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8255 else if (TREE_CODE (arg0) == ABS_EXPR)
8256 return arg0;
8257 else if (tree_expr_nonnegative_p (arg0))
8258 return arg0;
8259
8260 /* Strip sign ops from argument. */
8261 if (TREE_CODE (type) == REAL_TYPE)
8262 {
8263 tem = fold_strip_sign_ops (arg0);
8264 if (tem)
8265 return fold_build1_loc (loc, ABS_EXPR, type,
8266 fold_convert_loc (loc, type, tem));
8267 }
8268 return NULL_TREE;
8269
8270 case CONJ_EXPR:
8271 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8272 return fold_convert_loc (loc, type, arg0);
8273 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8274 {
8275 tree itype = TREE_TYPE (type);
8276 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8277 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8278 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8279 negate_expr (ipart));
8280 }
8281 if (TREE_CODE (arg0) == COMPLEX_CST)
8282 {
8283 tree itype = TREE_TYPE (type);
8284 tree rpart = fold_convert_loc (loc, itype, TREE_REALPART (arg0));
8285 tree ipart = fold_convert_loc (loc, itype, TREE_IMAGPART (arg0));
8286 return build_complex (type, rpart, negate_expr (ipart));
8287 }
8288 if (TREE_CODE (arg0) == CONJ_EXPR)
8289 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8290 return NULL_TREE;
8291
8292 case BIT_NOT_EXPR:
8293 if (TREE_CODE (arg0) == INTEGER_CST)
8294 return fold_not_const (arg0, type);
8295 else if (TREE_CODE (arg0) == BIT_NOT_EXPR)
8296 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8297 /* Convert ~ (-A) to A - 1. */
8298 else if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8299 return fold_build2_loc (loc, MINUS_EXPR, type,
8300 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8301 build_int_cst (type, 1));
8302 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8303 else if (INTEGRAL_TYPE_P (type)
8304 && ((TREE_CODE (arg0) == MINUS_EXPR
8305 && integer_onep (TREE_OPERAND (arg0, 1)))
8306 || (TREE_CODE (arg0) == PLUS_EXPR
8307 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8308 return fold_build1_loc (loc, NEGATE_EXPR, type,
8309 fold_convert_loc (loc, type,
8310 TREE_OPERAND (arg0, 0)));
8311 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8312 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8313 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8314 fold_convert_loc (loc, type,
8315 TREE_OPERAND (arg0, 0)))))
8316 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8317 fold_convert_loc (loc, type,
8318 TREE_OPERAND (arg0, 1)));
8319 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8320 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8321 fold_convert_loc (loc, type,
8322 TREE_OPERAND (arg0, 1)))))
8323 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8324 fold_convert_loc (loc, type,
8325 TREE_OPERAND (arg0, 0)), tem);
8326 /* Perform BIT_NOT_EXPR on each element individually. */
8327 else if (TREE_CODE (arg0) == VECTOR_CST)
8328 {
8329 tree *elements;
8330 tree elem;
8331 unsigned count = VECTOR_CST_NELTS (arg0), i;
8332
8333 elements = XALLOCAVEC (tree, count);
8334 for (i = 0; i < count; i++)
8335 {
8336 elem = VECTOR_CST_ELT (arg0, i);
8337 elem = fold_unary_loc (loc, BIT_NOT_EXPR, TREE_TYPE (type), elem);
8338 if (elem == NULL_TREE)
8339 break;
8340 elements[i] = elem;
8341 }
8342 if (i == count)
8343 return build_vector (type, elements);
8344 }
8345 else if (COMPARISON_CLASS_P (arg0)
8346 && (VECTOR_TYPE_P (type)
8347 || (INTEGRAL_TYPE_P (type) && TYPE_PRECISION (type) == 1)))
8348 {
8349 tree op_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
8350 enum tree_code subcode = invert_tree_comparison (TREE_CODE (arg0),
8351 HONOR_NANS (TYPE_MODE (op_type)));
8352 if (subcode != ERROR_MARK)
8353 return build2_loc (loc, subcode, type, TREE_OPERAND (arg0, 0),
8354 TREE_OPERAND (arg0, 1));
8355 }
8356
8358 return NULL_TREE;
8359
8360 case TRUTH_NOT_EXPR:
8361 /* Note that the operand of this must be an int
8362 and its values must be 0 or 1.
8363 ("true" is a fixed value perhaps depending on the language,
8364 but we don't handle values other than 1 correctly yet.) */
8365 tem = fold_truth_not_expr (loc, arg0);
8366 if (!tem)
8367 return NULL_TREE;
8368 return fold_convert_loc (loc, type, tem);
8369
8370 case REALPART_EXPR:
8371 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8372 return fold_convert_loc (loc, type, arg0);
8373 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8374 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
8375 TREE_OPERAND (arg0, 1));
8376 if (TREE_CODE (arg0) == COMPLEX_CST)
8377 return fold_convert_loc (loc, type, TREE_REALPART (arg0));
8378 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8379 {
8380 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8381 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8382 fold_build1_loc (loc, REALPART_EXPR, itype,
8383 TREE_OPERAND (arg0, 0)),
8384 fold_build1_loc (loc, REALPART_EXPR, itype,
8385 TREE_OPERAND (arg0, 1)));
8386 return fold_convert_loc (loc, type, tem);
8387 }
8388 if (TREE_CODE (arg0) == CONJ_EXPR)
8389 {
8390 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8391 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8392 TREE_OPERAND (arg0, 0));
8393 return fold_convert_loc (loc, type, tem);
8394 }
8395 if (TREE_CODE (arg0) == CALL_EXPR)
8396 {
8397 tree fn = get_callee_fndecl (arg0);
8398 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8399 switch (DECL_FUNCTION_CODE (fn))
8400 {
8401 CASE_FLT_FN (BUILT_IN_CEXPI):
8402 fn = mathfn_built_in (type, BUILT_IN_COS);
8403 if (fn)
8404 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8405 break;
8406
8407 default:
8408 break;
8409 }
8410 }
8411 return NULL_TREE;
8412
8413 case IMAGPART_EXPR:
8414 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8415 return build_zero_cst (type);
8416 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8417 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 1),
8418 TREE_OPERAND (arg0, 0));
8419 if (TREE_CODE (arg0) == COMPLEX_CST)
8420 return fold_convert_loc (loc, type, TREE_IMAGPART (arg0));
8421 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8422 {
8423 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8424 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8425 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8426 TREE_OPERAND (arg0, 0)),
8427 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8428 TREE_OPERAND (arg0, 1)));
8429 return fold_convert_loc (loc, type, tem);
8430 }
8431 if (TREE_CODE (arg0) == CONJ_EXPR)
8432 {
8433 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8434 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8435 return fold_convert_loc (loc, type, negate_expr (tem));
8436 }
8437 if (TREE_CODE (arg0) == CALL_EXPR)
8438 {
8439 tree fn = get_callee_fndecl (arg0);
8440 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8441 switch (DECL_FUNCTION_CODE (fn))
8442 {
8443 CASE_FLT_FN (BUILT_IN_CEXPI):
8444 fn = mathfn_built_in (type, BUILT_IN_SIN);
8445 if (fn)
8446 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8447 break;
8448
8449 default:
8450 break;
8451 }
8452 }
8453 return NULL_TREE;
8454
8455 case INDIRECT_REF:
8456 /* Fold *&X to X if X is an lvalue. */
8457 if (TREE_CODE (op0) == ADDR_EXPR)
8458 {
8459 tree op00 = TREE_OPERAND (op0, 0);
8460 if ((TREE_CODE (op00) == VAR_DECL
8461 || TREE_CODE (op00) == PARM_DECL
8462 || TREE_CODE (op00) == RESULT_DECL)
8463 && !TREE_READONLY (op00))
8464 return op00;
8465 }
8466 return NULL_TREE;
8467
8468 case VEC_UNPACK_LO_EXPR:
8469 case VEC_UNPACK_HI_EXPR:
8470 case VEC_UNPACK_FLOAT_LO_EXPR:
8471 case VEC_UNPACK_FLOAT_HI_EXPR:
8472 {
8473 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8474 tree *elts;
8475 enum tree_code subcode;
8476
8477 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
8478 if (TREE_CODE (arg0) != VECTOR_CST)
8479 return NULL_TREE;
8480
8481 elts = XALLOCAVEC (tree, nelts * 2);
8482 if (!vec_cst_ctor_to_array (arg0, elts))
8483 return NULL_TREE;
8484
8485 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
8486 || code == VEC_UNPACK_FLOAT_LO_EXPR))
8487 elts += nelts;
8488
8489 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
8490 subcode = NOP_EXPR;
8491 else
8492 subcode = FLOAT_EXPR;
8493
8494 for (i = 0; i < nelts; i++)
8495 {
8496 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
8497 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
8498 return NULL_TREE;
8499 }
8500
8501 return build_vector (type, elts);
8502 }
8503
8504 case REDUC_MIN_EXPR:
8505 case REDUC_MAX_EXPR:
8506 case REDUC_PLUS_EXPR:
8507 {
8508 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8509 tree *elts;
8510 enum tree_code subcode;
8511
8512 if (TREE_CODE (op0) != VECTOR_CST)
8513 return NULL_TREE;
8514
8515 elts = XALLOCAVEC (tree, nelts);
8516 if (!vec_cst_ctor_to_array (op0, elts))
8517 return NULL_TREE;
8518
8519 switch (code)
8520 {
8521 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
8522 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
8523 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
8524 default: gcc_unreachable ();
8525 }
8526
8527 for (i = 1; i < nelts; i++)
8528 {
8529 elts[0] = const_binop (subcode, elts[0], elts[i]);
8530 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
8531 return NULL_TREE;
8532 elts[i] = build_zero_cst (TREE_TYPE (type));
8533 }
8534
8535 return build_vector (type, elts);
8536 }
8537
8538 default:
8539 return NULL_TREE;
8540 } /* switch (code) */
8541 }
8542
8543
8544 /* If the operation was a conversion do _not_ mark a resulting constant
8545 with TREE_OVERFLOW if the original constant was not. These conversions
8546 have implementation defined behavior and retaining the TREE_OVERFLOW
8547 flag here would confuse later passes such as VRP. */
8548 tree
8549 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8550 tree type, tree op0)
8551 {
8552 tree res = fold_unary_loc (loc, code, type, op0);
8553 if (res
8554 && TREE_CODE (res) == INTEGER_CST
8555 && TREE_CODE (op0) == INTEGER_CST
8556 && CONVERT_EXPR_CODE_P (code))
8557 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8558
8559 return res;
8560 }
8561
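/* Worked example (illustrative, not from the original source):
folding (unsigned char) 256 yields the INTEGER_CST 0, normally
marked with TREE_OVERFLOW because the value does not fit; since the
operand 256 carried no overflow flag, this wrapper clears the flag
on the result so that later passes such as VRP are not confused by
an implementation-defined conversion. */
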
8562 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8563 operands OP0 and OP1. LOC is the location of the resulting expression.
8564 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8565 Return the folded expression if folding is successful. Otherwise,
8566 return NULL_TREE. */
8567 static tree
8568 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8569 tree arg0, tree arg1, tree op0, tree op1)
8570 {
8571 tree tem;
8572
8573 /* We only do these simplifications if we are optimizing. */
8574 if (!optimize)
8575 return NULL_TREE;
8576
8577 /* Check for things like (A || B) && (A || C). We can convert this
8578 to A || (B && C). Note that either operator can be any of the four
8579 truth and/or operations and the transformation will still be
8580 valid. Also note that we only care about order for the
8581 ANDIF and ORIF operators. If B contains side effects, this
8582 might change the truth-value of A. */
8583 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8584 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8585 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8586 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8587 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8588 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8589 {
8590 tree a00 = TREE_OPERAND (arg0, 0);
8591 tree a01 = TREE_OPERAND (arg0, 1);
8592 tree a10 = TREE_OPERAND (arg1, 0);
8593 tree a11 = TREE_OPERAND (arg1, 1);
8594 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8595 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8596 && (code == TRUTH_AND_EXPR
8597 || code == TRUTH_OR_EXPR));
8598
8599 if (operand_equal_p (a00, a10, 0))
8600 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8601 fold_build2_loc (loc, code, type, a01, a11));
8602 else if (commutative && operand_equal_p (a00, a11, 0))
8603 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8604 fold_build2_loc (loc, code, type, a01, a10));
8605 else if (commutative && operand_equal_p (a01, a10, 0))
8606 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8607 fold_build2_loc (loc, code, type, a00, a11));
8608
8609 /* This case is tricky because we must either have commutative
8610 operators or else A10 must not have side-effects. */
8611
8612 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8613 && operand_equal_p (a01, a11, 0))
8614 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8615 fold_build2_loc (loc, code, type, a00, a10),
8616 a01);
8617 }
8618
8619 /* See if we can build a range comparison. */
8620 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8621 return tem;
8622
8623 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8624 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8625 {
8626 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8627 if (tem)
8628 return fold_build2_loc (loc, code, type, tem, arg1);
8629 }
8630
8631 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8632 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8633 {
8634 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8635 if (tem)
8636 return fold_build2_loc (loc, code, type, arg0, tem);
8637 }
8638
8639 /* Check for the possibility of merging component references. If our
8640 lhs is another similar operation, try to merge its rhs with our
8641 rhs. Then try to merge our lhs and rhs. */
8642 if (TREE_CODE (arg0) == code
8643 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8644 TREE_OPERAND (arg0, 1), arg1)))
8645 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8646
8647 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8648 return tem;
8649
8650 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8651 && (code == TRUTH_AND_EXPR
8652 || code == TRUTH_ANDIF_EXPR
8653 || code == TRUTH_OR_EXPR
8654 || code == TRUTH_ORIF_EXPR))
8655 {
8656 enum tree_code ncode, icode;
8657
8658 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8659 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8660 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8661
8662 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8663 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8664 We don't want to pack more than two leaves into a non-IF AND/OR
8665 expression.
8666 If the tree code of the left-hand operand isn't an AND/OR-IF code
8667 and isn't equal to IF-CODE, then we don't want to add the right-hand
8668 operand. If the inner right-hand side of the left-hand operand has
8669 side-effects, or isn't simple, then we can't add to it, as otherwise
8670 we might destroy the if-sequence. */
8671 if (TREE_CODE (arg0) == icode
8672 && simple_operand_p_2 (arg1)
8673 /* Needed for sequence points to handle trapping
8674 operations and side-effects. */
8675 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8676 {
8677 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8678 arg1);
8679 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8680 tem);
8681 }
8682 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8683 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8684 else if (TREE_CODE (arg1) == icode
8685 && simple_operand_p_2 (arg0)
8686 /* Needed for sequence points to handle trapping
8687 operations and side-effects. */
8688 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8689 {
8690 tem = fold_build2_loc (loc, ncode, type,
8691 arg0, TREE_OPERAND (arg1, 0));
8692 return fold_build2_loc (loc, icode, type, tem,
8693 TREE_OPERAND (arg1, 1));
8694 }
8695 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8696 into (A OR B).
8697 For sequence point consistency, we need to check for trapping,
8698 and side-effects. */
8699 else if (code == icode && simple_operand_p_2 (arg0)
8700 && simple_operand_p_2 (arg1))
8701 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8702 }
8703
8704 return NULL_TREE;
8705 }
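
/* A sketch of what the function above achieves: for

     (a || b) && (a || c)

   the common operand a is factored out, giving a || (b && c); and when
   LOGICAL_OP_NON_SHORT_CIRCUIT holds, side-effect-free chains such as
   a && b && c are re-associated into plain TRUTH_AND_EXPRs that need
   fewer branches. */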
8706
8707 /* Fold a binary expression of code CODE and type TYPE with operands
8708 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8709 Return the folded expression if folding is successful. Otherwise,
8710 return NULL_TREE. */
8711
8712 static tree
8713 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8714 {
8715 enum tree_code compl_code;
8716
8717 if (code == MIN_EXPR)
8718 compl_code = MAX_EXPR;
8719 else if (code == MAX_EXPR)
8720 compl_code = MIN_EXPR;
8721 else
8722 gcc_unreachable ();
8723
8724 /* MIN (MAX (a, b), b) == b. */
8725 if (TREE_CODE (op0) == compl_code
8726 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8727 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8728
8729 /* MIN (MAX (b, a), b) == b. */
8730 if (TREE_CODE (op0) == compl_code
8731 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8732 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8733 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8734
8735 /* MIN (a, MAX (a, b)) == a. */
8736 if (TREE_CODE (op1) == compl_code
8737 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8738 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8739 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8740
8741 /* MIN (a, MAX (b, a)) == a. */
8742 if (TREE_CODE (op1) == compl_code
8743 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8744 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8745 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8746
8747 return NULL_TREE;
8748 }
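
/* For example, MIN_EXPR <MAX_EXPR <a, b>, b> folds to b by the first
   rule above, and with code == MAX_EXPR the complementary code is
   MIN_EXPR, so MAX_EXPR <a, MIN_EXPR <a, b>> folds to a the same
   way. */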
8749
8750 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8751 by changing CODE to reduce the magnitude of constants involved in
8752 ARG0 of the comparison.
8753 Returns a canonicalized comparison tree if a simplification was
8754 possible, otherwise returns NULL_TREE.
8755 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8756 valid if signed overflow is undefined. */
8757
8758 static tree
8759 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8760 tree arg0, tree arg1,
8761 bool *strict_overflow_p)
8762 {
8763 enum tree_code code0 = TREE_CODE (arg0);
8764 tree t, cst0 = NULL_TREE;
8765 int sgn0;
8766 bool swap = false;
8767
8768 /* Match A +- CST code arg1 and CST code arg1. We can change the
8769 first form only if overflow is undefined. */
8770 if (!((TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8771 /* In principle pointers also have undefined overflow behavior,
8772 but that causes problems elsewhere. */
8773 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8774 && (code0 == MINUS_EXPR
8775 || code0 == PLUS_EXPR)
8776 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8777 || code0 == INTEGER_CST))
8778 return NULL_TREE;
8779
8780 /* Identify the constant in arg0 and its sign. */
8781 if (code0 == INTEGER_CST)
8782 cst0 = arg0;
8783 else
8784 cst0 = TREE_OPERAND (arg0, 1);
8785 sgn0 = tree_int_cst_sgn (cst0);
8786
8787 /* Overflowed constants and zero will cause problems. */
8788 if (integer_zerop (cst0)
8789 || TREE_OVERFLOW (cst0))
8790 return NULL_TREE;
8791
8792 /* See if we can reduce the magnitude of the constant in
8793 arg0 by changing the comparison code. */
8794 if (code0 == INTEGER_CST)
8795 {
8796 /* CST <= arg1 -> CST-1 < arg1. */
8797 if (code == LE_EXPR && sgn0 == 1)
8798 code = LT_EXPR;
8799 /* -CST < arg1 -> -CST-1 <= arg1. */
8800 else if (code == LT_EXPR && sgn0 == -1)
8801 code = LE_EXPR;
8802 /* CST > arg1 -> CST-1 >= arg1. */
8803 else if (code == GT_EXPR && sgn0 == 1)
8804 code = GE_EXPR;
8805 /* -CST >= arg1 -> -CST-1 > arg1. */
8806 else if (code == GE_EXPR && sgn0 == -1)
8807 code = GT_EXPR;
8808 else
8809 return NULL_TREE;
8810 /* arg1 code' CST' might be more canonical. */
8811 swap = true;
8812 }
8813 else
8814 {
8815 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8816 if (code == LT_EXPR
8817 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8818 code = LE_EXPR;
8819 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8820 else if (code == GT_EXPR
8821 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8822 code = GE_EXPR;
8823 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8824 else if (code == LE_EXPR
8825 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8826 code = LT_EXPR;
8827 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8828 else if (code == GE_EXPR
8829 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8830 code = GT_EXPR;
8831 else
8832 return NULL_TREE;
8833 *strict_overflow_p = true;
8834 }
8835
8836 /* Now build the constant reduced in magnitude. But not if that
8837 would produce one outside of its type's range. */
8838 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8839 && ((sgn0 == 1
8840 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8841 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8842 || (sgn0 == -1
8843 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8844 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8845 /* We cannot swap the comparison here as that would cause us to
8846 endlessly recurse. */
8847 return NULL_TREE;
8848
8849 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8850 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8851 if (code0 != INTEGER_CST)
8852 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8853 t = fold_convert (TREE_TYPE (arg1), t);
8854
8855 /* If swapping might yield a more canonical form, do so. */
8856 if (swap)
8857 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8858 else
8859 return fold_build2_loc (loc, code, type, t, arg1);
8860 }
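
/* Worked examples of the canonicalization above, assuming signed
   overflow is undefined for the first form:

     a + 3 > b   becomes   a + 2 >= b   (and sets *STRICT_OVERFLOW_P)
     5 <= b      becomes   b > 4        (constant form, then swapped)

   Each step shrinks the magnitude of the constant by one. */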
8861
8862 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8863 overflow further. Try to decrease the magnitude of constants involved
8864 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8865 and put sole constants at the second argument position.
8866 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8867
8868 static tree
8869 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8870 tree arg0, tree arg1)
8871 {
8872 tree t;
8873 bool strict_overflow_p;
8874 const char * const warnmsg = G_("assuming signed overflow does not occur "
8875 "when reducing constant in comparison");
8876
8877 /* Try canonicalization by simplifying arg0. */
8878 strict_overflow_p = false;
8879 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8880 &strict_overflow_p);
8881 if (t)
8882 {
8883 if (strict_overflow_p)
8884 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8885 return t;
8886 }
8887
8888 /* Try canonicalization by simplifying arg1 using the swapped
8889 comparison. */
8890 code = swap_tree_comparison (code);
8891 strict_overflow_p = false;
8892 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8893 &strict_overflow_p);
8894 if (t && strict_overflow_p)
8895 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8896 return t;
8897 }
8898
8899 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8900 space. This is used to avoid issuing overflow warnings for
8901 expressions like &p->x which cannot wrap. */
8902
8903 static bool
8904 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8905 {
8906 double_int di_offset, total;
8907
8908 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8909 return true;
8910
8911 if (bitpos < 0)
8912 return true;
8913
8914 if (offset == NULL_TREE)
8915 di_offset = double_int_zero;
8916 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8917 return true;
8918 else
8919 di_offset = TREE_INT_CST (offset);
8920
8921 bool overflow;
8922 double_int units = double_int::from_uhwi (bitpos / BITS_PER_UNIT);
8923 total = di_offset.add_with_sign (units, true, &overflow);
8924 if (overflow)
8925 return true;
8926
8927 if (total.high != 0)
8928 return true;
8929
8930 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8931 if (size <= 0)
8932 return true;
8933
8934 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8935 array. */
8936 if (TREE_CODE (base) == ADDR_EXPR)
8937 {
8938 HOST_WIDE_INT base_size;
8939
8940 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8941 if (base_size > 0 && size < base_size)
8942 size = base_size;
8943 }
8944
8945 return total.low > (unsigned HOST_WIDE_INT) size;
8946 }
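
/* For instance, with char a[16], BASE = &a, OFFSET = NULL_TREE and
   BITPOS = 8 * BITS_PER_UNIT: TOTAL is 8 and SIZE is 16, so the
   address cannot wrap and false is returned. A negative BITPOS or an
   offset that overflows the double_int addition conservatively
   answers true. */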
8947
8948 /* Return the HOST_WIDE_INT least significant bits of T, a sizetype
8949 kind INTEGER_CST. This makes sure to properly sign-extend the
8950 constant. */
8951
8952 static HOST_WIDE_INT
8953 size_low_cst (const_tree t)
8954 {
8955 double_int d = tree_to_double_int (t);
8956 return d.sext (TYPE_PRECISION (TREE_TYPE (t))).low;
8957 }
8958
8959 /* Subroutine of fold_binary. This routine performs all of the
8960 transformations that are common to the equality/inequality
8961 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8962 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8963 fold_binary itself should use fold_binary instead. Fold a comparison with
8964 tree code CODE and type TYPE with operands OP0 and OP1. Return
8965 the folded comparison or NULL_TREE. */
8966
8967 static tree
8968 fold_comparison (location_t loc, enum tree_code code, tree type,
8969 tree op0, tree op1)
8970 {
8971 tree arg0, arg1, tem;
8972
8973 arg0 = op0;
8974 arg1 = op1;
8975
8976 STRIP_SIGN_NOPS (arg0);
8977 STRIP_SIGN_NOPS (arg1);
8978
8979 tem = fold_relational_const (code, type, arg0, arg1);
8980 if (tem != NULL_TREE)
8981 return tem;
8982
8983 /* If one arg is a real or integer constant, put it last. */
8984 if (tree_swap_operands_p (arg0, arg1, true))
8985 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
8986
8987 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 +- C1. */
8988 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8989 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8990 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8991 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
8992 && (TREE_CODE (arg1) == INTEGER_CST
8993 && !TREE_OVERFLOW (arg1)))
8994 {
8995 tree const1 = TREE_OPERAND (arg0, 1);
8996 tree const2 = arg1;
8997 tree variable = TREE_OPERAND (arg0, 0);
8998 tree lhs;
8999 int lhs_add;
9000 lhs_add = TREE_CODE (arg0) != PLUS_EXPR;
9001
9002 lhs = fold_build2_loc (loc, lhs_add ? PLUS_EXPR : MINUS_EXPR,
9003 TREE_TYPE (arg1), const2, const1);
9004
9005 /* If the constant operation overflowed this can be
9006 simplified as a comparison against INT_MAX/INT_MIN. */
9007 if (TREE_CODE (lhs) == INTEGER_CST
9008 && TREE_OVERFLOW (lhs))
9009 {
9010 int const1_sgn = tree_int_cst_sgn (const1);
9011 enum tree_code code2 = code;
9012
9013 /* Get the sign of the constant on the lhs if the
9014 operation were VARIABLE + CONST1. */
9015 if (TREE_CODE (arg0) == MINUS_EXPR)
9016 const1_sgn = -const1_sgn;
9017
9018 /* The sign of the constant determines if we overflowed
9019 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
9020 Canonicalize to the INT_MIN overflow by swapping the comparison
9021 if necessary. */
9022 if (const1_sgn == -1)
9023 code2 = swap_tree_comparison (code);
9024
9025 /* We now can look at the canonicalized case
9026 VARIABLE + 1 CODE2 INT_MIN
9027 and decide on the result. */
9028 if (code2 == LT_EXPR
9029 || code2 == LE_EXPR
9030 || code2 == EQ_EXPR)
9031 return omit_one_operand_loc (loc, type, boolean_false_node, variable);
9032 else if (code2 == NE_EXPR
9033 || code2 == GE_EXPR
9034 || code2 == GT_EXPR)
9035 return omit_one_operand_loc (loc, type, boolean_true_node, variable);
9036 }
9037
9038 if (TREE_CODE (lhs) == TREE_CODE (arg1)
9039 && (TREE_CODE (lhs) != INTEGER_CST
9040 || !TREE_OVERFLOW (lhs)))
9041 {
9042 if (code != EQ_EXPR && code != NE_EXPR)
9043 fold_overflow_warning ("assuming signed overflow does not occur "
9044 "when changing X +- C1 cmp C2 to "
9045 "X cmp C1 +- C2",
9046 WARN_STRICT_OVERFLOW_COMPARISON);
9047 return fold_build2_loc (loc, code, type, variable, lhs);
9048 }
9049 }
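
  /* Example of the rewrite above: for signed x with undefined
     overflow, x + 9 == 12 becomes x == 3. If the constant operation
     overflows, say x + 1 < INT_MIN, the whole comparison collapses
     to false via the INT_MIN canonicalization. */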
9050
9051 /* For comparisons of pointers we can decompose them into a compile-time
9052 comparison of the base objects and the offsets into the object.
9053 This requires at least one operand being an ADDR_EXPR or a
9054 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
9055 if (POINTER_TYPE_P (TREE_TYPE (arg0))
9056 && (TREE_CODE (arg0) == ADDR_EXPR
9057 || TREE_CODE (arg1) == ADDR_EXPR
9058 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
9059 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
9060 {
9061 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
9062 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
9063 enum machine_mode mode;
9064 int volatilep, unsignedp;
9065 bool indirect_base0 = false, indirect_base1 = false;
9066
9067 /* Get base and offset for the access. Strip ADDR_EXPR for
9068 get_inner_reference, but put it back by stripping INDIRECT_REF
9069 off the base object if possible. indirect_baseN will be true
9070 if baseN is not an address but refers to the object itself. */
9071 base0 = arg0;
9072 if (TREE_CODE (arg0) == ADDR_EXPR)
9073 {
9074 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
9075 &bitsize, &bitpos0, &offset0, &mode,
9076 &unsignedp, &volatilep, false);
9077 if (TREE_CODE (base0) == INDIRECT_REF)
9078 base0 = TREE_OPERAND (base0, 0);
9079 else
9080 indirect_base0 = true;
9081 }
9082 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
9083 {
9084 base0 = TREE_OPERAND (arg0, 0);
9085 STRIP_SIGN_NOPS (base0);
9086 if (TREE_CODE (base0) == ADDR_EXPR)
9087 {
9088 base0 = TREE_OPERAND (base0, 0);
9089 indirect_base0 = true;
9090 }
9091 offset0 = TREE_OPERAND (arg0, 1);
9092 if (host_integerp (offset0, 0))
9093 {
9094 HOST_WIDE_INT off = size_low_cst (offset0);
9095 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9096 * BITS_PER_UNIT)
9097 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9098 {
9099 bitpos0 = off * BITS_PER_UNIT;
9100 offset0 = NULL_TREE;
9101 }
9102 }
9103 }
9104
9105 base1 = arg1;
9106 if (TREE_CODE (arg1) == ADDR_EXPR)
9107 {
9108 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
9109 &bitsize, &bitpos1, &offset1, &mode,
9110 &unsignedp, &volatilep, false);
9111 if (TREE_CODE (base1) == INDIRECT_REF)
9112 base1 = TREE_OPERAND (base1, 0);
9113 else
9114 indirect_base1 = true;
9115 }
9116 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
9117 {
9118 base1 = TREE_OPERAND (arg1, 0);
9119 STRIP_SIGN_NOPS (base1);
9120 if (TREE_CODE (base1) == ADDR_EXPR)
9121 {
9122 base1 = TREE_OPERAND (base1, 0);
9123 indirect_base1 = true;
9124 }
9125 offset1 = TREE_OPERAND (arg1, 1);
9126 if (host_integerp (offset1, 0))
9127 {
9128 HOST_WIDE_INT off = size_low_cst (offset1);
9129 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
9130 * BITS_PER_UNIT)
9131 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
9132 {
9133 bitpos1 = off * BITS_PER_UNIT;
9134 offset1 = NULL_TREE;
9135 }
9136 }
9137 }
9138
9139 /* A local variable can never be pointed to by
9140 the default SSA name of an incoming parameter. */
9141 if ((TREE_CODE (arg0) == ADDR_EXPR
9142 && indirect_base0
9143 && TREE_CODE (base0) == VAR_DECL
9144 && auto_var_in_fn_p (base0, current_function_decl)
9145 && !indirect_base1
9146 && TREE_CODE (base1) == SSA_NAME
9147 && SSA_NAME_IS_DEFAULT_DEF (base1)
9148 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
9149 || (TREE_CODE (arg1) == ADDR_EXPR
9150 && indirect_base1
9151 && TREE_CODE (base1) == VAR_DECL
9152 && auto_var_in_fn_p (base1, current_function_decl)
9153 && !indirect_base0
9154 && TREE_CODE (base0) == SSA_NAME
9155 && SSA_NAME_IS_DEFAULT_DEF (base0)
9156 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
9157 {
9158 if (code == NE_EXPR)
9159 return constant_boolean_node (1, type);
9160 else if (code == EQ_EXPR)
9161 return constant_boolean_node (0, type);
9162 }
9163 /* If we have equivalent bases we might be able to simplify. */
9164 else if (indirect_base0 == indirect_base1
9165 && operand_equal_p (base0, base1, 0))
9166 {
9167 /* We can fold this expression to a constant if the non-constant
9168 offset parts are equal. */
9169 if ((offset0 == offset1
9170 || (offset0 && offset1
9171 && operand_equal_p (offset0, offset1, 0)))
9172 && (code == EQ_EXPR
9173 || code == NE_EXPR
9174 || (indirect_base0 && DECL_P (base0))
9175 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9176
9177 {
9178 if (code != EQ_EXPR
9179 && code != NE_EXPR
9180 && bitpos0 != bitpos1
9181 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9182 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9183 fold_overflow_warning (("assuming pointer wraparound does not "
9184 "occur when comparing P +- C1 with "
9185 "P +- C2"),
9186 WARN_STRICT_OVERFLOW_CONDITIONAL);
9187
9188 switch (code)
9189 {
9190 case EQ_EXPR:
9191 return constant_boolean_node (bitpos0 == bitpos1, type);
9192 case NE_EXPR:
9193 return constant_boolean_node (bitpos0 != bitpos1, type);
9194 case LT_EXPR:
9195 return constant_boolean_node (bitpos0 < bitpos1, type);
9196 case LE_EXPR:
9197 return constant_boolean_node (bitpos0 <= bitpos1, type);
9198 case GE_EXPR:
9199 return constant_boolean_node (bitpos0 >= bitpos1, type);
9200 case GT_EXPR:
9201 return constant_boolean_node (bitpos0 > bitpos1, type);
9202 default:;
9203 }
9204 }
9205 /* We can simplify the comparison to a comparison of the variable
9206 offset parts if the constant offset parts are equal.
9207 Be careful to use signed sizetype here because otherwise we
9208 mess with array offsets in the wrong way. This is possible
9209 because pointer arithmetic is restricted to remain within an
9210 object and overflow on pointer differences is undefined as of
9211 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9212 else if (bitpos0 == bitpos1
9213 && ((code == EQ_EXPR || code == NE_EXPR)
9214 || (indirect_base0 && DECL_P (base0))
9215 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9216 {
9217 /* By converting to signed sizetype we cover middle-end pointer
9218 arithmetic which operates on unsigned pointer types of size
9219 type size and ARRAY_REF offsets which are properly sign or
9220 zero extended from their type in case it is narrower than
9221 sizetype. */
9222 if (offset0 == NULL_TREE)
9223 offset0 = build_int_cst (ssizetype, 0);
9224 else
9225 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9226 if (offset1 == NULL_TREE)
9227 offset1 = build_int_cst (ssizetype, 0);
9228 else
9229 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9230
9231 if (code != EQ_EXPR
9232 && code != NE_EXPR
9233 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9234 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9235 fold_overflow_warning (("assuming pointer wraparound does not "
9236 "occur when comparing P +- C1 with "
9237 "P +- C2"),
9238 WARN_STRICT_OVERFLOW_COMPARISON);
9239
9240 return fold_build2_loc (loc, code, type, offset0, offset1);
9241 }
9242 }
9243 /* For non-equal bases we can simplify if they are addresses
9244 of local binding decls or constants. */
9245 else if (indirect_base0 && indirect_base1
9246 /* We know that !operand_equal_p (base0, base1, 0)
9247 because the if condition was false. But make
9248 sure two decls are not the same. */
9249 && base0 != base1
9250 && TREE_CODE (arg0) == ADDR_EXPR
9251 && TREE_CODE (arg1) == ADDR_EXPR
9252 && (((TREE_CODE (base0) == VAR_DECL
9253 || TREE_CODE (base0) == PARM_DECL)
9254 && (targetm.binds_local_p (base0)
9255 || CONSTANT_CLASS_P (base1)))
9256 || CONSTANT_CLASS_P (base0))
9257 && (((TREE_CODE (base1) == VAR_DECL
9258 || TREE_CODE (base1) == PARM_DECL)
9259 && (targetm.binds_local_p (base1)
9260 || CONSTANT_CLASS_P (base0)))
9261 || CONSTANT_CLASS_P (base1)))
9262 {
9263 if (code == EQ_EXPR)
9264 return omit_two_operands_loc (loc, type, boolean_false_node,
9265 arg0, arg1);
9266 else if (code == NE_EXPR)
9267 return omit_two_operands_loc (loc, type, boolean_true_node,
9268 arg0, arg1);
9269 }
9270 /* For equal offsets we can simplify to a comparison of the
9271 base addresses. */
9272 else if (bitpos0 == bitpos1
9273 && (indirect_base0
9274 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9275 && (indirect_base1
9276 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9277 && ((offset0 == offset1)
9278 || (offset0 && offset1
9279 && operand_equal_p (offset0, offset1, 0))))
9280 {
9281 if (indirect_base0)
9282 base0 = build_fold_addr_expr_loc (loc, base0);
9283 if (indirect_base1)
9284 base1 = build_fold_addr_expr_loc (loc, base1);
9285 return fold_build2_loc (loc, code, type, base0, base1);
9286 }
9287 }
9288
9289 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9290 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9291 the resulting offset is smaller in absolute value than the
9292 original one. */
9293 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9294 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9295 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9296 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9297 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9298 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9299 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9300 {
9301 tree const1 = TREE_OPERAND (arg0, 1);
9302 tree const2 = TREE_OPERAND (arg1, 1);
9303 tree variable1 = TREE_OPERAND (arg0, 0);
9304 tree variable2 = TREE_OPERAND (arg1, 0);
9305 tree cst;
9306 const char * const warnmsg = G_("assuming signed overflow does not "
9307 "occur when combining constants around "
9308 "a comparison");
9309
9310 /* Put the constant on the side where it doesn't overflow and is
9311 of lower absolute value than before. */
9312 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9313 ? MINUS_EXPR : PLUS_EXPR,
9314 const2, const1);
9315 if (!TREE_OVERFLOW (cst)
9316 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2))
9317 {
9318 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9319 return fold_build2_loc (loc, code, type,
9320 variable1,
9321 fold_build2_loc (loc,
9322 TREE_CODE (arg1), TREE_TYPE (arg1),
9323 variable2, cst));
9324 }
9325
9326 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9327 ? MINUS_EXPR : PLUS_EXPR,
9328 const1, const2);
9329 if (!TREE_OVERFLOW (cst)
9330 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1))
9331 {
9332 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9333 return fold_build2_loc (loc, code, type,
9334 fold_build2_loc (loc, TREE_CODE (arg0), TREE_TYPE (arg0),
9335 variable1, cst),
9336 variable2);
9337 }
9338 }
9339
9340 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9341 signed arithmetic case. That form is created by the compiler
9342 often enough for folding it to be of value. One example is in
9343 computing loop trip counts after Operator Strength Reduction. */
9344 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9345 && TREE_CODE (arg0) == MULT_EXPR
9346 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9347 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9348 && integer_zerop (arg1))
9349 {
9350 tree const1 = TREE_OPERAND (arg0, 1);
9351 tree const2 = arg1; /* zero */
9352 tree variable1 = TREE_OPERAND (arg0, 0);
9353 enum tree_code cmp_code = code;
9354
9355 /* Handle unfolded multiplication by zero. */
9356 if (integer_zerop (const1))
9357 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9358
9359 fold_overflow_warning (("assuming signed overflow does not occur when "
9360 "eliminating multiplication in comparison "
9361 "with zero"),
9362 WARN_STRICT_OVERFLOW_COMPARISON);
9363
9364 /* If const1 is negative we swap the sense of the comparison. */
9365 if (tree_int_cst_sgn (const1) < 0)
9366 cmp_code = swap_tree_comparison (cmp_code);
9367
9368 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9369 }
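
  /* E.g. x * 4 > 0 becomes x > 0, and x * -2 > 0 becomes x < 0
     because the negative constant swaps the sense of the comparison;
     both assume the multiplication does not wrap and warn
     accordingly. */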
9370
9371 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9372 if (tem)
9373 return tem;
9374
9375 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9376 {
9377 tree targ0 = strip_float_extensions (arg0);
9378 tree targ1 = strip_float_extensions (arg1);
9379 tree newtype = TREE_TYPE (targ0);
9380
9381 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9382 newtype = TREE_TYPE (targ1);
9383
9384 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9385 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9386 return fold_build2_loc (loc, code, type,
9387 fold_convert_loc (loc, newtype, targ0),
9388 fold_convert_loc (loc, newtype, targ1));
9389
9390 /* (-a) CMP (-b) -> b CMP a */
9391 if (TREE_CODE (arg0) == NEGATE_EXPR
9392 && TREE_CODE (arg1) == NEGATE_EXPR)
9393 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9394 TREE_OPERAND (arg0, 0));
9395
9396 if (TREE_CODE (arg1) == REAL_CST)
9397 {
9398 REAL_VALUE_TYPE cst;
9399 cst = TREE_REAL_CST (arg1);
9400
9401 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9402 if (TREE_CODE (arg0) == NEGATE_EXPR)
9403 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9404 TREE_OPERAND (arg0, 0),
9405 build_real (TREE_TYPE (arg1),
9406 real_value_negate (&cst)));
9407
9408 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9409 /* a CMP (-0) -> a CMP 0 */
9410 if (REAL_VALUE_MINUS_ZERO (cst))
9411 return fold_build2_loc (loc, code, type, arg0,
9412 build_real (TREE_TYPE (arg1), dconst0));
9413
9414 /* x != NaN is always true, other ops are always false. */
9415 if (REAL_VALUE_ISNAN (cst)
9416 && ! HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1))))
9417 {
9418 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9419 return omit_one_operand_loc (loc, type, tem, arg0);
9420 }
9421
9422 /* Fold comparisons against infinity. */
9423 if (REAL_VALUE_ISINF (cst)
9424 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9425 {
9426 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9427 if (tem != NULL_TREE)
9428 return tem;
9429 }
9430 }
9431
9432 /* If this is a comparison of a real constant with a PLUS_EXPR
9433 or a MINUS_EXPR of a real constant, we can convert it into a
9434 comparison with a revised real constant as long as no overflow
9435 occurs when unsafe_math_optimizations are enabled. */
9436 if (flag_unsafe_math_optimizations
9437 && TREE_CODE (arg1) == REAL_CST
9438 && (TREE_CODE (arg0) == PLUS_EXPR
9439 || TREE_CODE (arg0) == MINUS_EXPR)
9440 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9441 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9442 ? MINUS_EXPR : PLUS_EXPR,
9443 arg1, TREE_OPERAND (arg0, 1)))
9444 && !TREE_OVERFLOW (tem))
9445 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9446
9447 /* Likewise, we can simplify a comparison of a real constant with
9448 a MINUS_EXPR whose first operand is also a real constant, i.e.
9449 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9450 floating-point types only if -fassociative-math is set. */
9451 if (flag_associative_math
9452 && TREE_CODE (arg1) == REAL_CST
9453 && TREE_CODE (arg0) == MINUS_EXPR
9454 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9455 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9456 arg1))
9457 && !TREE_OVERFLOW (tem))
9458 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9459 TREE_OPERAND (arg0, 1), tem);
9460
9461 /* Fold comparisons against built-in math functions. */
9462 if (TREE_CODE (arg1) == REAL_CST
9463 && flag_unsafe_math_optimizations
9464 && ! flag_errno_math)
9465 {
9466 enum built_in_function fcode = builtin_mathfn_code (arg0);
9467
9468 if (fcode != END_BUILTINS)
9469 {
9470 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9471 if (tem != NULL_TREE)
9472 return tem;
9473 }
9474 }
9475 }
9476
9477 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9478 && CONVERT_EXPR_P (arg0))
9479 {
9480 /* If we are widening one operand of an integer comparison,
9481 see if the other operand is similarly being widened. Perhaps we
9482 can do the comparison in the narrower type. */
9483 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9484 if (tem)
9485 return tem;
9486
9487 /* Or if we are changing signedness. */
9488 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9489 if (tem)
9490 return tem;
9491 }
9492
9493 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9494 constant, we can simplify it. */
9495 if (TREE_CODE (arg1) == INTEGER_CST
9496 && (TREE_CODE (arg0) == MIN_EXPR
9497 || TREE_CODE (arg0) == MAX_EXPR)
9498 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9499 {
9500 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9501 if (tem)
9502 return tem;
9503 }
9504
9505 /* Simplify comparison of something with itself. (For IEEE
9506 floating-point, we can only do some of these simplifications.) */
9507 if (operand_equal_p (arg0, arg1, 0))
9508 {
9509 switch (code)
9510 {
9511 case EQ_EXPR:
9512 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9513 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9514 return constant_boolean_node (1, type);
9515 break;
9516
9517 case GE_EXPR:
9518 case LE_EXPR:
9519 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9520 || ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9521 return constant_boolean_node (1, type);
9522 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9523
9524 case NE_EXPR:
9525 /* For NE, we can only do this simplification if integer
9526 or we don't honor IEEE floating point NaNs. */
9527 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9528 && HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0))))
9529 break;
9530 /* ... fall through ... */
9531 case GT_EXPR:
9532 case LT_EXPR:
9533 return constant_boolean_node (0, type);
9534 default:
9535 gcc_unreachable ();
9536 }
9537 }
9538
9539 /* If we are comparing an expression that just has comparisons
9540 of two integer values, arithmetic expressions of those comparisons,
9541 and constants, we can simplify it. There are only three cases
9542 to check: the two values can either be equal, the first can be
9543 greater, or the second can be greater. Fold the expression for
9544 those three values. Since each value must be 0 or 1, we have
9545 eight possibilities, each of which corresponds to the constant 0
9546 or 1 or one of the six possible comparisons.
9547
9548 This handles common cases like (a > b) == 0 but also handles
9549 expressions like ((x > y) - (y > x)) > 0, which supposedly
9550 occur in macroized code. */
9551
9552 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9553 {
9554 tree cval1 = 0, cval2 = 0;
9555 int save_p = 0;
9556
9557 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9558 /* Don't handle degenerate cases here; they should already
9559 have been handled anyway. */
9560 && cval1 != 0 && cval2 != 0
9561 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9562 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9563 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9564 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9565 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9566 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9567 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9568 {
9569 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9570 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9571
9572 /* We can't just pass T to eval_subst in case cval1 or cval2
9573 was the same as ARG1. */
9574
9575 tree high_result
9576 = fold_build2_loc (loc, code, type,
9577 eval_subst (loc, arg0, cval1, maxval,
9578 cval2, minval),
9579 arg1);
9580 tree equal_result
9581 = fold_build2_loc (loc, code, type,
9582 eval_subst (loc, arg0, cval1, maxval,
9583 cval2, maxval),
9584 arg1);
9585 tree low_result
9586 = fold_build2_loc (loc, code, type,
9587 eval_subst (loc, arg0, cval1, minval,
9588 cval2, maxval),
9589 arg1);
9590
9591 /* All three of these results should be 0 or 1. Confirm they are.
9592 Then use those values to select the proper code to use. */
9593
9594 if (TREE_CODE (high_result) == INTEGER_CST
9595 && TREE_CODE (equal_result) == INTEGER_CST
9596 && TREE_CODE (low_result) == INTEGER_CST)
9597 {
9598 /* Make a 3-bit mask with the high-order bit being the
9599 value for `>', the next for `=', and the low for `<'. */
9600 switch ((integer_onep (high_result) * 4)
9601 + (integer_onep (equal_result) * 2)
9602 + integer_onep (low_result))
9603 {
9604 case 0:
9605 /* Always false. */
9606 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9607 case 1:
9608 code = LT_EXPR;
9609 break;
9610 case 2:
9611 code = EQ_EXPR;
9612 break;
9613 case 3:
9614 code = LE_EXPR;
9615 break;
9616 case 4:
9617 code = GT_EXPR;
9618 break;
9619 case 5:
9620 code = NE_EXPR;
9621 break;
9622 case 6:
9623 code = GE_EXPR;
9624 break;
9625 case 7:
9626 /* Always true. */
9627 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9628 }
9629
9630 if (save_p)
9631 {
9632 tem = save_expr (build2 (code, type, cval1, cval2));
9633 SET_EXPR_LOCATION (tem, loc);
9634 return tem;
9635 }
9636 return fold_build2_loc (loc, code, type, cval1, cval2);
9637 }
9638 }
9639 }
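
  /* E.g. for (a > b) == 0 the three substitutions evaluate to 0, 1
     and 1 for the high, equal and low cases, giving mask 3 and hence
     LE_EXPR: the expression folds to a <= b. */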
9640
9641 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9642 into a single range test. */
9643 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9644 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9645 && TREE_CODE (arg1) == INTEGER_CST
9646 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9647 && !integer_zerop (TREE_OPERAND (arg0, 1))
9648 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9649 && !TREE_OVERFLOW (arg1))
9650 {
9651 tem = fold_div_compare (loc, code, type, arg0, arg1);
9652 if (tem != NULL_TREE)
9653 return tem;
9654 }
9655
9656 /* Fold ~X op ~Y as Y op X. */
9657 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9658 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9659 {
9660 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9661 return fold_build2_loc (loc, code, type,
9662 fold_convert_loc (loc, cmp_type,
9663 TREE_OPERAND (arg1, 0)),
9664 TREE_OPERAND (arg0, 0));
9665 }
9666
9667 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9668 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9669 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9670 {
9671 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9672 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9673 TREE_OPERAND (arg0, 0),
9674 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9675 fold_convert_loc (loc, cmp_type, arg1)));
9676 }
9677
9678 return NULL_TREE;
9679 }
9680
9681
9682 /* Subroutine of fold_binary. Optimize complex multiplications of the
9683 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9684 argument EXPR represents the expression "z" of type TYPE. */
9685
9686 static tree
9687 fold_mult_zconjz (location_t loc, tree type, tree expr)
9688 {
9689 tree itype = TREE_TYPE (type);
9690 tree rpart, ipart, tem;
9691
9692 if (TREE_CODE (expr) == COMPLEX_EXPR)
9693 {
9694 rpart = TREE_OPERAND (expr, 0);
9695 ipart = TREE_OPERAND (expr, 1);
9696 }
9697 else if (TREE_CODE (expr) == COMPLEX_CST)
9698 {
9699 rpart = TREE_REALPART (expr);
9700 ipart = TREE_IMAGPART (expr);
9701 }
9702 else
9703 {
9704 expr = save_expr (expr);
9705 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9706 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9707 }
9708
9709 rpart = save_expr (rpart);
9710 ipart = save_expr (ipart);
9711 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9712 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9713 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9714 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9715 build_zero_cst (itype));
9716 }
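
/* For example, with z = 3 + 4i the folded form evaluates to
   COMPLEX_EXPR <3*3 + 4*4, 0>, i.e. 25 + 0i, which matches
   z * conj(z) without performing a full complex multiply. */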
9717
9718
9719 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9720 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9721 guarantees that P and N have the same least significant log2(M) bits.
9722 N is not otherwise constrained. In particular, N is not normalized to
9723 0 <= N < M as is common. In general, the precise value of P is unknown.
9724 M is chosen as large as possible such that constant N can be determined.
9725
9726 Returns M and sets *RESIDUE to N.
9727
9728 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9729 account. This is not always possible due to PR 35705.
9730 */
9731
9732 static unsigned HOST_WIDE_INT
9733 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9734 bool allow_func_align)
9735 {
9736 enum tree_code code;
9737
9738 *residue = 0;
9739
9740 code = TREE_CODE (expr);
9741 if (code == ADDR_EXPR)
9742 {
9743 unsigned int bitalign;
9744 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9745 *residue /= BITS_PER_UNIT;
9746 return bitalign / BITS_PER_UNIT;
9747 }
9748 else if (code == POINTER_PLUS_EXPR)
9749 {
9750 tree op0, op1;
9751 unsigned HOST_WIDE_INT modulus;
9752 enum tree_code inner_code;
9753
9754 op0 = TREE_OPERAND (expr, 0);
9755 STRIP_NOPS (op0);
9756 modulus = get_pointer_modulus_and_residue (op0, residue,
9757 allow_func_align);
9758
9759 op1 = TREE_OPERAND (expr, 1);
9760 STRIP_NOPS (op1);
9761 inner_code = TREE_CODE (op1);
9762 if (inner_code == INTEGER_CST)
9763 {
9764 *residue += TREE_INT_CST_LOW (op1);
9765 return modulus;
9766 }
9767 else if (inner_code == MULT_EXPR)
9768 {
9769 op1 = TREE_OPERAND (op1, 1);
9770 if (TREE_CODE (op1) == INTEGER_CST)
9771 {
9772 unsigned HOST_WIDE_INT align;
9773
9774 /* Compute the greatest power-of-2 divisor of op1. */
9775 align = TREE_INT_CST_LOW (op1);
9776 align &= -align;
9777
9778 /* If align is non-zero and less than *modulus, replace
9779 *modulus with align. If align is 0, then either op1 is 0
9780 or the greatest power-of-2 divisor of op1 doesn't fit in an
9781 unsigned HOST_WIDE_INT. In either case, no additional
9782 constraint is imposed. */
9783 if (align)
9784 modulus = MIN (modulus, align);
9785
9786 return modulus;
9787 }
9788 }
9789 }
9790
9791 /* If we get here, we were unable to determine anything useful about the
9792 expression. */
9793 return 1;
9794 }
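
/* An illustrative run: for &buf + i * 8, where buf is known to be
   16-byte aligned, the ADDR_EXPR contributes modulus 16 with residue
   0, and the MULT_EXPR by 8 lowers the modulus to MIN (16, 8) = 8,
   so the pointer is known to be a multiple of 8. Expressions the
   function cannot analyze report the trivial modulus 1. */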
9795
9796 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9797 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9798
9799 static bool
9800 vec_cst_ctor_to_array (tree arg, tree *elts)
9801 {
9802 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9803
9804 if (TREE_CODE (arg) == VECTOR_CST)
9805 {
9806 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9807 elts[i] = VECTOR_CST_ELT (arg, i);
9808 }
9809 else if (TREE_CODE (arg) == CONSTRUCTOR)
9810 {
9811 constructor_elt *elt;
9812
9813 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9814 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9815 return false;
9816 else
9817 elts[i] = elt->value;
9818 }
9819 else
9820 return false;
9821 for (; i < nelts; i++)
9822 elts[i]
9823 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9824 return true;
9825 }
9826
9827 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9828 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9829 NULL_TREE otherwise. */
9830
9831 static tree
9832 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9833 {
9834 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9835 tree *elts;
9836 bool need_ctor = false;
9837
9838 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9839 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9840 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9841 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9842 return NULL_TREE;
9843
9844 elts = XALLOCAVEC (tree, nelts * 3);
9845 if (!vec_cst_ctor_to_array (arg0, elts)
9846 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9847 return NULL_TREE;
9848
9849 for (i = 0; i < nelts; i++)
9850 {
9851 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9852 need_ctor = true;
9853 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9854 }
9855
9856 if (need_ctor)
9857 {
9858 vec<constructor_elt, va_gc> *v;
9859 vec_alloc (v, nelts);
9860 for (i = 0; i < nelts; i++)
9861 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9862 return build_constructor (type, v);
9863 }
9864 else
9865 return build_vector (type, &elts[2 * nelts]);
9866 }
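
/* E.g. permuting { 1, 2 } and { 3, 4 } with SEL = { 3, 0 } selects
   element 3 of the concatenated vector (4) and then element 0 (1),
   producing the VECTOR_CST { 4, 1 }; a CONSTRUCTOR is built instead
   whenever a selected element is not itself a constant. */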
9867
9868 /* Try to fold a pointer difference of type TYPE between two address
9869 expressions of array references AREF0 and AREF1 using location LOC.
9870 Return a simplified expression for the difference or NULL_TREE. */
9871
9872 static tree
9873 fold_addr_of_array_ref_difference (location_t loc, tree type,
9874 tree aref0, tree aref1)
9875 {
9876 tree base0 = TREE_OPERAND (aref0, 0);
9877 tree base1 = TREE_OPERAND (aref1, 0);
9878 tree base_offset = build_int_cst (type, 0);
9879
9880 /* If the bases are array references as well, recurse. If the bases
9881 are pointer indirections compute the difference of the pointers.
9882 If the bases are equal, we are set. */
9883 if ((TREE_CODE (base0) == ARRAY_REF
9884 && TREE_CODE (base1) == ARRAY_REF
9885 && (base_offset
9886 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9887 || (INDIRECT_REF_P (base0)
9888 && INDIRECT_REF_P (base1)
9889 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9890 TREE_OPERAND (base0, 0),
9891 TREE_OPERAND (base1, 0))))
9892 || operand_equal_p (base0, base1, 0))
9893 {
9894 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9895 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9896 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9897 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9898 return fold_build2_loc (loc, PLUS_EXPR, type,
9899 base_offset,
9900 fold_build2_loc (loc, MULT_EXPR, type,
9901 diff, esz));
9902 }
9903 return NULL_TREE;
9904 }
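
/* For instance, &a[5] - &a[2] has equal bases, so it folds to
   (5 - 2) * S where S is the element size of a; for nested
   references such as &a[i][5] - &a[i][2] the function first recurses
   on the bases and adds the resulting base offset. */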
9905
9906 /* If the real or vector real constant CST of type TYPE has an exact
9907 inverse, return it, else return NULL. */
9908
9909 static tree
9910 exact_inverse (tree type, tree cst)
9911 {
9912 REAL_VALUE_TYPE r;
9913 tree unit_type, *elts;
9914 enum machine_mode mode;
9915 unsigned vec_nelts, i;
9916
9917 switch (TREE_CODE (cst))
9918 {
9919 case REAL_CST:
9920 r = TREE_REAL_CST (cst);
9921
9922 if (exact_real_inverse (TYPE_MODE (type), &r))
9923 return build_real (type, r);
9924
9925 return NULL_TREE;
9926
9927 case VECTOR_CST:
9928 vec_nelts = VECTOR_CST_NELTS (cst);
9929 elts = XALLOCAVEC (tree, vec_nelts);
9930 unit_type = TREE_TYPE (type);
9931 mode = TYPE_MODE (unit_type);
9932
9933 for (i = 0; i < vec_nelts; i++)
9934 {
9935 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9936 if (!exact_real_inverse (mode, &r))
9937 return NULL_TREE;
9938 elts[i] = build_real (unit_type, r);
9939 }
9940
9941 return build_vector (type, elts);
9942
9943 default:
9944 return NULL_TREE;
9945 }
9946 }
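
/* For example, 4.0 has the exact binary inverse 0.25, so a caller
   can rewrite x / 4.0 as x * 0.25; 3.0 has no exact inverse and
   NULL_TREE keeps the division intact. The VECTOR_CST case requires
   every element to invert exactly. */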
9947
9948 /* Mask out the tz least significant bits of X of type TYPE where
9949 tz is the number of trailing zeroes in Y. */
9950 static double_int
9951 mask_with_tz (tree type, double_int x, double_int y)
9952 {
9953 int tz = y.trailing_zeros ();
9954
9955 if (tz > 0)
9956 {
9957 double_int mask;
9958
9959 mask = ~double_int::mask (tz);
9960 mask = mask.ext (TYPE_PRECISION (type), TYPE_UNSIGNED (type));
9961 return mask & x;
9962 }
9963 return x;
9964 }
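
/* E.g. with Y = 24 (binary 11000, three trailing zero bits) and
   X = 0x1f, the mask is ~7 and the result is 0x18. */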
9965
9966 /* Return true when T is an address and is known to be nonzero.
9967 For floating point we further ensure that T is not denormal.
9968 Similar logic is present in nonzero_address in rtlanal.c.
9969
9970 If the return value is based on the assumption that signed overflow
9971 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9972 change *STRICT_OVERFLOW_P. */
9973
9974 static bool
9975 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9976 {
9977 tree type = TREE_TYPE (t);
9978 enum tree_code code;
9979
9980 /* Doing something useful for floating point would need more work. */
9981 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9982 return false;
9983
9984 code = TREE_CODE (t);
9985 switch (TREE_CODE_CLASS (code))
9986 {
9987 case tcc_unary:
9988 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9989 strict_overflow_p);
9990 case tcc_binary:
9991 case tcc_comparison:
9992 return tree_binary_nonzero_warnv_p (code, type,
9993 TREE_OPERAND (t, 0),
9994 TREE_OPERAND (t, 1),
9995 strict_overflow_p);
9996 case tcc_constant:
9997 case tcc_declaration:
9998 case tcc_reference:
9999 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10000
10001 default:
10002 break;
10003 }
10004
10005 switch (code)
10006 {
10007 case TRUTH_NOT_EXPR:
10008 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
10009 strict_overflow_p);
10010
10011 case TRUTH_AND_EXPR:
10012 case TRUTH_OR_EXPR:
10013 case TRUTH_XOR_EXPR:
10014 return tree_binary_nonzero_warnv_p (code, type,
10015 TREE_OPERAND (t, 0),
10016 TREE_OPERAND (t, 1),
10017 strict_overflow_p);
10018
10019 case COND_EXPR:
10020 case CONSTRUCTOR:
10021 case OBJ_TYPE_REF:
10022 case ASSERT_EXPR:
10023 case ADDR_EXPR:
10024 case WITH_SIZE_EXPR:
10025 case SSA_NAME:
10026 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
10027
10028 case COMPOUND_EXPR:
10029 case MODIFY_EXPR:
10030 case BIND_EXPR:
10031 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
10032 strict_overflow_p);
10033
10034 case SAVE_EXPR:
10035 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
10036 strict_overflow_p);
10037
10038 case CALL_EXPR:
10039 {
10040 tree fndecl = get_callee_fndecl (t);
10041 if (!fndecl) return false;
10042 if (flag_delete_null_pointer_checks && !flag_check_new
10043 && DECL_IS_OPERATOR_NEW (fndecl)
10044 && !TREE_NOTHROW (fndecl))
10045 return true;
10046 if (flag_delete_null_pointer_checks
10047 && lookup_attribute ("returns_nonnull",
10048 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
10049 return true;
10050 return alloca_call_p (t);
10051 }
10052
10053 default:
10054 break;
10055 }
10056 return false;
10057 }
10058
10059 /* Return true when T is an address and is known to be nonzero.
10060 Handle warnings about undefined signed overflow. */
10061
10062 static bool
10063 tree_expr_nonzero_p (tree t)
10064 {
10065 bool ret, strict_overflow_p;
10066
10067 strict_overflow_p = false;
10068 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
10069 if (strict_overflow_p)
10070 fold_overflow_warning (("assuming signed overflow does not occur when "
10071 "determining that expression is always "
10072 "non-zero"),
10073 WARN_STRICT_OVERFLOW_MISC);
10074 return ret;
10075 }
10076
10077 /* Fold a binary expression of code CODE and type TYPE with operands
10078 OP0 and OP1. LOC is the location of the resulting expression.
10079 Return the folded expression if folding is successful. Otherwise,
10080 return NULL_TREE. */
10081
10082 tree
10083 fold_binary_loc (location_t loc,
10084 enum tree_code code, tree type, tree op0, tree op1)
10085 {
10086 enum tree_code_class kind = TREE_CODE_CLASS (code);
10087 tree arg0, arg1, tem;
10088 tree t1 = NULL_TREE;
10089 bool strict_overflow_p;
10090 unsigned int prec;
10091
10092 gcc_assert (IS_EXPR_CODE_CLASS (kind)
10093 && TREE_CODE_LENGTH (code) == 2
10094 && op0 != NULL_TREE
10095 && op1 != NULL_TREE);
10096
10097 arg0 = op0;
10098 arg1 = op1;
10099
10100 /* Strip any conversions that don't change the mode. This is
10101 safe for every expression, except for a comparison expression
10102 because its signedness is derived from its operands. So, in
10103 the latter case, only strip conversions that don't change the
10104 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
10105 preserved.
10106
10107 Note that this is done as an internal manipulation within the
10108 constant folder, in order to find the simplest representation
10109 of the arguments so that their form can be studied. In any
10110 cases, the appropriate type conversions should be put back in
10111 the tree that will get out of the constant folder. */
10112
10113 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
10114 {
10115 STRIP_SIGN_NOPS (arg0);
10116 STRIP_SIGN_NOPS (arg1);
10117 }
10118 else
10119 {
10120 STRIP_NOPS (arg0);
10121 STRIP_NOPS (arg1);
10122 }
10123
10124 /* Note that TREE_CONSTANT isn't enough: static var addresses are
10125 constant but we can't do arithmetic on them. */
10126 if ((TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10127 || (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
10128 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == FIXED_CST)
10129 || (TREE_CODE (arg0) == FIXED_CST && TREE_CODE (arg1) == INTEGER_CST)
10130 || (TREE_CODE (arg0) == COMPLEX_CST && TREE_CODE (arg1) == COMPLEX_CST)
10131 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == VECTOR_CST)
10132 || (TREE_CODE (arg0) == VECTOR_CST && TREE_CODE (arg1) == INTEGER_CST))
10133 {
10134 if (kind == tcc_binary)
10135 {
10136 /* Make sure type and arg0 have the same saturating flag. */
10137 gcc_assert (TYPE_SATURATING (type)
10138 == TYPE_SATURATING (TREE_TYPE (arg0)));
10139 tem = const_binop (code, arg0, arg1);
10140 }
10141 else if (kind == tcc_comparison)
10142 tem = fold_relational_const (code, type, arg0, arg1);
10143 else
10144 tem = NULL_TREE;
10145
10146 if (tem != NULL_TREE)
10147 {
10148 if (TREE_TYPE (tem) != type)
10149 tem = fold_convert_loc (loc, type, tem);
10150 return tem;
10151 }
10152 }
10153
10154 /* If this is a commutative operation, and ARG0 is a constant, move it
10155 to ARG1 to reduce the number of tests below. */
10156 if (commutative_tree_code (code)
10157 && tree_swap_operands_p (arg0, arg1, true))
10158 return fold_build2_loc (loc, code, type, op1, op0);
10159
10160 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
10161
10162 First check for cases where an arithmetic operation is applied to a
10163 compound, conditional, or comparison operation. Push the arithmetic
10164 operation inside the compound or conditional to see if any folding
10165 can then be done. Convert comparison to conditional for this purpose.
10166 The also optimizes non-constant cases that used to be done in
10167 expand_expr.
10168
10169 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
10170 one of the operands is a comparison and the other is a comparison, a
10171 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
10172 code below would make the expression more complex. Change it to a
10173 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
10174 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
10175
10176 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
10177 || code == EQ_EXPR || code == NE_EXPR)
10178 && TREE_CODE (type) != VECTOR_TYPE
10179 && ((truth_value_p (TREE_CODE (arg0))
10180 && (truth_value_p (TREE_CODE (arg1))
10181 || (TREE_CODE (arg1) == BIT_AND_EXPR
10182 && integer_onep (TREE_OPERAND (arg1, 1)))))
10183 || (truth_value_p (TREE_CODE (arg1))
10184 && (truth_value_p (TREE_CODE (arg0))
10185 || (TREE_CODE (arg0) == BIT_AND_EXPR
10186 && integer_onep (TREE_OPERAND (arg0, 1)))))))
10187 {
10188 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
10189 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
10190 : TRUTH_XOR_EXPR,
10191 boolean_type_node,
10192 fold_convert_loc (loc, boolean_type_node, arg0),
10193 fold_convert_loc (loc, boolean_type_node, arg1));
10194
10195 if (code == EQ_EXPR)
10196 tem = invert_truthvalue_loc (loc, tem);
10197
10198 return fold_convert_loc (loc, type, tem);
10199 }
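
  /* For instance, (a < b) & (c <= d) is rebuilt here as the boolean
     TRUTH_AND_EXPR <a < b, c <= d> and converted back to TYPE, and
     (a < b) == (c < d) becomes the inversion of
     TRUTH_XOR_EXPR <a < b, c < d>. */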
10200
10201 if (TREE_CODE_CLASS (code) == tcc_binary
10202 || TREE_CODE_CLASS (code) == tcc_comparison)
10203 {
10204 if (TREE_CODE (arg0) == COMPOUND_EXPR)
10205 {
10206 tem = fold_build2_loc (loc, code, type,
10207 fold_convert_loc (loc, TREE_TYPE (op0),
10208 TREE_OPERAND (arg0, 1)), op1);
10209 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
10210 tem);
10211 }
10212 if (TREE_CODE (arg1) == COMPOUND_EXPR
10213 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10214 {
10215 tem = fold_build2_loc (loc, code, type, op0,
10216 fold_convert_loc (loc, TREE_TYPE (op1),
10217 TREE_OPERAND (arg1, 1)));
10218 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10219 tem);
10220 }
10221
10222 if (TREE_CODE (arg0) == COND_EXPR
10223 || TREE_CODE (arg0) == VEC_COND_EXPR
10224 || COMPARISON_CLASS_P (arg0))
10225 {
10226 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10227 arg0, arg1,
10228 /*cond_first_p=*/1);
10229 if (tem != NULL_TREE)
10230 return tem;
10231 }
10232
10233 if (TREE_CODE (arg1) == COND_EXPR
10234 || TREE_CODE (arg1) == VEC_COND_EXPR
10235 || COMPARISON_CLASS_P (arg1))
10236 {
10237 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10238 arg1, arg0,
10239 /*cond_first_p=*/0);
10240 if (tem != NULL_TREE)
10241 return tem;
10242 }
10243 }
10244
10245 switch (code)
10246 {
10247 case MEM_REF:
10248 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10249 if (TREE_CODE (arg0) == ADDR_EXPR
10250 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10251 {
10252 tree iref = TREE_OPERAND (arg0, 0);
10253 return fold_build2 (MEM_REF, type,
10254 TREE_OPERAND (iref, 0),
10255 int_const_binop (PLUS_EXPR, arg1,
10256 TREE_OPERAND (iref, 1)));
10257 }
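/* E.g. MEM[&MEM[p, 4], 8] folds here to MEM[p, 12].  */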
10258
10259 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10260 if (TREE_CODE (arg0) == ADDR_EXPR
10261 && handled_component_p (TREE_OPERAND (arg0, 0)))
10262 {
10263 tree base;
10264 HOST_WIDE_INT coffset;
10265 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10266 &coffset);
10267 if (!base)
10268 return NULL_TREE;
10269 return fold_build2 (MEM_REF, type,
10270 build_fold_addr_expr (base),
10271 int_const_binop (PLUS_EXPR, arg1,
10272 size_int (coffset)));
10273 }
10274
10275 return NULL_TREE;
10276
10277 case POINTER_PLUS_EXPR:
10278 /* 0 +p index -> (type)index */
10279 if (integer_zerop (arg0))
10280 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10281
10282 /* PTR +p 0 -> PTR */
10283 if (integer_zerop (arg1))
10284 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10285
10286 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10287 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10288 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10289 return fold_convert_loc (loc, type,
10290 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10291 fold_convert_loc (loc, sizetype,
10292 arg1),
10293 fold_convert_loc (loc, sizetype,
10294 arg0)));
10295
10296 /* (PTR +p B) +p A -> PTR +p (B + A) */
10297 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10298 {
10299 tree inner;
10300 tree arg01 = fold_convert_loc (loc, sizetype, TREE_OPERAND (arg0, 1));
10301 tree arg00 = TREE_OPERAND (arg0, 0);
10302 inner = fold_build2_loc (loc, PLUS_EXPR, sizetype,
10303 arg01, fold_convert_loc (loc, sizetype, arg1));
10304 return fold_convert_loc (loc, type,
10305 fold_build_pointer_plus_loc (loc,
10306 arg00, inner));
10307 }
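/* E.g. (p p+ 4) p+ i folds here to p p+ (4 + i), keeping a single
POINTER_PLUS_EXPR.  */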
10308
10309 /* PTR_CST +p CST -> CST1 */
10310 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
10311 return fold_build2_loc (loc, PLUS_EXPR, type, arg0,
10312 fold_convert_loc (loc, type, arg1));
10313
10314 /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
10315 of the array.  The loop optimizer sometimes produces this type of
10316 expression.  */
10317 if (TREE_CODE (arg0) == ADDR_EXPR)
10318 {
10319 tem = try_move_mult_to_index (loc, arg0,
10320 fold_convert_loc (loc,
10321 ssizetype, arg1));
10322 if (tem)
10323 return fold_convert_loc (loc, type, tem);
10324 }
10325
10326 return NULL_TREE;
10327
10328 case PLUS_EXPR:
10329 /* A + (-B) -> A - B */
10330 if (TREE_CODE (arg1) == NEGATE_EXPR)
10331 return fold_build2_loc (loc, MINUS_EXPR, type,
10332 fold_convert_loc (loc, type, arg0),
10333 fold_convert_loc (loc, type,
10334 TREE_OPERAND (arg1, 0)));
10335 /* (-A) + B -> B - A */
10336 if (TREE_CODE (arg0) == NEGATE_EXPR
10337 && reorder_operands_p (TREE_OPERAND (arg0, 0), arg1))
10338 return fold_build2_loc (loc, MINUS_EXPR, type,
10339 fold_convert_loc (loc, type, arg1),
10340 fold_convert_loc (loc, type,
10341 TREE_OPERAND (arg0, 0)));
10342
10343 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10344 {
10345 /* Convert ~A + 1 to -A. */
10346 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10347 && integer_onep (arg1))
10348 return fold_build1_loc (loc, NEGATE_EXPR, type,
10349 fold_convert_loc (loc, type,
10350 TREE_OPERAND (arg0, 0)));
10351
10352 /* ~X + X is -1. */
10353 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10354 && !TYPE_OVERFLOW_TRAPS (type))
10355 {
10356 tree tem = TREE_OPERAND (arg0, 0);
10357
10358 STRIP_NOPS (tem);
10359 if (operand_equal_p (tem, arg1, 0))
10360 {
10361 t1 = build_all_ones_cst (type);
10362 return omit_one_operand_loc (loc, type, t1, arg1);
10363 }
10364 }
10365
10366 /* X + ~X is -1. */
10367 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10368 && !TYPE_OVERFLOW_TRAPS (type))
10369 {
10370 tree tem = TREE_OPERAND (arg1, 0);
10371
10372 STRIP_NOPS (tem);
10373 if (operand_equal_p (arg0, tem, 0))
10374 {
10375 t1 = build_all_ones_cst (type);
10376 return omit_one_operand_loc (loc, type, t1, arg0);
10377 }
10378 }
10379
10380 /* X + (X / CST) * -CST is X % CST. */
10381 if (TREE_CODE (arg1) == MULT_EXPR
10382 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10383 && operand_equal_p (arg0,
10384 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10385 {
10386 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10387 tree cst1 = TREE_OPERAND (arg1, 1);
10388 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10389 cst1, cst0);
10390 if (sum && integer_zerop (sum))
10391 return fold_convert_loc (loc, type,
10392 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10393 TREE_TYPE (arg0), arg0,
10394 cst0));
10395 }
10396 }
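/* E.g. x + (x / 16) * -16 folds to x % 16 above, since the two
constants sum to zero.  */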
10397
10398 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10399 the constant 1.  Make sure the type is not saturating and has the signedness of
10400 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10401 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10402 if ((TREE_CODE (arg0) == MULT_EXPR
10403 || TREE_CODE (arg1) == MULT_EXPR)
10404 && !TYPE_SATURATING (type)
10405 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10406 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10407 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10408 {
10409 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10410 if (tem)
10411 return tem;
10412 }
10413
10414 if (! FLOAT_TYPE_P (type))
10415 {
10416 if (integer_zerop (arg1))
10417 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10418
10419 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10420 with a constant, and the two constants have no bits in common,
10421 we should treat this as a BIT_IOR_EXPR since this may produce more
10422 simplifications. */
10423 if (TREE_CODE (arg0) == BIT_AND_EXPR
10424 && TREE_CODE (arg1) == BIT_AND_EXPR
10425 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10426 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10427 && integer_zerop (const_binop (BIT_AND_EXPR,
10428 TREE_OPERAND (arg0, 1),
10429 TREE_OPERAND (arg1, 1))))
10430 {
10431 code = BIT_IOR_EXPR;
10432 goto bit_ior;
10433 }
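/* E.g. (x & 0xF0) + (y & 0x0F) is handled as (x & 0xF0) | (y & 0x0F)
because the two masks share no bits.  */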
10434
10435 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10436 (plus (plus (mult) (mult)) (foo)) so that we can
10437 take advantage of the factoring cases below. */
10438 if (TYPE_OVERFLOW_WRAPS (type)
10439 && (((TREE_CODE (arg0) == PLUS_EXPR
10440 || TREE_CODE (arg0) == MINUS_EXPR)
10441 && TREE_CODE (arg1) == MULT_EXPR)
10442 || ((TREE_CODE (arg1) == PLUS_EXPR
10443 || TREE_CODE (arg1) == MINUS_EXPR)
10444 && TREE_CODE (arg0) == MULT_EXPR)))
10445 {
10446 tree parg0, parg1, parg, marg;
10447 enum tree_code pcode;
10448
10449 if (TREE_CODE (arg1) == MULT_EXPR)
10450 parg = arg0, marg = arg1;
10451 else
10452 parg = arg1, marg = arg0;
10453 pcode = TREE_CODE (parg);
10454 parg0 = TREE_OPERAND (parg, 0);
10455 parg1 = TREE_OPERAND (parg, 1);
10456 STRIP_NOPS (parg0);
10457 STRIP_NOPS (parg1);
10458
10459 if (TREE_CODE (parg0) == MULT_EXPR
10460 && TREE_CODE (parg1) != MULT_EXPR)
10461 return fold_build2_loc (loc, pcode, type,
10462 fold_build2_loc (loc, PLUS_EXPR, type,
10463 fold_convert_loc (loc, type,
10464 parg0),
10465 fold_convert_loc (loc, type,
10466 marg)),
10467 fold_convert_loc (loc, type, parg1));
10468 if (TREE_CODE (parg0) != MULT_EXPR
10469 && TREE_CODE (parg1) == MULT_EXPR)
10470 return
10471 fold_build2_loc (loc, PLUS_EXPR, type,
10472 fold_convert_loc (loc, type, parg0),
10473 fold_build2_loc (loc, pcode, type,
10474 fold_convert_loc (loc, type, marg),
10475 fold_convert_loc (loc, type,
10476 parg1)));
10477 }
10478 }
10479 else
10480 {
10481 /* See if ARG1 is zero and X + ARG1 reduces to X. */
10482 if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 0))
10483 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10484
10485 /* Likewise if the operands are reversed. */
10486 if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10487 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10488
10489 /* Convert X + -C into X - C. */
10490 if (TREE_CODE (arg1) == REAL_CST
10491 && REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1)))
10492 {
10493 tem = fold_negate_const (arg1, type);
10494 if (!TREE_OVERFLOW (arg1) || !flag_trapping_math)
10495 return fold_build2_loc (loc, MINUS_EXPR, type,
10496 fold_convert_loc (loc, type, arg0),
10497 fold_convert_loc (loc, type, tem));
10498 }
10499
10500 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10501 to __complex__ ( x, y ). This is not the same for SNaNs or
10502 if signed zeros are involved. */
10503 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10504 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10505 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10506 {
10507 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10508 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10509 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10510 bool arg0rz = false, arg0iz = false;
10511 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10512 || (arg0i && (arg0iz = real_zerop (arg0i))))
10513 {
10514 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10515 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10516 if (arg0rz && arg1i && real_zerop (arg1i))
10517 {
10518 tree rp = arg1r ? arg1r
10519 : build1 (REALPART_EXPR, rtype, arg1);
10520 tree ip = arg0i ? arg0i
10521 : build1 (IMAGPART_EXPR, rtype, arg0);
10522 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10523 }
10524 else if (arg0iz && arg1r && real_zerop (arg1r))
10525 {
10526 tree rp = arg0r ? arg0r
10527 : build1 (REALPART_EXPR, rtype, arg0);
10528 tree ip = arg1i ? arg1i
10529 : build1 (IMAGPART_EXPR, rtype, arg1);
10530 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10531 }
10532 }
10533 }
10534
10535 if (flag_unsafe_math_optimizations
10536 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10537 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10538 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10539 return tem;
10540
10541 /* Convert x+x into x*2.0. */
10542 if (operand_equal_p (arg0, arg1, 0)
10543 && SCALAR_FLOAT_TYPE_P (type))
10544 return fold_build2_loc (loc, MULT_EXPR, type, arg0,
10545 build_real (type, dconst2));
10546
10547 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10548 We associate floats only if the user has specified
10549 -fassociative-math. */
10550 if (flag_associative_math
10551 && TREE_CODE (arg1) == PLUS_EXPR
10552 && TREE_CODE (arg0) != MULT_EXPR)
10553 {
10554 tree tree10 = TREE_OPERAND (arg1, 0);
10555 tree tree11 = TREE_OPERAND (arg1, 1);
10556 if (TREE_CODE (tree11) == MULT_EXPR
10557 && TREE_CODE (tree10) == MULT_EXPR)
10558 {
10559 tree tree0;
10560 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10561 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10562 }
10563 }
10564 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10565 We associate floats only if the user has specified
10566 -fassociative-math. */
10567 if (flag_associative_math
10568 && TREE_CODE (arg0) == PLUS_EXPR
10569 && TREE_CODE (arg1) != MULT_EXPR)
10570 {
10571 tree tree00 = TREE_OPERAND (arg0, 0);
10572 tree tree01 = TREE_OPERAND (arg0, 1);
10573 if (TREE_CODE (tree01) == MULT_EXPR
10574 && TREE_CODE (tree00) == MULT_EXPR)
10575 {
10576 tree tree0;
10577 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10578 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10579 }
10580 }
10581 }
10582
10583 bit_rotate:
10584 /* (A << C1) + (A >> C2), if A is unsigned and C1+C2 is the size of A,
10585 is a rotate of A by C1 bits. */
10586 /* (A << B) + (A >> (Z - B)), if A is unsigned and Z is the size of A,
10587 is a rotate of A by B bits. */
10588 {
10589 enum tree_code code0, code1;
10590 tree rtype;
10591 code0 = TREE_CODE (arg0);
10592 code1 = TREE_CODE (arg1);
10593 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10594 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10595 && operand_equal_p (TREE_OPERAND (arg0, 0),
10596 TREE_OPERAND (arg1, 0), 0)
10597 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10598 TYPE_UNSIGNED (rtype))
10599 /* Only create rotates in complete modes. Other cases are not
10600 expanded properly. */
10601 && (element_precision (rtype)
10602 == element_precision (TYPE_MODE (rtype))))
10603 {
10604 tree tree01, tree11;
10605 enum tree_code code01, code11;
10606
10607 tree01 = TREE_OPERAND (arg0, 1);
10608 tree11 = TREE_OPERAND (arg1, 1);
10609 STRIP_NOPS (tree01);
10610 STRIP_NOPS (tree11);
10611 code01 = TREE_CODE (tree01);
10612 code11 = TREE_CODE (tree11);
10613 if (code01 == INTEGER_CST
10614 && code11 == INTEGER_CST
10615 && TREE_INT_CST_HIGH (tree01) == 0
10616 && TREE_INT_CST_HIGH (tree11) == 0
10617 && ((TREE_INT_CST_LOW (tree01) + TREE_INT_CST_LOW (tree11))
10618 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10619 {
10620 tem = build2_loc (loc, LROTATE_EXPR,
10621 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10622 TREE_OPERAND (arg0, 0),
10623 code0 == LSHIFT_EXPR ? tree01 : tree11);
10624 return fold_convert_loc (loc, type, tem);
10625 }
10626 else if (code11 == MINUS_EXPR)
10627 {
10628 tree tree110, tree111;
10629 tree110 = TREE_OPERAND (tree11, 0);
10630 tree111 = TREE_OPERAND (tree11, 1);
10631 STRIP_NOPS (tree110);
10632 STRIP_NOPS (tree111);
10633 if (TREE_CODE (tree110) == INTEGER_CST
10634 && 0 == compare_tree_int (tree110,
10635 element_precision
10636 (TREE_TYPE (TREE_OPERAND
10637 (arg0, 0))))
10638 && operand_equal_p (tree01, tree111, 0))
10639 return
10640 fold_convert_loc (loc, type,
10641 build2 ((code0 == LSHIFT_EXPR
10642 ? LROTATE_EXPR
10643 : RROTATE_EXPR),
10644 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10645 TREE_OPERAND (arg0, 0), tree01));
10646 }
10647 else if (code01 == MINUS_EXPR)
10648 {
10649 tree tree010, tree011;
10650 tree010 = TREE_OPERAND (tree01, 0);
10651 tree011 = TREE_OPERAND (tree01, 1);
10652 STRIP_NOPS (tree010);
10653 STRIP_NOPS (tree011);
10654 if (TREE_CODE (tree010) == INTEGER_CST
10655 && 0 == compare_tree_int (tree010,
10656 element_precision
10657 (TREE_TYPE (TREE_OPERAND
10658 (arg0, 0))))
10659 && operand_equal_p (tree11, tree011, 0))
10660 return fold_convert_loc
10661 (loc, type,
10662 build2 ((code0 != LSHIFT_EXPR
10663 ? LROTATE_EXPR
10664 : RROTATE_EXPR),
10665 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10666 TREE_OPERAND (arg0, 0), tree11));
10667 }
10668 }
10669 }
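/* E.g. for a 32-bit unsigned x, (x << 3) + (x >> 29), and the
equivalent | and ^ forms that jump here, fold to a rotate of x left
by 3 bits.  */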
10670
10671 associate:
10672 /* In most languages, we can't associate operations on floats through
10673 parentheses.  Rather than remembering where the parentheses were, we
10674 don't associate floats at all, unless the user has specified
10675 -fassociative-math.
10676 We also need to make sure the type is not saturating.  */
10677
10678 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10679 && !TYPE_SATURATING (type))
10680 {
10681 tree var0, con0, lit0, minus_lit0;
10682 tree var1, con1, lit1, minus_lit1;
10683 tree atype = type;
10684 bool ok = true;
10685
10686 /* Split both trees into variables, constants, and literals. Then
10687 associate each group together, the constants with literals,
10688 then the result with variables. This increases the chances of
10689 literals being recombined later and of generating relocatable
10690 expressions for the sum of a constant and literal. */
10691 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10692 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10693 code == MINUS_EXPR);
10694
10695 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10696 if (code == MINUS_EXPR)
10697 code = PLUS_EXPR;
10698
10699 /* With undefined overflow prefer doing association in a type
10700 which wraps on overflow, if that is one of the operand types. */
10701 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10702 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10703 {
10704 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10705 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10706 atype = TREE_TYPE (arg0);
10707 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10708 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10709 atype = TREE_TYPE (arg1);
10710 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10711 }
10712
10713 /* With undefined overflow we can only associate constants with one
10714 variable, and constants whose association doesn't overflow. */
10715 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10716 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10717 {
10718 if (var0 && var1)
10719 {
10720 tree tmp0 = var0;
10721 tree tmp1 = var1;
10722
10723 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10724 tmp0 = TREE_OPERAND (tmp0, 0);
10725 if (CONVERT_EXPR_P (tmp0)
10726 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10727 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10728 <= TYPE_PRECISION (atype)))
10729 tmp0 = TREE_OPERAND (tmp0, 0);
10730 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10731 tmp1 = TREE_OPERAND (tmp1, 0);
10732 if (CONVERT_EXPR_P (tmp1)
10733 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10734 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10735 <= TYPE_PRECISION (atype)))
10736 tmp1 = TREE_OPERAND (tmp1, 0);
10737 /* The only case we can still associate with two variables
10738 is if they are the same, modulo negation and bit-pattern
10739 preserving conversions. */
10740 if (!operand_equal_p (tmp0, tmp1, 0))
10741 ok = false;
10742 }
10743 }
10744
10745 /* Only do something if we found more than two objects. Otherwise,
10746 nothing has changed and we risk infinite recursion. */
10747 if (ok
10748 && (2 < ((var0 != 0) + (var1 != 0)
10749 + (con0 != 0) + (con1 != 0)
10750 + (lit0 != 0) + (lit1 != 0)
10751 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10752 {
10753 bool any_overflows = false;
10754 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10755 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10756 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10757 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10758 var0 = associate_trees (loc, var0, var1, code, atype);
10759 con0 = associate_trees (loc, con0, con1, code, atype);
10760 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10761 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10762 code, atype);
10763
10764 /* Preserve the MINUS_EXPR if the negative part of the literal is
10765 greater than the positive part. Otherwise, the multiplicative
10766 folding code (i.e. extract_muldiv) may be fooled in case
10767 unsigned constants are subtracted, like in the following
10768 example: ((X*2 + 4) - 8U)/2. */
10769 if (minus_lit0 && lit0)
10770 {
10771 if (TREE_CODE (lit0) == INTEGER_CST
10772 && TREE_CODE (minus_lit0) == INTEGER_CST
10773 && tree_int_cst_lt (lit0, minus_lit0))
10774 {
10775 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10776 MINUS_EXPR, atype);
10777 lit0 = 0;
10778 }
10779 else
10780 {
10781 lit0 = associate_trees (loc, lit0, minus_lit0,
10782 MINUS_EXPR, atype);
10783 minus_lit0 = 0;
10784 }
10785 }
10786
10787 /* Don't introduce overflows through reassociation. */
10788 if (!any_overflows
10789 && ((lit0 && TREE_OVERFLOW (lit0))
10790 || (minus_lit0 && TREE_OVERFLOW (minus_lit0))))
10791 return NULL_TREE;
10792
10793 if (minus_lit0)
10794 {
10795 if (con0 == 0)
10796 return
10797 fold_convert_loc (loc, type,
10798 associate_trees (loc, var0, minus_lit0,
10799 MINUS_EXPR, atype));
10800 else
10801 {
10802 con0 = associate_trees (loc, con0, minus_lit0,
10803 MINUS_EXPR, atype);
10804 return
10805 fold_convert_loc (loc, type,
10806 associate_trees (loc, var0, con0,
10807 PLUS_EXPR, atype));
10808 }
10809 }
10810
10811 con0 = associate_trees (loc, con0, lit0, code, atype);
10812 return
10813 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10814 code, atype));
10815 }
10816 }
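/* E.g. for unsigned (wrapping) x and y, (x + 3) + (y + 5) is split
into variables and literals here and reassociated to (x + y) + 8.  */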
10817
10818 return NULL_TREE;
10819
10820 case MINUS_EXPR:
10821 /* Pointer simplifications for subtraction, simple reassociations. */
10822 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10823 {
10824 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10825 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10826 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10827 {
10828 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10829 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10830 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10831 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10832 return fold_build2_loc (loc, PLUS_EXPR, type,
10833 fold_build2_loc (loc, MINUS_EXPR, type,
10834 arg00, arg10),
10835 fold_build2_loc (loc, MINUS_EXPR, type,
10836 arg01, arg11));
10837 }
10838 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10839 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10840 {
10841 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10842 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10843 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10844 fold_convert_loc (loc, type, arg1));
10845 if (tmp)
10846 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10847 }
10848 }
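/* E.g. (p p+ 4) - (q p+ 1) reassociates to (p - q) + (4 - 1) above,
and (p p+ 4) - p simplifies toward plain 4 once the pointer
difference folds to zero.  */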
10849 /* A - (-B) -> A + B */
10850 if (TREE_CODE (arg1) == NEGATE_EXPR)
10851 return fold_build2_loc (loc, PLUS_EXPR, type, op0,
10852 fold_convert_loc (loc, type,
10853 TREE_OPERAND (arg1, 0)));
10854 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10855 if (TREE_CODE (arg0) == NEGATE_EXPR
10856 && negate_expr_p (arg1)
10857 && reorder_operands_p (arg0, arg1))
10858 return fold_build2_loc (loc, MINUS_EXPR, type,
10859 fold_convert_loc (loc, type,
10860 negate_expr (arg1)),
10861 fold_convert_loc (loc, type,
10862 TREE_OPERAND (arg0, 0)));
10863 /* Convert -A - 1 to ~A. */
10864 if (TREE_CODE (type) != COMPLEX_TYPE
10865 && TREE_CODE (arg0) == NEGATE_EXPR
10866 && integer_onep (arg1)
10867 && !TYPE_OVERFLOW_TRAPS (type))
10868 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
10869 fold_convert_loc (loc, type,
10870 TREE_OPERAND (arg0, 0)));
10871
10872 /* Convert -1 - A to ~A. */
10873 if (TREE_CODE (type) != COMPLEX_TYPE
10874 && integer_all_onesp (arg0))
10875 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op1);
10876
10877
10878 /* X - (X / Y) * Y is X % Y. */
10879 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10880 && TREE_CODE (arg1) == MULT_EXPR
10881 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10882 && operand_equal_p (arg0,
10883 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10884 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10885 TREE_OPERAND (arg1, 1), 0))
10886 return
10887 fold_convert_loc (loc, type,
10888 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10889 arg0, TREE_OPERAND (arg1, 1)));
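/* E.g. for integral x and y, x - (x / y) * y folds directly to
x % y.  */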
10890
10891 if (! FLOAT_TYPE_P (type))
10892 {
10893 if (integer_zerop (arg0))
10894 return negate_expr (fold_convert_loc (loc, type, arg1));
10895 if (integer_zerop (arg1))
10896 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10897
10898 /* Fold A - (A & B) into ~B & A. */
10899 if (!TREE_SIDE_EFFECTS (arg0)
10900 && TREE_CODE (arg1) == BIT_AND_EXPR)
10901 {
10902 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10903 {
10904 tree arg10 = fold_convert_loc (loc, type,
10905 TREE_OPERAND (arg1, 0));
10906 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10907 fold_build1_loc (loc, BIT_NOT_EXPR,
10908 type, arg10),
10909 fold_convert_loc (loc, type, arg0));
10910 }
10911 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10912 {
10913 tree arg11 = fold_convert_loc (loc,
10914 type, TREE_OPERAND (arg1, 1));
10915 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10916 fold_build1_loc (loc, BIT_NOT_EXPR,
10917 type, arg11),
10918 fold_convert_loc (loc, type, arg0));
10919 }
10920 }
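/* E.g. a - (a & b) folds to ~b & a above, replacing the subtraction
with purely bitwise operations.  */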
10921
10922 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10923 any power of 2 minus 1. */
10924 if (TREE_CODE (arg0) == BIT_AND_EXPR
10925 && TREE_CODE (arg1) == BIT_AND_EXPR
10926 && operand_equal_p (TREE_OPERAND (arg0, 0),
10927 TREE_OPERAND (arg1, 0), 0))
10928 {
10929 tree mask0 = TREE_OPERAND (arg0, 1);
10930 tree mask1 = TREE_OPERAND (arg1, 1);
10931 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10932
10933 if (operand_equal_p (tem, mask1, 0))
10934 {
10935 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10936 TREE_OPERAND (arg0, 0), mask1);
10937 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10938 }
10939 }
10940 }
10941
10942 /* See if ARG1 is zero and X - ARG1 reduces to X. */
10943 else if (fold_real_zero_addition_p (TREE_TYPE (arg0), arg1, 1))
10944 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10945
10946 /* (ARG0 - ARG1) is the same as (-ARG1 + ARG0). So check whether
10947 ARG0 is zero and X + ARG0 reduces to X, since that would mean
10948 (-ARG1 + ARG0) reduces to -ARG1. */
10949 else if (fold_real_zero_addition_p (TREE_TYPE (arg1), arg0, 0))
10950 return negate_expr (fold_convert_loc (loc, type, arg1));
10951
10952 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10953 __complex__ ( x, -y ). This is not the same for SNaNs or if
10954 signed zeros are involved. */
10955 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
10956 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
10957 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10958 {
10959 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10960 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10961 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10962 bool arg0rz = false, arg0iz = false;
10963 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10964 || (arg0i && (arg0iz = real_zerop (arg0i))))
10965 {
10966 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10967 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10968 if (arg0rz && arg1i && real_zerop (arg1i))
10969 {
10970 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10971 arg1r ? arg1r
10972 : build1 (REALPART_EXPR, rtype, arg1));
10973 tree ip = arg0i ? arg0i
10974 : build1 (IMAGPART_EXPR, rtype, arg0);
10975 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10976 }
10977 else if (arg0iz && arg1r && real_zerop (arg1r))
10978 {
10979 tree rp = arg0r ? arg0r
10980 : build1 (REALPART_EXPR, rtype, arg0);
10981 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10982 arg1i ? arg1i
10983 : build1 (IMAGPART_EXPR, rtype, arg1));
10984 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10985 }
10986 }
10987 }
10988
10989 /* Fold &x - &x. This can happen from &x.foo - &x.
10990 This is unsafe for certain floats even in non-IEEE formats.
10991 In IEEE, it is unsafe because it does wrong for NaNs.
10992 Also note that operand_equal_p is always false if an operand
10993 is volatile. */
10994
10995 if ((!FLOAT_TYPE_P (type) || !HONOR_NANS (TYPE_MODE (type)))
10996 && operand_equal_p (arg0, arg1, 0))
10997 return build_zero_cst (type);
10998
10999 /* A - B -> A + (-B) if B is easily negatable. */
11000 if (negate_expr_p (arg1)
11001 && ((FLOAT_TYPE_P (type)
11002 /* Avoid this transformation if B is a positive REAL_CST. */
11003 && (TREE_CODE (arg1) != REAL_CST
11004 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
11005 || INTEGRAL_TYPE_P (type)))
11006 return fold_build2_loc (loc, PLUS_EXPR, type,
11007 fold_convert_loc (loc, type, arg0),
11008 fold_convert_loc (loc, type,
11009 negate_expr (arg1)));
11010
11011 /* Try folding difference of addresses. */
11012 {
11013 HOST_WIDE_INT diff;
11014
11015 if ((TREE_CODE (arg0) == ADDR_EXPR
11016 || TREE_CODE (arg1) == ADDR_EXPR)
11017 && ptr_difference_const (arg0, arg1, &diff))
11018 return build_int_cst_type (type, diff);
11019 }
11020
11021 /* Fold &a[i] - &a[j] to i-j. */
11022 if (TREE_CODE (arg0) == ADDR_EXPR
11023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
11024 && TREE_CODE (arg1) == ADDR_EXPR
11025 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
11026 {
11027 tree tem = fold_addr_of_array_ref_difference (loc, type,
11028 TREE_OPERAND (arg0, 0),
11029 TREE_OPERAND (arg1, 0));
11030 if (tem)
11031 return tem;
11032 }
11033
11034 if (FLOAT_TYPE_P (type)
11035 && flag_unsafe_math_optimizations
11036 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
11037 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
11038 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
11039 return tem;
11040
11041 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
11042 the constant 1.  Make sure the type is not saturating and has the signedness of
11043 the stripped operands, as fold_plusminus_mult_expr will re-associate.
11044 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
11045 if ((TREE_CODE (arg0) == MULT_EXPR
11046 || TREE_CODE (arg1) == MULT_EXPR)
11047 && !TYPE_SATURATING (type)
11048 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
11049 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
11050 && (!FLOAT_TYPE_P (type) || flag_associative_math))
11051 {
11052 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
11053 if (tem)
11054 return tem;
11055 }
11056
11057 goto associate;
11058
11059 case MULT_EXPR:
11060 /* (-A) * (-B) -> A * B */
11061 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11062 return fold_build2_loc (loc, MULT_EXPR, type,
11063 fold_convert_loc (loc, type,
11064 TREE_OPERAND (arg0, 0)),
11065 fold_convert_loc (loc, type,
11066 negate_expr (arg1)));
11067 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11068 return fold_build2_loc (loc, MULT_EXPR, type,
11069 fold_convert_loc (loc, type,
11070 negate_expr (arg0)),
11071 fold_convert_loc (loc, type,
11072 TREE_OPERAND (arg1, 0)));
11073
11074 if (! FLOAT_TYPE_P (type))
11075 {
11076 if (integer_zerop (arg1))
11077 return omit_one_operand_loc (loc, type, arg1, arg0);
11078 if (integer_onep (arg1))
11079 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11080 /* Transform x * -1 into -x. Make sure to do the negation
11081 on the original operand with conversions not stripped
11082 because we can only strip non-sign-changing conversions. */
11083 if (integer_minus_onep (arg1))
11084 return fold_convert_loc (loc, type, negate_expr (op0));
11085 /* Transform x * -C into -x * C if x is easily negatable. */
11086 if (TREE_CODE (arg1) == INTEGER_CST
11087 && tree_int_cst_sgn (arg1) == -1
11088 && negate_expr_p (arg0)
11089 && (tem = negate_expr (arg1)) != arg1
11090 && !TREE_OVERFLOW (tem))
11091 return fold_build2_loc (loc, MULT_EXPR, type,
11092 fold_convert_loc (loc, type,
11093 negate_expr (arg0)),
11094 tem);
11095
11096 /* (a * (1 << b)) is (a << b) */
11097 if (TREE_CODE (arg1) == LSHIFT_EXPR
11098 && integer_onep (TREE_OPERAND (arg1, 0)))
11099 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
11100 TREE_OPERAND (arg1, 1));
11101 if (TREE_CODE (arg0) == LSHIFT_EXPR
11102 && integer_onep (TREE_OPERAND (arg0, 0)))
11103 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
11104 TREE_OPERAND (arg0, 1));
11105
11106 /* (A + A) * C -> A * 2 * C */
11107 if (TREE_CODE (arg0) == PLUS_EXPR
11108 && TREE_CODE (arg1) == INTEGER_CST
11109 && operand_equal_p (TREE_OPERAND (arg0, 0),
11110 TREE_OPERAND (arg0, 1), 0))
11111 return fold_build2_loc (loc, MULT_EXPR, type,
11112 omit_one_operand_loc (loc, type,
11113 TREE_OPERAND (arg0, 0),
11114 TREE_OPERAND (arg0, 1)),
11115 fold_build2_loc (loc, MULT_EXPR, type,
11116 build_int_cst (type, 2), arg1));
11117
11118 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
11119 sign-changing only. */
11120 if (TREE_CODE (arg1) == INTEGER_CST
11121 && TREE_CODE (arg0) == EXACT_DIV_EXPR
11122 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
11123 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11124
11125 strict_overflow_p = false;
11126 if (TREE_CODE (arg1) == INTEGER_CST
11127 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11128 &strict_overflow_p)))
11129 {
11130 if (strict_overflow_p)
11131 fold_overflow_warning (("assuming signed overflow does not "
11132 "occur when simplifying "
11133 "multiplication"),
11134 WARN_STRICT_OVERFLOW_MISC);
11135 return fold_convert_loc (loc, type, tem);
11136 }
11137
11138 /* Optimize z * conj(z) for integer complex numbers. */
11139 if (TREE_CODE (arg0) == CONJ_EXPR
11140 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11141 return fold_mult_zconjz (loc, type, arg1);
11142 if (TREE_CODE (arg1) == CONJ_EXPR
11143 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11144 return fold_mult_zconjz (loc, type, arg0);
11145 }
11146 else
11147 {
11148 /* Maybe fold x * 0 to 0. The expressions aren't the same
11149 when x is NaN, since x * 0 is also NaN. Nor are they the
11150 same in modes with signed zeros, since multiplying a
11151 negative value by 0 gives -0, not +0. */
11152 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11153 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11154 && real_zerop (arg1))
11155 return omit_one_operand_loc (loc, type, arg1, arg0);
11156 /* In IEEE floating point, x*1 is not equivalent to x for snans.
11157 Likewise for complex arithmetic with signed zeros. */
11158 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11159 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11160 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11161 && real_onep (arg1))
11162 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11163
11164 /* Transform x * -1.0 into -x. */
11165 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
11166 && (!HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11167 || !COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
11168 && real_minus_onep (arg1))
11169 return fold_convert_loc (loc, type, negate_expr (arg0));
11170
11171 /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
11172 the result for floating point types due to rounding, so it is applied
11173 only if -fassociative-math was specified.  */
11174 if (flag_associative_math
11175 && TREE_CODE (arg0) == RDIV_EXPR
11176 && TREE_CODE (arg1) == REAL_CST
11177 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
11178 {
11179 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
11180 arg1);
11181 if (tem)
11182 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11183 TREE_OPERAND (arg0, 1));
11184 }
11185
11186 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
11187 if (operand_equal_p (arg0, arg1, 0))
11188 {
11189 tree tem = fold_strip_sign_ops (arg0);
11190 if (tem != NULL_TREE)
11191 {
11192 tem = fold_convert_loc (loc, type, tem);
11193 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
11194 }
11195 }
11196
11197 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
11198 This is not the same for NaNs or if signed zeros are
11199 involved. */
11200 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
11201 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg0)))
11202 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
11203 && TREE_CODE (arg1) == COMPLEX_CST
11204 && real_zerop (TREE_REALPART (arg1)))
11205 {
11206 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
11207 if (real_onep (TREE_IMAGPART (arg1)))
11208 return
11209 fold_build2_loc (loc, COMPLEX_EXPR, type,
11210 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
11211 rtype, arg0)),
11212 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
11213 else if (real_minus_onep (TREE_IMAGPART (arg1)))
11214 return
11215 fold_build2_loc (loc, COMPLEX_EXPR, type,
11216 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
11217 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
11218 rtype, arg0)));
11219 }
11220
11221 /* Optimize z * conj(z) for floating point complex numbers.
11222 Guarded by flag_unsafe_math_optimizations as non-finite
11223 imaginary components don't produce scalar results. */
11224 if (flag_unsafe_math_optimizations
11225 && TREE_CODE (arg0) == CONJ_EXPR
11226 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11227 return fold_mult_zconjz (loc, type, arg1);
11228 if (flag_unsafe_math_optimizations
11229 && TREE_CODE (arg1) == CONJ_EXPR
11230 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11231 return fold_mult_zconjz (loc, type, arg0);
11232
11233 if (flag_unsafe_math_optimizations)
11234 {
11235 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11236 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11237
11238 /* Optimizations of root(...)*root(...). */
11239 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
11240 {
11241 tree rootfn, arg;
11242 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11243 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11244
11245 /* Optimize sqrt(x)*sqrt(x) as x. */
11246 if (BUILTIN_SQRT_P (fcode0)
11247 && operand_equal_p (arg00, arg10, 0)
11248 && ! HONOR_SNANS (TYPE_MODE (type)))
11249 return arg00;
11250
11251 /* Optimize root(x)*root(y) as root(x*y). */
11252 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11253 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
11254 return build_call_expr_loc (loc, rootfn, 1, arg);
11255 }
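/* E.g. with -funsafe-math-optimizations, sqrt(x)*sqrt(x) becomes x
and sqrt(x)*sqrt(y) becomes sqrt(x*y).  */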
11256
11257 /* Optimize expN(x)*expN(y) as expN(x+y). */
11258 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
11259 {
11260 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11261 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11262 CALL_EXPR_ARG (arg0, 0),
11263 CALL_EXPR_ARG (arg1, 0));
11264 return build_call_expr_loc (loc, expfn, 1, arg);
11265 }
11266
11267 /* Optimizations of pow(...)*pow(...). */
11268 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
11269 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
11270 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
11271 {
11272 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11273 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11274 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11275 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11276
11277 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
11278 if (operand_equal_p (arg01, arg11, 0))
11279 {
11280 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11281 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
11282 arg00, arg10);
11283 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
11284 }
11285
11286 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
11287 if (operand_equal_p (arg00, arg10, 0))
11288 {
11289 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11290 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
11291 arg01, arg11);
11292 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
11293 }
11294 }
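/* E.g. pow(x,c)*pow(y,c) becomes pow(x*y,c), and
pow(x,y)*pow(x,z) becomes pow(x,y+z).  */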
11295
11296 /* Optimize tan(x)*cos(x) as sin(x). */
11297 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
11298 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
11299 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
11300 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
11301 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
11302 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
11303 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11304 CALL_EXPR_ARG (arg1, 0), 0))
11305 {
11306 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
11307
11308 if (sinfn != NULL_TREE)
11309 return build_call_expr_loc (loc, sinfn, 1,
11310 CALL_EXPR_ARG (arg0, 0));
11311 }
11312
11313 /* Optimize x*pow(x,c) as pow(x,c+1). */
11314 if (fcode1 == BUILT_IN_POW
11315 || fcode1 == BUILT_IN_POWF
11316 || fcode1 == BUILT_IN_POWL)
11317 {
11318 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11319 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11320 if (TREE_CODE (arg11) == REAL_CST
11321 && !TREE_OVERFLOW (arg11)
11322 && operand_equal_p (arg0, arg10, 0))
11323 {
11324 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11325 REAL_VALUE_TYPE c;
11326 tree arg;
11327
11328 c = TREE_REAL_CST (arg11);
11329 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11330 arg = build_real (type, c);
11331 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11332 }
11333 }
11334
11335 /* Optimize pow(x,c)*x as pow(x,c+1). */
11336 if (fcode0 == BUILT_IN_POW
11337 || fcode0 == BUILT_IN_POWF
11338 || fcode0 == BUILT_IN_POWL)
11339 {
11340 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11341 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11342 if (TREE_CODE (arg01) == REAL_CST
11343 && !TREE_OVERFLOW (arg01)
11344 && operand_equal_p (arg1, arg00, 0))
11345 {
11346 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11347 REAL_VALUE_TYPE c;
11348 tree arg;
11349
11350 c = TREE_REAL_CST (arg01);
11351 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
11352 arg = build_real (type, c);
11353 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11354 }
11355 }
11356
11357 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
11358 if (!in_gimple_form
11359 && optimize
11360 && operand_equal_p (arg0, arg1, 0))
11361 {
11362 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
11363
11364 if (powfn)
11365 {
11366 tree arg = build_real (type, dconst2);
11367 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
11368 }
11369 }
11370 }
11371 }
11372 goto associate;
11373
11374 case BIT_IOR_EXPR:
11375 bit_ior:
11376 if (integer_all_onesp (arg1))
11377 return omit_one_operand_loc (loc, type, arg1, arg0);
11378 if (integer_zerop (arg1))
11379 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11380 if (operand_equal_p (arg0, arg1, 0))
11381 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11382
11383 /* ~X | X is -1. */
11384 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11385 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11386 {
11387 t1 = build_zero_cst (type);
11388 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11389 return omit_one_operand_loc (loc, type, t1, arg1);
11390 }
11391
11392 /* X | ~X is -1. */
11393 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11394 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11395 {
11396 t1 = build_zero_cst (type);
11397 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11398 return omit_one_operand_loc (loc, type, t1, arg0);
11399 }
11400
11401 /* Canonicalize (X & C1) | C2. */
11402 if (TREE_CODE (arg0) == BIT_AND_EXPR
11403 && TREE_CODE (arg1) == INTEGER_CST
11404 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11405 {
11406 double_int c1, c2, c3, msk;
11407 int width = TYPE_PRECISION (type), w;
11408 bool try_simplify = true;
11409
11410 c1 = tree_to_double_int (TREE_OPERAND (arg0, 1));
11411 c2 = tree_to_double_int (arg1);
11412
11413 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11414 if ((c1 & c2) == c1)
11415 return omit_one_operand_loc (loc, type, arg1,
11416 TREE_OPERAND (arg0, 0));
11417
11418 msk = double_int::mask (width);
11419
11420 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11421 if (msk.and_not (c1 | c2).is_zero ())
11422 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11423 TREE_OPERAND (arg0, 0), arg1);
11424
11425 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11426 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11427 mode which allows further optimizations. */
11428 c1 &= msk;
11429 c2 &= msk;
11430 c3 = c1.and_not (c2);
11431 for (w = BITS_PER_UNIT;
11432 w <= width && w <= HOST_BITS_PER_WIDE_INT;
11433 w <<= 1)
11434 {
11435 unsigned HOST_WIDE_INT mask
11436 = HOST_WIDE_INT_M1U >> (HOST_BITS_PER_WIDE_INT - w);
11437 if (((c1.low | c2.low) & mask) == mask
11438 && (c1.low & ~mask) == 0 && c1.high == 0)
11439 {
11440 c3 = double_int::from_uhwi (mask);
11441 break;
11442 }
11443 }
11444
11445 /* If X is a tree of the form (Y * K1) & K2, this might conflict
11446 with the corresponding optimization in the BIT_AND_EXPR case,
11447 which could end up in an infinite recursion.  */
11448 if (TREE_CODE (TREE_OPERAND (arg0, 0)) == MULT_EXPR
11449 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
11450 == INTEGER_CST)
11451 {
11452 tree t = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
11453 double_int masked = mask_with_tz (type, c3, tree_to_double_int (t));
11454
11455 try_simplify = (masked != c1);
11456 }
11457
11458 if (try_simplify && c3 != c1)
11459 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11460 fold_build2_loc (loc, BIT_AND_EXPR, type,
11461 TREE_OPERAND (arg0, 0),
11462 double_int_to_tree (type,
11463 c3)),
11464 arg1);
11465 }
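/* E.g. (x & 0x0F) | 0xFF drops the AND entirely (the first case
above), while (x & 0x3F) | 0x0F shrinks the mask, giving
(x & 0x30) | 0x0F.  */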
11466
11467 /* (X & Y) | Y is (X, Y). */
11468 if (TREE_CODE (arg0) == BIT_AND_EXPR
11469 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11470 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11471 /* (X & Y) | X is (Y, X). */
11472 if (TREE_CODE (arg0) == BIT_AND_EXPR
11473 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11474 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11475 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11476 /* X | (X & Y) is (Y, X). */
11477 if (TREE_CODE (arg1) == BIT_AND_EXPR
11478 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11479 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11480 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11481 /* X | (Y & X) is (Y, X). */
11482 if (TREE_CODE (arg1) == BIT_AND_EXPR
11483 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11484 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11485 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11486
11487 /* (X & ~Y) | (~X & Y) is X ^ Y */
11488 if (TREE_CODE (arg0) == BIT_AND_EXPR
11489 && TREE_CODE (arg1) == BIT_AND_EXPR)
11490 {
11491 tree a0, a1, l0, l1, n0, n1;
11492
11493 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11494 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11495
11496 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11497 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11498
11499 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11500 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11501
11502 if ((operand_equal_p (n0, a0, 0)
11503 && operand_equal_p (n1, a1, 0))
11504 || (operand_equal_p (n0, a1, 0)
11505 && operand_equal_p (n1, a0, 0)))
11506 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11507 }
11508
11509 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11510 if (t1 != NULL_TREE)
11511 return t1;
11512
11513 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11514
11515 This results in more efficient code for machines without a NAND
11516 instruction. Combine will canonicalize to the first form
11517 which will allow use of NAND instructions provided by the
11518 backend if they exist. */
11519 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11520 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11521 {
11522 return
11523 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11524 build2 (BIT_AND_EXPR, type,
11525 fold_convert_loc (loc, type,
11526 TREE_OPERAND (arg0, 0)),
11527 fold_convert_loc (loc, type,
11528 TREE_OPERAND (arg1, 0))));
11529 }
11530
11531 /* See if this can be simplified into a rotate first. If that
11532 is unsuccessful continue in the association code. */
11533 goto bit_rotate;
11534
11535 case BIT_XOR_EXPR:
11536 if (integer_zerop (arg1))
11537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11538 if (integer_all_onesp (arg1))
11539 return fold_build1_loc (loc, BIT_NOT_EXPR, type, op0);
11540 if (operand_equal_p (arg0, arg1, 0))
11541 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11542
11543 /* ~X ^ X is -1. */
11544 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11545 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11546 {
11547 t1 = build_zero_cst (type);
11548 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11549 return omit_one_operand_loc (loc, type, t1, arg1);
11550 }
11551
11552 /* X ^ ~X is -1. */
11553 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11554 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11555 {
11556 t1 = build_zero_cst (type);
11557 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11558 return omit_one_operand_loc (loc, type, t1, arg0);
11559 }
11560
11561 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11562 with a constant, and the two constants have no bits in common,
11563 we should treat this as a BIT_IOR_EXPR since this may produce more
11564 simplifications. */
11565 if (TREE_CODE (arg0) == BIT_AND_EXPR
11566 && TREE_CODE (arg1) == BIT_AND_EXPR
11567 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11568 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11569 && integer_zerop (const_binop (BIT_AND_EXPR,
11570 TREE_OPERAND (arg0, 1),
11571 TREE_OPERAND (arg1, 1))))
11572 {
11573 code = BIT_IOR_EXPR;
11574 goto bit_ior;
11575 }
11576
11577 /* (X | Y) ^ X -> Y & ~X.  */
11578 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11579 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11580 {
11581 tree t2 = TREE_OPERAND (arg0, 1);
11582 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11583 arg1);
11584 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11585 fold_convert_loc (loc, type, t2),
11586 fold_convert_loc (loc, type, t1));
11587 return t1;
11588 }
11589
11590 /* (Y | X) ^ X -> Y & ~X.  */
11591 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11592 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11593 {
11594 tree t2 = TREE_OPERAND (arg0, 0);
11595 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11596 arg1);
11597 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11598 fold_convert_loc (loc, type, t2),
11599 fold_convert_loc (loc, type, t1));
11600 return t1;
11601 }
11602
11603 /* X ^ (X | Y) -> Y & ~X.  */
11604 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11605 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11606 {
11607 tree t2 = TREE_OPERAND (arg1, 1);
11608 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11609 arg0);
11610 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11611 fold_convert_loc (loc, type, t2),
11612 fold_convert_loc (loc, type, t1));
11613 return t1;
11614 }
11615
11616 /* X ^ (Y | X) -> Y & ~X.  */
11617 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11618 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11619 {
11620 tree t2 = TREE_OPERAND (arg1, 0);
11621 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11622 arg0);
11623 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11624 fold_convert_loc (loc, type, t2),
11625 fold_convert_loc (loc, type, t1));
11626 return t1;
11627 }
11628
11629 /* Convert ~X ^ ~Y to X ^ Y. */
11630 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11631 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11632 return fold_build2_loc (loc, code, type,
11633 fold_convert_loc (loc, type,
11634 TREE_OPERAND (arg0, 0)),
11635 fold_convert_loc (loc, type,
11636 TREE_OPERAND (arg1, 0)));
11637
11638 /* Convert ~X ^ C to X ^ ~C. */
11639 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11640 && TREE_CODE (arg1) == INTEGER_CST)
11641 return fold_build2_loc (loc, code, type,
11642 fold_convert_loc (loc, type,
11643 TREE_OPERAND (arg0, 0)),
11644 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11645
11646 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11647 if (TREE_CODE (arg0) == BIT_AND_EXPR
11648 && integer_onep (TREE_OPERAND (arg0, 1))
11649 && integer_onep (arg1))
11650 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11651 build_zero_cst (TREE_TYPE (arg0)));
11652
11653 /* Fold (X & Y) ^ Y as ~X & Y. */
11654 if (TREE_CODE (arg0) == BIT_AND_EXPR
11655 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11656 {
11657 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11658 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11659 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11660 fold_convert_loc (loc, type, arg1));
11661 }
11662 /* Fold (X & Y) ^ X as ~Y & X. */
11663 if (TREE_CODE (arg0) == BIT_AND_EXPR
11664 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11665 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11666 {
11667 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11668 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11669 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11670 fold_convert_loc (loc, type, arg1));
11671 }
11672 /* Fold X ^ (X & Y) as X & ~Y. */
11673 if (TREE_CODE (arg1) == BIT_AND_EXPR
11674 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11675 {
11676 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11677 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11678 fold_convert_loc (loc, type, arg0),
11679 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11680 }
11681 /* Fold X ^ (Y & X) as ~Y & X. */
11682 if (TREE_CODE (arg1) == BIT_AND_EXPR
11683 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11684 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11685 {
11686 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11687 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11688 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11689 fold_convert_loc (loc, type, arg0));
11690 }
11691
11692 /* See if this can be simplified into a rotate first. If that
11693 is unsuccessful continue in the association code. */
11694 goto bit_rotate;
11695
11696 case BIT_AND_EXPR:
11697 if (integer_all_onesp (arg1))
11698 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11699 if (integer_zerop (arg1))
11700 return omit_one_operand_loc (loc, type, arg1, arg0);
11701 if (operand_equal_p (arg0, arg1, 0))
11702 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
11703
11704 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11705 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11706 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11707 || (TREE_CODE (arg0) == EQ_EXPR
11708 && integer_zerop (TREE_OPERAND (arg0, 1))))
11709 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11710 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11711
11712 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11713 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11714 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11715 || (TREE_CODE (arg1) == EQ_EXPR
11716 && integer_zerop (TREE_OPERAND (arg1, 1))))
11717 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11718 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11719
11720 /* Canonicalize (X | C1) & C2 as (X & C2) | (C1 & C2). */
11721 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11722 && TREE_CODE (arg1) == INTEGER_CST
11723 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11724 {
11725 tree tmp1 = fold_convert_loc (loc, type, arg1);
11726 tree tmp2 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11727 tree tmp3 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11728 tmp2 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp2, tmp1);
11729 tmp3 = fold_build2_loc (loc, BIT_AND_EXPR, type, tmp3, tmp1);
11730 return
11731 fold_convert_loc (loc, type,
11732 fold_build2_loc (loc, BIT_IOR_EXPR,
11733 type, tmp2, tmp3));
11734 }
11735
11736 /* (X | Y) & Y is (X, Y). */
11737 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11738 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11739 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 0));
11740 /* (X | Y) & X is (Y, X). */
11741 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11742 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11743 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11744 return omit_one_operand_loc (loc, type, arg1, TREE_OPERAND (arg0, 1));
11745 /* X & (X | Y) is (Y, X). */
11746 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11747 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0)
11748 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 1)))
11749 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 1));
11750 /* X & (Y | X) is (Y, X). */
11751 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11752 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11753 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11754 return omit_one_operand_loc (loc, type, arg0, TREE_OPERAND (arg1, 0));
11755
11756 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11757 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11758 && integer_onep (TREE_OPERAND (arg0, 1))
11759 && integer_onep (arg1))
11760 {
11761 tree tem2;
11762 tem = TREE_OPERAND (arg0, 0);
11763 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11764 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11765 tem, tem2);
11766 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11767 build_zero_cst (TREE_TYPE (tem)));
11768 }
11769 /* Fold ~X & 1 as (X & 1) == 0. */
11770 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11771 && integer_onep (arg1))
11772 {
11773 tree tem2;
11774 tem = TREE_OPERAND (arg0, 0);
11775 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11776 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11777 tem, tem2);
11778 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11779 build_zero_cst (TREE_TYPE (tem)));
11780 }
11781 /* Fold !X & 1 as X == 0. */
11782 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11783 && integer_onep (arg1))
11784 {
11785 tem = TREE_OPERAND (arg0, 0);
11786 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11787 build_zero_cst (TREE_TYPE (tem)));
11788 }
11789
11790 /* Fold (X ^ Y) & Y as ~X & Y. */
11791 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11792 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11793 {
11794 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11795 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11796 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11797 fold_convert_loc (loc, type, arg1));
11798 }
11799 /* Fold (X ^ Y) & X as ~Y & X. */
11800 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11801 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11802 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11803 {
11804 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11805 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11806 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11807 fold_convert_loc (loc, type, arg1));
11808 }
11809 /* Fold X & (X ^ Y) as X & ~Y. */
11810 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11812 {
11813 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11814 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11815 fold_convert_loc (loc, type, arg0),
11816 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11817 }
11818 /* Fold X & (Y ^ X) as ~Y & X. */
11819 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11820 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11821 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11822 {
11823 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11824 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11825 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11826 fold_convert_loc (loc, type, arg0));
11827 }
11828
11829 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11830 multiple of 1 << CST. */
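/* E.g. with CST == 2 the mask is -(1 << 2) == -4, and
(X * 8) & -4 folds to X * 8, since 8 is a multiple of 4 and
the two low bits of X * 8 are already zero. */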
11831 if (TREE_CODE (arg1) == INTEGER_CST)
11832 {
11833 double_int cst1 = tree_to_double_int (arg1);
11834 double_int ncst1 = (-cst1).ext (TYPE_PRECISION (TREE_TYPE (arg1)),
11835 TYPE_UNSIGNED (TREE_TYPE (arg1)));
11836 if ((cst1 & ncst1) == ncst1
11837 && multiple_of_p (type, arg0,
11838 double_int_to_tree (TREE_TYPE (arg1), ncst1)))
11839 return fold_convert_loc (loc, type, arg0);
11840 }
11841
11842 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11843 bits from CST2. */
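/* E.g. (X * 4) & 3 folds to 0, and (X * 4) & 7 folds to
(X * 4) & 4, because the two low bits of X * 4 are known
to be zero. */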
11844 if (TREE_CODE (arg1) == INTEGER_CST
11845 && TREE_CODE (arg0) == MULT_EXPR
11846 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11847 {
11848 double_int masked
11849 = mask_with_tz (type, tree_to_double_int (arg1),
11850 tree_to_double_int (TREE_OPERAND (arg0, 1)));
11851
11852 if (masked.is_zero ())
11853 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11854 arg0, arg1);
11855 else if (masked != tree_to_double_int (arg1))
11856 return fold_build2_loc (loc, code, type, op0,
11857 double_int_to_tree (type, masked));
11858 }
11859
11860 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11861 ((A & N) + B) & M -> (A + B) & M
11862 Similarly if (N & M) == 0,
11863 ((A | N) + B) & M -> (A + B) & M
11864 and for - instead of + (or unary - instead of +)
11865 and/or ^ instead of |.
11866 If B is constant and (B & M) == 0, fold into A & M. */
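/* E.g. with M == 15: ((A & 0x1f) + B) & 15 -> (A + B) & 15
since (0x1f & 15) == 15, and ((A | 16) + B) & 15 -> (A + B) & 15
since (16 & 15) == 0. */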
11867 if (host_integerp (arg1, 1))
11868 {
11869 unsigned HOST_WIDE_INT cst1 = tree_low_cst (arg1, 1);
11870 if (~cst1 && (cst1 & (cst1 + 1)) == 0
11871 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11872 && (TREE_CODE (arg0) == PLUS_EXPR
11873 || TREE_CODE (arg0) == MINUS_EXPR
11874 || TREE_CODE (arg0) == NEGATE_EXPR)
11875 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11876 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11877 {
11878 tree pmop[2];
11879 int which = 0;
11880 unsigned HOST_WIDE_INT cst0;
11881
11882 /* Now we know that arg0 is (C + D) or (C - D) or
11883 -C and arg1 (M) equals (1LL << cst) - 1.
11884 Store C into PMOP[0] and D into PMOP[1]. */
11885 pmop[0] = TREE_OPERAND (arg0, 0);
11886 pmop[1] = NULL;
11887 if (TREE_CODE (arg0) != NEGATE_EXPR)
11888 {
11889 pmop[1] = TREE_OPERAND (arg0, 1);
11890 which = 1;
11891 }
11892
11893 if (!host_integerp (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11894 || (tree_low_cst (TYPE_MAX_VALUE (TREE_TYPE (arg0)), 1)
11895 & cst1) != cst1)
11896 which = -1;
11897
11898 for (; which >= 0; which--)
11899 switch (TREE_CODE (pmop[which]))
11900 {
11901 case BIT_AND_EXPR:
11902 case BIT_IOR_EXPR:
11903 case BIT_XOR_EXPR:
11904 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11905 != INTEGER_CST)
11906 break;
11907 /* tree_low_cst not used, because we don't care about
11908 the upper bits. */
11909 cst0 = TREE_INT_CST_LOW (TREE_OPERAND (pmop[which], 1));
11910 cst0 &= cst1;
11911 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11912 {
11913 if (cst0 != cst1)
11914 break;
11915 }
11916 else if (cst0 != 0)
11917 break;
11918 /* If C or D is of the form (A & N) where
11919 (N & M) == M, or of the form (A | N) or
11920 (A ^ N) where (N & M) == 0, replace it with A. */
11921 pmop[which] = TREE_OPERAND (pmop[which], 0);
11922 break;
11923 case INTEGER_CST:
11924 /* If C or D is a constant N where (N & M) == 0, it can be
11925 omitted (treated as 0). */
11926 if ((TREE_CODE (arg0) == PLUS_EXPR
11927 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11928 && (TREE_INT_CST_LOW (pmop[which]) & cst1) == 0)
11929 pmop[which] = NULL;
11930 break;
11931 default:
11932 break;
11933 }
11934
11935 /* Only build anything new if we optimized one or both arguments
11936 above. */
11937 if (pmop[0] != TREE_OPERAND (arg0, 0)
11938 || (TREE_CODE (arg0) != NEGATE_EXPR
11939 && pmop[1] != TREE_OPERAND (arg0, 1)))
11940 {
11941 tree utype = TREE_TYPE (arg0);
11942 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11943 {
11944 /* Perform the operations in a type that has defined
11945 overflow behavior. */
11946 utype = unsigned_type_for (TREE_TYPE (arg0));
11947 if (pmop[0] != NULL)
11948 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11949 if (pmop[1] != NULL)
11950 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11951 }
11952
11953 if (TREE_CODE (arg0) == NEGATE_EXPR)
11954 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11955 else if (TREE_CODE (arg0) == PLUS_EXPR)
11956 {
11957 if (pmop[0] != NULL && pmop[1] != NULL)
11958 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11959 pmop[0], pmop[1]);
11960 else if (pmop[0] != NULL)
11961 tem = pmop[0];
11962 else if (pmop[1] != NULL)
11963 tem = pmop[1];
11964 else
11965 return build_int_cst (type, 0);
11966 }
11967 else if (pmop[0] == NULL)
11968 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11969 else
11970 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11971 pmop[0], pmop[1]);
11972 /* TEM is now the new binary +, - or unary - replacement. */
11973 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11974 fold_convert_loc (loc, utype, arg1));
11975 return fold_convert_loc (loc, type, tem);
11976 }
11977 }
11978 }
11979
11980 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11981 if (t1 != NULL_TREE)
11982 return t1;
11983 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11984 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11985 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11986 {
11987 prec = TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11988
11989 if (prec < BITS_PER_WORD && prec < HOST_BITS_PER_WIDE_INT
11990 && (~TREE_INT_CST_LOW (arg1)
11991 & (((HOST_WIDE_INT) 1 << prec) - 1)) == 0)
11992 return
11993 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11994 }
11995
11996 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11997
11998 This results in more efficient code for machines without a NOR
11999 instruction. Combine will canonicalize to the first form,
12000 which allows the use of NOR instructions provided by the
12001 backend if they exist. */
12002 if (TREE_CODE (arg0) == BIT_NOT_EXPR
12003 && TREE_CODE (arg1) == BIT_NOT_EXPR)
12004 {
12005 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
12006 build2 (BIT_IOR_EXPR, type,
12007 fold_convert_loc (loc, type,
12008 TREE_OPERAND (arg0, 0)),
12009 fold_convert_loc (loc, type,
12010 TREE_OPERAND (arg1, 0))));
12011 }
12012
12013 /* If arg0 is derived from the address of an object or function, we may
12014 be able to fold this expression using the object or function's
12015 alignment. */
12016 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && host_integerp (arg1, 1))
12017 {
12018 unsigned HOST_WIDE_INT modulus, residue;
12019 unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
12020
12021 modulus = get_pointer_modulus_and_residue (arg0, &residue,
12022 integer_onep (arg1));
12023
12024 /* This works because modulus is a power of 2. If this weren't the
12025 case, we'd have to replace it by its greatest power-of-2
12026 divisor: modulus & -modulus. */
12027 if (low < modulus)
12028 return build_int_cst (type, residue & low);
12029 }
12030
12031 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
12032 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
12033 if the new mask might be further optimized. */
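/* E.g. on a 32-bit unsigned type, (X << 8) & 0xffffff00 gives
newmask 0xffffff00 | 0xff == 0xffffffff, so the BIT_AND is
redundant and disappears. */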
12034 if ((TREE_CODE (arg0) == LSHIFT_EXPR
12035 || TREE_CODE (arg0) == RSHIFT_EXPR)
12036 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
12037 && TREE_CODE (arg1) == INTEGER_CST
12038 && host_integerp (TREE_OPERAND (arg0, 1), 1)
12039 && tree_low_cst (TREE_OPERAND (arg0, 1), 1) > 0
12040 && (tree_low_cst (TREE_OPERAND (arg0, 1), 1)
12041 < TYPE_PRECISION (TREE_TYPE (arg0))))
12042 {
12043 unsigned int shiftc = tree_low_cst (TREE_OPERAND (arg0, 1), 1);
12044 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
12045 unsigned HOST_WIDE_INT newmask, zerobits = 0;
12046 tree shift_type = TREE_TYPE (arg0);
12047
12048 if (TREE_CODE (arg0) == LSHIFT_EXPR)
12049 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
12050 else if (TREE_CODE (arg0) == RSHIFT_EXPR
12051 && TYPE_PRECISION (TREE_TYPE (arg0))
12052 == GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (arg0))))
12053 {
12054 prec = TYPE_PRECISION (TREE_TYPE (arg0));
12055 tree arg00 = TREE_OPERAND (arg0, 0);
12056 /* See if more bits can be proven as zero because of
12057 zero extension. */
12058 if (TREE_CODE (arg00) == NOP_EXPR
12059 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
12060 {
12061 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
12062 if (TYPE_PRECISION (inner_type)
12063 == GET_MODE_BITSIZE (TYPE_MODE (inner_type))
12064 && TYPE_PRECISION (inner_type) < prec)
12065 {
12066 prec = TYPE_PRECISION (inner_type);
12067 /* See if we can shorten the right shift. */
12068 if (shiftc < prec)
12069 shift_type = inner_type;
12070 }
12071 }
12072 zerobits = ~(unsigned HOST_WIDE_INT) 0;
12073 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
12074 zerobits <<= prec - shiftc;
12075 /* For an arithmetic shift, if the sign bit could be set, zerobits
12076 can actually contain sign bits, so no transformation is
12077 possible, unless MASK masks them all away. In that
12078 case the shift needs to be converted into logical shift. */
12079 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
12080 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
12081 {
12082 if ((mask & zerobits) == 0)
12083 shift_type = unsigned_type_for (TREE_TYPE (arg0));
12084 else
12085 zerobits = 0;
12086 }
12087 }
12088
12089 /* ((X << 16) & 0xff00) is (X, 0). */
12090 if ((mask & zerobits) == mask)
12091 return omit_one_operand_loc (loc, type,
12092 build_int_cst (type, 0), arg0);
12093
12094 newmask = mask | zerobits;
12095 if (newmask != mask && (newmask & (newmask + 1)) == 0)
12096 {
12097 /* Only do the transformation if NEWMASK is some integer
12098 mode's mask. */
12099 for (prec = BITS_PER_UNIT;
12100 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
12101 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
12102 break;
12103 if (prec < HOST_BITS_PER_WIDE_INT
12104 || newmask == ~(unsigned HOST_WIDE_INT) 0)
12105 {
12106 tree newmaskt;
12107
12108 if (shift_type != TREE_TYPE (arg0))
12109 {
12110 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
12111 fold_convert_loc (loc, shift_type,
12112 TREE_OPERAND (arg0, 0)),
12113 TREE_OPERAND (arg0, 1));
12114 tem = fold_convert_loc (loc, type, tem);
12115 }
12116 else
12117 tem = op0;
12118 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
12119 if (!tree_int_cst_equal (newmaskt, arg1))
12120 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
12121 }
12122 }
12123 }
12124
12125 goto associate;
12126
12127 case RDIV_EXPR:
12128 /* Don't touch a floating-point divide by zero unless the mode
12129 of the constant can represent infinity. */
12130 if (TREE_CODE (arg1) == REAL_CST
12131 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
12132 && real_zerop (arg1))
12133 return NULL_TREE;
12134
12135 /* Optimize A / A to 1.0 if we don't care about
12136 NaNs or Infinities. Skip the transformation
12137 for non-real operands. */
12138 if (SCALAR_FLOAT_TYPE_P (TREE_TYPE (arg0))
12139 && ! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
12140 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg0)))
12141 && operand_equal_p (arg0, arg1, 0))
12142 {
12143 tree r = build_real (TREE_TYPE (arg0), dconst1);
12144
12145 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12146 }
12147
12148 /* The complex version of the above A / A optimization. */
12149 if (COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
12150 && operand_equal_p (arg0, arg1, 0))
12151 {
12152 tree elem_type = TREE_TYPE (TREE_TYPE (arg0));
12153 if (! HONOR_NANS (TYPE_MODE (elem_type))
12154 && ! HONOR_INFINITIES (TYPE_MODE (elem_type)))
12155 {
12156 tree r = build_real (elem_type, dconst1);
12157 /* omit_two_operands will call fold_convert for us. */
12158 return omit_two_operands_loc (loc, type, r, arg0, arg1);
12159 }
12160 }
12161
12162 /* (-A) / (-B) -> A / B */
12163 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
12164 return fold_build2_loc (loc, RDIV_EXPR, type,
12165 TREE_OPERAND (arg0, 0),
12166 negate_expr (arg1));
12167 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
12168 return fold_build2_loc (loc, RDIV_EXPR, type,
12169 negate_expr (arg0),
12170 TREE_OPERAND (arg1, 0));
12171
12172 /* In IEEE floating point, x/1 is not equivalent to x for snans. */
12173 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12174 && real_onep (arg1))
12175 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12176
12177 /* In IEEE floating point, x/-1 is not equivalent to -x for snans. */
12178 if (!HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
12179 && real_minus_onep (arg1))
12180 return non_lvalue_loc (loc, fold_convert_loc (loc, type,
12181 negate_expr (arg0)));
12182
12183 /* If ARG1 is a constant, we can convert this to a multiply by the
12184 reciprocal. This does not have the same rounding properties,
12185 so only do this if -freciprocal-math. We can actually
12186 always safely do it if ARG1 is a power of two, but it's hard to
12187 tell if it is or not in a portable manner. */
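/* E.g. X / 4.0 becomes X * 0.25, since the reciprocal of a
power of two is exact; X / 3.0 becomes X * (1.0/3.0) only
under -freciprocal-math, since 1.0/3.0 must be rounded. */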
12188 if (optimize
12189 && (TREE_CODE (arg1) == REAL_CST
12190 || (TREE_CODE (arg1) == COMPLEX_CST
12191 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg1)))
12192 || (TREE_CODE (arg1) == VECTOR_CST
12193 && VECTOR_FLOAT_TYPE_P (TREE_TYPE (arg1)))))
12194 {
12195 if (flag_reciprocal_math
12196 && 0 != (tem = const_binop (code, build_one_cst (type), arg1)))
12197 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tem);
12198 /* Find the reciprocal if optimizing and the result is exact.
12199 TODO: Complex reciprocal not implemented. */
12200 if (TREE_CODE (arg1) != COMPLEX_CST)
12201 {
12202 tree inverse = exact_inverse (TREE_TYPE (arg0), arg1);
12203
12204 if (inverse)
12205 return fold_build2_loc (loc, MULT_EXPR, type, arg0, inverse);
12206 }
12207 }
12208 /* Convert A/B/C to A/(B*C). */
12209 if (flag_reciprocal_math
12210 && TREE_CODE (arg0) == RDIV_EXPR)
12211 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
12212 fold_build2_loc (loc, MULT_EXPR, type,
12213 TREE_OPERAND (arg0, 1), arg1));
12214
12215 /* Convert A/(B/C) to (A/B)*C. */
12216 if (flag_reciprocal_math
12217 && TREE_CODE (arg1) == RDIV_EXPR)
12218 return fold_build2_loc (loc, MULT_EXPR, type,
12219 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
12220 TREE_OPERAND (arg1, 0)),
12221 TREE_OPERAND (arg1, 1));
12222
12223 /* Convert C1/(X*C2) into (C1/C2)/X. */
12224 if (flag_reciprocal_math
12225 && TREE_CODE (arg1) == MULT_EXPR
12226 && TREE_CODE (arg0) == REAL_CST
12227 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
12228 {
12229 tree tem = const_binop (RDIV_EXPR, arg0,
12230 TREE_OPERAND (arg1, 1));
12231 if (tem)
12232 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
12233 TREE_OPERAND (arg1, 0));
12234 }
12235
12236 if (flag_unsafe_math_optimizations)
12237 {
12238 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
12239 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
12240
12241 /* Optimize sin(x)/cos(x) as tan(x). */
12242 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
12243 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
12244 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
12245 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12246 CALL_EXPR_ARG (arg1, 0), 0))
12247 {
12248 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12249
12250 if (tanfn != NULL_TREE)
12251 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
12252 }
12253
12254 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
12255 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
12256 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
12257 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
12258 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
12259 CALL_EXPR_ARG (arg1, 0), 0))
12260 {
12261 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
12262
12263 if (tanfn != NULL_TREE)
12264 {
12265 tree tmp = build_call_expr_loc (loc, tanfn, 1,
12266 CALL_EXPR_ARG (arg0, 0));
12267 return fold_build2_loc (loc, RDIV_EXPR, type,
12268 build_real (type, dconst1), tmp);
12269 }
12270 }
12271
12272 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
12273 NaNs or Infinities. */
12274 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
12275 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
12276 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
12277 {
12278 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12279 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12280
12281 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12282 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12283 && operand_equal_p (arg00, arg01, 0))
12284 {
12285 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12286
12287 if (cosfn != NULL_TREE)
12288 return build_call_expr_loc (loc, cosfn, 1, arg00);
12289 }
12290 }
12291
12292 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
12293 NaNs or Infinities. */
12294 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
12295 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
12296 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
12297 {
12298 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12299 tree arg01 = CALL_EXPR_ARG (arg1, 0);
12300
12301 if (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg00)))
12302 && ! HONOR_INFINITIES (TYPE_MODE (TREE_TYPE (arg00)))
12303 && operand_equal_p (arg00, arg01, 0))
12304 {
12305 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
12306
12307 if (cosfn != NULL_TREE)
12308 {
12309 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
12310 return fold_build2_loc (loc, RDIV_EXPR, type,
12311 build_real (type, dconst1),
12312 tmp);
12313 }
12314 }
12315 }
12316
12317 /* Optimize pow(x,c)/x as pow(x,c-1). */
12318 if (fcode0 == BUILT_IN_POW
12319 || fcode0 == BUILT_IN_POWF
12320 || fcode0 == BUILT_IN_POWL)
12321 {
12322 tree arg00 = CALL_EXPR_ARG (arg0, 0);
12323 tree arg01 = CALL_EXPR_ARG (arg0, 1);
12324 if (TREE_CODE (arg01) == REAL_CST
12325 && !TREE_OVERFLOW (arg01)
12326 && operand_equal_p (arg1, arg00, 0))
12327 {
12328 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
12329 REAL_VALUE_TYPE c;
12330 tree arg;
12331
12332 c = TREE_REAL_CST (arg01);
12333 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
12334 arg = build_real (type, c);
12335 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
12336 }
12337 }
12338
12339 /* Optimize a/root(b/c) into a*root(c/b). */
12340 if (BUILTIN_ROOT_P (fcode1))
12341 {
12342 tree rootarg = CALL_EXPR_ARG (arg1, 0);
12343
12344 if (TREE_CODE (rootarg) == RDIV_EXPR)
12345 {
12346 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12347 tree b = TREE_OPERAND (rootarg, 0);
12348 tree c = TREE_OPERAND (rootarg, 1);
12349
12350 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
12351
12352 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
12353 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
12354 }
12355 }
12356
12357 /* Optimize x/expN(y) into x*expN(-y). */
12358 if (BUILTIN_EXPONENT_P (fcode1))
12359 {
12360 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12361 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
12362 arg1 = build_call_expr_loc (loc,
12363 expfn, 1,
12364 fold_convert_loc (loc, type, arg));
12365 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12366 }
12367
12368 /* Optimize x/pow(y,z) into x*pow(y,-z). */
12369 if (fcode1 == BUILT_IN_POW
12370 || fcode1 == BUILT_IN_POWF
12371 || fcode1 == BUILT_IN_POWL)
12372 {
12373 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
12374 tree arg10 = CALL_EXPR_ARG (arg1, 0);
12375 tree arg11 = CALL_EXPR_ARG (arg1, 1);
12376 tree neg11 = fold_convert_loc (loc, type,
12377 negate_expr (arg11));
12378 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
12379 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
12380 }
12381 }
12382 return NULL_TREE;
12383
12384 case TRUNC_DIV_EXPR:
12385 /* Optimize (X & (-A)) / A where A is a power of 2,
12386 to X >> log2(A). */
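/* E.g. (X & -8) / 8 folds to X >> 3: X & -8 is a multiple
of 8, so the truncating division is exact and equals the
arithmetic shift. */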
12387 if (TREE_CODE (arg0) == BIT_AND_EXPR
12388 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
12389 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
12390 {
12391 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
12392 arg1, TREE_OPERAND (arg0, 1));
12393 if (sum && integer_zerop (sum))
{
12394 unsigned long pow2;
12395
12396 if (TREE_INT_CST_LOW (arg1))
12397 pow2 = exact_log2 (TREE_INT_CST_LOW (arg1));
12398 else
12399 pow2 = exact_log2 (TREE_INT_CST_HIGH (arg1))
12400 + HOST_BITS_PER_WIDE_INT;
12401
12402 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12403 TREE_OPERAND (arg0, 0),
12404 build_int_cst (integer_type_node, pow2));
12405 }
12406 }
12407
12408 /* Fall through */
12409
12410 case FLOOR_DIV_EXPR:
12411 /* Simplify A / (B << N) where A and B are positive and B is
12412 a power of 2, to A >> (N + log2(B)). */
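/* E.g. for unsigned A, A / (8 << N) folds to
A >> (N + 3), since log2(8) == 3. */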
12413 strict_overflow_p = false;
12414 if (TREE_CODE (arg1) == LSHIFT_EXPR
12415 && (TYPE_UNSIGNED (type)
12416 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12417 {
12418 tree sval = TREE_OPERAND (arg1, 0);
12419 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
12420 {
12421 tree sh_cnt = TREE_OPERAND (arg1, 1);
12422 unsigned long pow2;
12423
12424 if (TREE_INT_CST_LOW (sval))
12425 pow2 = exact_log2 (TREE_INT_CST_LOW (sval));
12426 else
12427 pow2 = exact_log2 (TREE_INT_CST_HIGH (sval))
12428 + HOST_BITS_PER_WIDE_INT;
12429
12430 if (strict_overflow_p)
12431 fold_overflow_warning (("assuming signed overflow does not "
12432 "occur when simplifying A / (B << N)"),
12433 WARN_STRICT_OVERFLOW_MISC);
12434
12435 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
12436 sh_cnt,
12437 build_int_cst (TREE_TYPE (sh_cnt),
12438 pow2));
12439 return fold_build2_loc (loc, RSHIFT_EXPR, type,
12440 fold_convert_loc (loc, type, arg0), sh_cnt);
12441 }
12442 }
12443
12444 /* For unsigned integral types, FLOOR_DIV_EXPR is the same as
12445 TRUNC_DIV_EXPR. Rewrite into the latter in this case. */
12446 if (INTEGRAL_TYPE_P (type)
12447 && TYPE_UNSIGNED (type)
12448 && code == FLOOR_DIV_EXPR)
12449 return fold_build2_loc (loc, TRUNC_DIV_EXPR, type, op0, op1);
12450
12451 /* Fall through */
12452
12453 case ROUND_DIV_EXPR:
12454 case CEIL_DIV_EXPR:
12455 case EXACT_DIV_EXPR:
12456 if (integer_onep (arg1))
12457 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12458 if (integer_zerop (arg1))
12459 return NULL_TREE;
12460 /* X / -1 is -X. */
12461 if (!TYPE_UNSIGNED (type)
12462 && TREE_CODE (arg1) == INTEGER_CST
12463 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12464 && TREE_INT_CST_HIGH (arg1) == -1)
12465 return fold_convert_loc (loc, type, negate_expr (arg0));
12466
12467 /* Convert -A / -B to A / B when the type is signed and overflow is
12468 undefined. */
12469 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12470 && TREE_CODE (arg0) == NEGATE_EXPR
12471 && negate_expr_p (arg1))
12472 {
12473 if (INTEGRAL_TYPE_P (type))
12474 fold_overflow_warning (("assuming signed overflow does not occur "
12475 "when distributing negation across "
12476 "division"),
12477 WARN_STRICT_OVERFLOW_MISC);
12478 return fold_build2_loc (loc, code, type,
12479 fold_convert_loc (loc, type,
12480 TREE_OPERAND (arg0, 0)),
12481 fold_convert_loc (loc, type,
12482 negate_expr (arg1)));
12483 }
12484 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12485 && TREE_CODE (arg1) == NEGATE_EXPR
12486 && negate_expr_p (arg0))
12487 {
12488 if (INTEGRAL_TYPE_P (type))
12489 fold_overflow_warning (("assuming signed overflow does not occur "
12490 "when distributing negation across "
12491 "division"),
12492 WARN_STRICT_OVERFLOW_MISC);
12493 return fold_build2_loc (loc, code, type,
12494 fold_convert_loc (loc, type,
12495 negate_expr (arg0)),
12496 fold_convert_loc (loc, type,
12497 TREE_OPERAND (arg1, 0)));
12498 }
12499
12500 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
12501 operation, EXACT_DIV_EXPR.
12502
12503 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
12504 At one time others generated faster code, but it's not clear if they
12505 still do after the last round of changes to the DIV code in expmed.c. */
12506 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
12507 && multiple_of_p (type, arg0, arg1))
12508 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
12509
12510 strict_overflow_p = false;
12511 if (TREE_CODE (arg1) == INTEGER_CST
12512 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12513 &strict_overflow_p)))
12514 {
12515 if (strict_overflow_p)
12516 fold_overflow_warning (("assuming signed overflow does not occur "
12517 "when simplifying division"),
12518 WARN_STRICT_OVERFLOW_MISC);
12519 return fold_convert_loc (loc, type, tem);
12520 }
12521
12522 return NULL_TREE;
12523
12524 case CEIL_MOD_EXPR:
12525 case FLOOR_MOD_EXPR:
12526 case ROUND_MOD_EXPR:
12527 case TRUNC_MOD_EXPR:
12528 /* X % 1 is always zero, but be sure to preserve any side
12529 effects in X. */
12530 if (integer_onep (arg1))
12531 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12532
12533 /* For X % 0, return X % 0 unchanged so that we can get the
12534 proper warnings and errors. */
12535 if (integer_zerop (arg1))
12536 return NULL_TREE;
12537
12538 /* 0 % X is always zero, but be sure to preserve any side
12539 effects in X. Place this after checking for X == 0. */
12540 if (integer_zerop (arg0))
12541 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12542
12543 /* X % -1 is zero. */
12544 if (!TYPE_UNSIGNED (type)
12545 && TREE_CODE (arg1) == INTEGER_CST
12546 && TREE_INT_CST_LOW (arg1) == HOST_WIDE_INT_M1U
12547 && TREE_INT_CST_HIGH (arg1) == -1)
12548 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12549
12550 /* X % -C is the same as X % C. */
12551 if (code == TRUNC_MOD_EXPR
12552 && !TYPE_UNSIGNED (type)
12553 && TREE_CODE (arg1) == INTEGER_CST
12554 && !TREE_OVERFLOW (arg1)
12555 && TREE_INT_CST_HIGH (arg1) < 0
12556 && !TYPE_OVERFLOW_TRAPS (type)
12557 /* Avoid this transformation if C is INT_MIN, i.e. C == -C. */
12558 && !sign_bit_p (arg1, arg1))
12559 return fold_build2_loc (loc, code, type,
12560 fold_convert_loc (loc, type, arg0),
12561 fold_convert_loc (loc, type,
12562 negate_expr (arg1)));
12563
12564 /* X % -Y is the same as X % Y. */
12565 if (code == TRUNC_MOD_EXPR
12566 && !TYPE_UNSIGNED (type)
12567 && TREE_CODE (arg1) == NEGATE_EXPR
12568 && !TYPE_OVERFLOW_TRAPS (type))
12569 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
12570 fold_convert_loc (loc, type,
12571 TREE_OPERAND (arg1, 0)));
12572
12573 strict_overflow_p = false;
12574 if (TREE_CODE (arg1) == INTEGER_CST
12575 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
12576 &strict_overflow_p)))
12577 {
12578 if (strict_overflow_p)
12579 fold_overflow_warning (("assuming signed overflow does not occur "
12580 "when simplifying modulus"),
12581 WARN_STRICT_OVERFLOW_MISC);
12582 return fold_convert_loc (loc, type, tem);
12583 }
12584
12585 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
12586 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
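/* E.g. unsigned X % 8 becomes X & 7, and A % (4 << N)
becomes A & ((4 << N) - 1). */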
12587 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
12588 && (TYPE_UNSIGNED (type)
12589 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
12590 {
12591 tree c = arg1;
12592 /* Also optimize A % (C << N) where C is a power of 2,
12593 to A & ((C << N) - 1). */
12594 if (TREE_CODE (arg1) == LSHIFT_EXPR)
12595 c = TREE_OPERAND (arg1, 0);
12596
12597 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
12598 {
12599 tree mask
12600 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
12601 build_int_cst (TREE_TYPE (arg1), 1));
12602 if (strict_overflow_p)
12603 fold_overflow_warning (("assuming signed overflow does not "
12604 "occur when simplifying "
12605 "X % (power of two)"),
12606 WARN_STRICT_OVERFLOW_MISC);
12607 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12608 fold_convert_loc (loc, type, arg0),
12609 fold_convert_loc (loc, type, mask));
12610 }
12611 }
12612
12613 return NULL_TREE;
12614
12615 case LROTATE_EXPR:
12616 case RROTATE_EXPR:
12617 if (integer_all_onesp (arg0))
12618 return omit_one_operand_loc (loc, type, arg0, arg1);
12619 goto shift;
12620
12621 case RSHIFT_EXPR:
12622 /* Optimize -1 >> x for arithmetic right shifts. */
12623 if (integer_all_onesp (arg0) && !TYPE_UNSIGNED (type)
12624 && tree_expr_nonnegative_p (arg1))
12625 return omit_one_operand_loc (loc, type, arg0, arg1);
12626 /* ... fall through ... */
12627
12628 case LSHIFT_EXPR:
12629 shift:
12630 if (integer_zerop (arg1))
12631 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12632 if (integer_zerop (arg0))
12633 return omit_one_operand_loc (loc, type, arg0, arg1);
12634
12635 /* Prefer vector1 << scalar to vector1 << vector2
12636 if vector2 is uniform. */
12637 if (VECTOR_TYPE_P (TREE_TYPE (arg1))
12638 && (tem = uniform_vector_p (arg1)) != NULL_TREE)
12639 return fold_build2_loc (loc, code, type, op0, tem);
12640
12641 /* Since a negative shift count is not well-defined,
12642 don't try to compute it in the compiler. */
12643 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12644 return NULL_TREE;
12645
12646 prec = element_precision (type);
12647
12648 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
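/* E.g. (X << 3) << 5 becomes X << 8. If the combined count
reaches the precision, e.g. (X << 20) << 20 on a 32-bit type,
the result folds to 0, while rotates reduce the count mod 32. */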
12649 if (TREE_CODE (op0) == code && host_integerp (arg1, true)
12650 && TREE_INT_CST_LOW (arg1) < prec
12651 && host_integerp (TREE_OPERAND (arg0, 1), true)
12652 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12653 {
12654 unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
12655 + TREE_INT_CST_LOW (arg1));
12656
12657 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12658 being well defined. */
12659 if (low >= prec)
12660 {
12661 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12662 low = low % prec;
12663 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12664 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12665 TREE_OPERAND (arg0, 0));
12666 else
12667 low = prec - 1;
12668 }
12669
12670 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12671 build_int_cst (TREE_TYPE (arg1), low));
12672 }
12673
12674 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12675 into x & ((unsigned)-1 >> c) for unsigned types. */
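/* E.g. with c == 4 on a 32-bit unsigned type, (x >> 4) << 4
becomes x & 0xfffffff0 and (x << 4) >> 4 becomes
x & 0x0fffffff. */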
12676 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12677 || (TYPE_UNSIGNED (type)
12678 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12679 && host_integerp (arg1, false)
12680 && TREE_INT_CST_LOW (arg1) < prec
12681 && host_integerp (TREE_OPERAND (arg0, 1), false)
12682 && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
12683 {
12684 HOST_WIDE_INT low0 = TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1));
12685 HOST_WIDE_INT low1 = TREE_INT_CST_LOW (arg1);
12686 tree lshift;
12687 tree arg00;
12688
12689 if (low0 == low1)
12690 {
12691 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12692
12693 lshift = build_minus_one_cst (type);
12694 lshift = const_binop (code, lshift, arg1);
12695
12696 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12697 }
12698 }
12699
12700 /* Rewrite an LROTATE_EXPR by a constant into an
12701 RROTATE_EXPR by a new constant. */
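/* E.g. on a 32-bit type, X lrotate 8 becomes
X rrotate 24. */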
12702 if (code == LROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST)
12703 {
12704 tree tem = build_int_cst (TREE_TYPE (arg1), prec);
12705 tem = const_binop (MINUS_EXPR, tem, arg1);
12706 return fold_build2_loc (loc, RROTATE_EXPR, type, op0, tem);
12707 }
12708
12709 /* If we have a rotate of a bit operation with the rotate count and
12710 the second operand of the bit operation both constant,
12711 permute the two operations. */
12712 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12713 && (TREE_CODE (arg0) == BIT_AND_EXPR
12714 || TREE_CODE (arg0) == BIT_IOR_EXPR
12715 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12716 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12717 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12718 fold_build2_loc (loc, code, type,
12719 TREE_OPERAND (arg0, 0), arg1),
12720 fold_build2_loc (loc, code, type,
12721 TREE_OPERAND (arg0, 1), arg1));
12722
12723 /* Two consecutive rotates adding up to the precision of the
12724 type can be ignored. */
12725 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12726 && TREE_CODE (arg0) == RROTATE_EXPR
12727 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12728 && TREE_INT_CST_HIGH (arg1) == 0
12729 && TREE_INT_CST_HIGH (TREE_OPERAND (arg0, 1)) == 0
12730 && ((TREE_INT_CST_LOW (arg1)
12731 + TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)))
12732 == prec))
12733 return TREE_OPERAND (arg0, 0);
12734
12735 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12736 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12737 if the latter can be further optimized. */
12738 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12739 && TREE_CODE (arg0) == BIT_AND_EXPR
12740 && TREE_CODE (arg1) == INTEGER_CST
12741 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12742 {
12743 tree mask = fold_build2_loc (loc, code, type,
12744 fold_convert_loc (loc, type,
12745 TREE_OPERAND (arg0, 1)),
12746 arg1);
12747 tree shift = fold_build2_loc (loc, code, type,
12748 fold_convert_loc (loc, type,
12749 TREE_OPERAND (arg0, 0)),
12750 arg1);
12751 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12752 if (tem)
12753 return tem;
12754 }
12755
12756 return NULL_TREE;
12757
12758 case MIN_EXPR:
12759 if (operand_equal_p (arg0, arg1, 0))
12760 return omit_one_operand_loc (loc, type, arg0, arg1);
12761 if (INTEGRAL_TYPE_P (type)
12762 && operand_equal_p (arg1, TYPE_MIN_VALUE (type), OEP_ONLY_CONST))
12763 return omit_one_operand_loc (loc, type, arg1, arg0);
12764 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12765 if (tem)
12766 return tem;
12767 goto associate;
12768
12769 case MAX_EXPR:
12770 if (operand_equal_p (arg0, arg1, 0))
12771 return omit_one_operand_loc (loc, type, arg0, arg1);
12772 if (INTEGRAL_TYPE_P (type)
12773 && TYPE_MAX_VALUE (type)
12774 && operand_equal_p (arg1, TYPE_MAX_VALUE (type), OEP_ONLY_CONST))
12775 return omit_one_operand_loc (loc, type, arg1, arg0);
12776 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12777 if (tem)
12778 return tem;
12779 goto associate;
12780
12781 case TRUTH_ANDIF_EXPR:
12782 /* Note that the operands of this must be ints
12783 and their values must be 0 or 1.
12784 ("true" is a fixed value perhaps depending on the language.) */
12785 /* If first arg is constant zero, return it. */
12786 if (integer_zerop (arg0))
12787 return fold_convert_loc (loc, type, arg0);
12788 case TRUTH_AND_EXPR:
12789 /* If either arg is constant true, drop it. */
12790 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12791 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12792 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12793 /* Preserve sequence points. */
12794 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12795 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12796 /* If second arg is constant zero, result is zero, but first arg
12797 must be evaluated. */
12798 if (integer_zerop (arg1))
12799 return omit_one_operand_loc (loc, type, arg1, arg0);
12800 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12801 case will be handled here. */
12802 if (integer_zerop (arg0))
12803 return omit_one_operand_loc (loc, type, arg0, arg1);
12804
12805 /* !X && X is always false. */
12806 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12807 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12808 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12809 /* X && !X is always false. */
12810 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12811 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12812 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12813
12814 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12815 means A >= Y && A != MAX, but in this case we know that
12816 A < X <= MAX. */
12817
12818 if (!TREE_SIDE_EFFECTS (arg0)
12819 && !TREE_SIDE_EFFECTS (arg1))
12820 {
12821 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12822 if (tem && !operand_equal_p (tem, arg0, 0))
12823 return fold_build2_loc (loc, code, type, tem, arg1);
12824
12825 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12826 if (tem && !operand_equal_p (tem, arg1, 0))
12827 return fold_build2_loc (loc, code, type, arg0, tem);
12828 }
12829
12830 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12831 != NULL_TREE)
12832 return tem;
12833
12834 return NULL_TREE;
12835
12836 case TRUTH_ORIF_EXPR:
12837 /* Note that the operands of this must be ints
12838 and their values must be 0 or true.
12839 ("true" is a fixed value perhaps depending on the language.) */
12840 /* If first arg is constant true, return it. */
12841 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12842 return fold_convert_loc (loc, type, arg0);
12843 case TRUTH_OR_EXPR:
12844 /* If either arg is constant zero, drop it. */
12845 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12846 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12847 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12848 /* Preserve sequence points. */
12849 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12850 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12851 /* If second arg is constant true, result is true, but we must
12852 evaluate first arg. */
12853 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12854 return omit_one_operand_loc (loc, type, arg1, arg0);
12855 /* Likewise for first arg, but note this only occurs here for
12856 TRUTH_OR_EXPR. */
12857 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12858 return omit_one_operand_loc (loc, type, arg0, arg1);
12859
12860 /* !X || X is always true. */
12861 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12862 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12863 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12864 /* X || !X is always true. */
12865 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12866 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12867 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12868
12869 /* (X && !Y) || (!X && Y) is X ^ Y */
12870 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12871 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12872 {
12873 tree a0, a1, l0, l1, n0, n1;
12874
12875 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12876 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12877
12878 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12879 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12880
12881 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12882 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12883
12884 if ((operand_equal_p (n0, a0, 0)
12885 && operand_equal_p (n1, a1, 0))
12886 || (operand_equal_p (n0, a1, 0)
12887 && operand_equal_p (n1, a0, 0)))
12888 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12889 }
12890
12891 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12892 != NULL_TREE)
12893 return tem;
12894
12895 return NULL_TREE;
12896
12897 case TRUTH_XOR_EXPR:
12898 /* If the second arg is constant zero, drop it. */
12899 if (integer_zerop (arg1))
12900 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12901 /* If the second arg is constant true, this is a logical inversion. */
12902 if (integer_onep (arg1))
12903 {
12904 tem = invert_truthvalue_loc (loc, arg0);
12905 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12906 }
12907 /* Identical arguments cancel to zero. */
12908 if (operand_equal_p (arg0, arg1, 0))
12909 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12910
12911 /* !X ^ X is always true. */
12912 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12913 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12914 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12915
12916 /* X ^ !X is always true. */
12917 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12918 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12919 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12920
12921 return NULL_TREE;
12922
12923 case EQ_EXPR:
12924 case NE_EXPR:
12925 STRIP_NOPS (arg0);
12926 STRIP_NOPS (arg1);
12927
12928 tem = fold_comparison (loc, code, type, op0, op1);
12929 if (tem != NULL_TREE)
12930 return tem;
12931
12932 /* bool_var != 0 becomes bool_var. */
12933 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12934 && code == NE_EXPR)
12935 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12936
12937 /* bool_var == 1 becomes bool_var. */
12938 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12939 && code == EQ_EXPR)
12940 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12941
12942 /* bool_var != 1 becomes !bool_var. */
12943 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12944 && code == NE_EXPR)
12945 return fold_convert_loc (loc, type,
12946 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12947 TREE_TYPE (arg0), arg0));
12948
12949 /* bool_var == 0 becomes !bool_var. */
12950 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12951 && code == EQ_EXPR)
12952 return fold_convert_loc (loc, type,
12953 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12954 TREE_TYPE (arg0), arg0));
12955
12956 /* !exp != 0 becomes !exp */
12957 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12958 && code == NE_EXPR)
12959 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12960
12961 /* If this is an equality comparison of the address of two non-weak,
12962 unaliased symbols, neither of which is extern (since we do not
12963 have access to attributes for externs), then we know the result. */
12964 if (TREE_CODE (arg0) == ADDR_EXPR
12965 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg0, 0))
12966 && ! DECL_WEAK (TREE_OPERAND (arg0, 0))
12967 && ! lookup_attribute ("alias",
12968 DECL_ATTRIBUTES (TREE_OPERAND (arg0, 0)))
12969 && ! DECL_EXTERNAL (TREE_OPERAND (arg0, 0))
12970 && TREE_CODE (arg1) == ADDR_EXPR
12971 && VAR_OR_FUNCTION_DECL_P (TREE_OPERAND (arg1, 0))
12972 && ! DECL_WEAK (TREE_OPERAND (arg1, 0))
12973 && ! lookup_attribute ("alias",
12974 DECL_ATTRIBUTES (TREE_OPERAND (arg1, 0)))
12975 && ! DECL_EXTERNAL (TREE_OPERAND (arg1, 0)))
12976 {
12977 /* We know that we're looking at the address of two
12978 non-weak, unaliased, static _DECL nodes.
12979
12980 It is both wasteful and incorrect to call operand_equal_p
12981 to compare the two ADDR_EXPR nodes. It is wasteful in that
12982 all we need to do is test pointer equality for the arguments
12983 to the two ADDR_EXPR nodes. It is incorrect to use
12984 operand_equal_p as that function is NOT equivalent to a
12985 C equality test. It can in fact return false for two
12986 objects which would test as equal using the C equality
12987 operator. */
12988 bool equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12989 return constant_boolean_node (equal
12990 ? code == EQ_EXPR : code != EQ_EXPR,
12991 type);
12992 }
12993
12994 /* If this is an EQ or NE comparison of a constant with a PLUS_EXPR or
12995 a MINUS_EXPR of a constant, we can convert it into a comparison with
12996 a revised constant as long as no overflow occurs. */
12997 if (TREE_CODE (arg1) == INTEGER_CST
12998 && (TREE_CODE (arg0) == PLUS_EXPR
12999 || TREE_CODE (arg0) == MINUS_EXPR)
13000 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13001 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
13002 ? MINUS_EXPR : PLUS_EXPR,
13003 fold_convert_loc (loc, TREE_TYPE (arg0),
13004 arg1),
13005 TREE_OPERAND (arg0, 1)))
13006 && !TREE_OVERFLOW (tem))
13007 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13008
13009 /* Similarly for a NEGATE_EXPR. */
13010 if (TREE_CODE (arg0) == NEGATE_EXPR
13011 && TREE_CODE (arg1) == INTEGER_CST
13012 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
13013 arg1)))
13014 && TREE_CODE (tem) == INTEGER_CST
13015 && !TREE_OVERFLOW (tem))
13016 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
13017
13018 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
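/* E.g. X ^ 5 == 3 becomes X == 6, since 5 ^ 3 == 6. */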
13019 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13020 && TREE_CODE (arg1) == INTEGER_CST
13021 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13022 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13023 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
13024 fold_convert_loc (loc,
13025 TREE_TYPE (arg0),
13026 arg1),
13027 TREE_OPERAND (arg0, 1)));
13028
13029 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
13030 if ((TREE_CODE (arg0) == PLUS_EXPR
13031 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
13032 || TREE_CODE (arg0) == MINUS_EXPR)
13033 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13034 0)),
13035 arg1, 0)
13036 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13037 || POINTER_TYPE_P (TREE_TYPE (arg0))))
13038 {
13039 tree val = TREE_OPERAND (arg0, 1);
13040 return omit_two_operands_loc (loc, type,
13041 fold_build2_loc (loc, code, type,
13042 val,
13043 build_int_cst (TREE_TYPE (val),
13044 0)),
13045 TREE_OPERAND (arg0, 0), arg1);
13046 }
13047
13048 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
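/* E.g. 7 - X == X would require 2 * X == 7, which has no
integer solution since 7 is odd (2 * X is even even with
wrapping), so it folds to false and 7 - X != X to true. */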
13049 if (TREE_CODE (arg0) == MINUS_EXPR
13050 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
13051 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
13052 1)),
13053 arg1, 0)
13054 && (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 0)) & 1) == 1)
13055 {
13056 return omit_two_operands_loc (loc, type,
13057 code == NE_EXPR
13058 ? boolean_true_node : boolean_false_node,
13059 TREE_OPERAND (arg0, 1), arg1);
13060 }
13061
13062 /* If we have X - Y == 0, we can convert that to X == Y and similarly
13063 for !=. Don't do this for ordered comparisons due to overflow. */
13064 if (TREE_CODE (arg0) == MINUS_EXPR
13065 && integer_zerop (arg1))
13066 return fold_build2_loc (loc, code, type,
13067 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
13068
13069 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
13070 if (TREE_CODE (arg0) == ABS_EXPR
13071 && (integer_zerop (arg1) || real_zerop (arg1)))
13072 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
13073
13074 /* If this is an EQ or NE comparison with zero and ARG0 is
13075 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
13076 two operations, but the latter can be done in one less insn
13077 on machines that have only two-operand insns or on which a
13078 constant cannot be the first operand. */
13079 if (TREE_CODE (arg0) == BIT_AND_EXPR
13080 && integer_zerop (arg1))
13081 {
13082 tree arg00 = TREE_OPERAND (arg0, 0);
13083 tree arg01 = TREE_OPERAND (arg0, 1);
13084 if (TREE_CODE (arg00) == LSHIFT_EXPR
13085 && integer_onep (TREE_OPERAND (arg00, 0)))
13086 {
13087 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
13088 arg01, TREE_OPERAND (arg00, 1));
13089 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13090 build_int_cst (TREE_TYPE (arg0), 1));
13091 return fold_build2_loc (loc, code, type,
13092 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13093 arg1);
13094 }
13095 else if (TREE_CODE (arg01) == LSHIFT_EXPR
13096 && integer_onep (TREE_OPERAND (arg01, 0)))
13097 {
13098 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
13099 arg00, TREE_OPERAND (arg01, 1));
13100 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
13101 build_int_cst (TREE_TYPE (arg0), 1));
13102 return fold_build2_loc (loc, code, type,
13103 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
13104 arg1);
13105 }
13106 }
13107
13108 /* If this is an NE or EQ comparison of zero against the result of a
13109 signed MOD operation whose second operand is a power of 2, make
13110 the MOD operation unsigned since it is simpler and equivalent. */
13111 if (integer_zerop (arg1)
13112 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
13113 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
13114 || TREE_CODE (arg0) == CEIL_MOD_EXPR
13115 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
13116 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
13117 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13118 {
13119 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
13120 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
13121 fold_convert_loc (loc, newtype,
13122 TREE_OPERAND (arg0, 0)),
13123 fold_convert_loc (loc, newtype,
13124 TREE_OPERAND (arg0, 1)));
13125
13126 return fold_build2_loc (loc, code, type, newmod,
13127 fold_convert_loc (loc, newtype, arg1));
13128 }
13129
13130 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
13131 C1 is a valid shift constant, and C2 is a power of two, i.e.
13132 a single bit. */
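/* E.g. ((X >> 3) & 4) != 0 becomes (X & 32) != 0,
since 4 << 3 == 32 does not overflow. */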
13133 if (TREE_CODE (arg0) == BIT_AND_EXPR
13134 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
13135 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
13136 == INTEGER_CST
13137 && integer_pow2p (TREE_OPERAND (arg0, 1))
13138 && integer_zerop (arg1))
13139 {
13140 tree itype = TREE_TYPE (arg0);
13141 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
13142 prec = TYPE_PRECISION (itype);
13143
13144 /* Check for a valid shift count. */
13145 if (TREE_INT_CST_HIGH (arg001) == 0
13146 && TREE_INT_CST_LOW (arg001) < prec)
13147 {
13148 tree arg01 = TREE_OPERAND (arg0, 1);
13149 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13150 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
13151 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
13152 can be rewritten as (X & (C2 << C1)) != 0. */
13153 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
13154 {
13155 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
13156 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
13157 return fold_build2_loc (loc, code, type, tem,
13158 fold_convert_loc (loc, itype, arg1));
13159 }
13160 /* Otherwise, for signed (arithmetic) shifts,
13161 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
13162 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
13163 else if (!TYPE_UNSIGNED (itype))
13164 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
13165 arg000, build_int_cst (itype, 0));
13166 /* Otherwise, for unsigned (logical) shifts,
13167 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
13168 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
13169 else
13170 return omit_one_operand_loc (loc, type,
13171 code == EQ_EXPR ? integer_one_node
13172 : integer_zero_node,
13173 arg000);
13174 }
13175 }
13176
13177 /* If we have (A & C) == C where C is a power of 2, convert this into
13178 (A & C) != 0. Similarly for NE_EXPR. */
13179 if (TREE_CODE (arg0) == BIT_AND_EXPR
13180 && integer_pow2p (TREE_OPERAND (arg0, 1))
13181 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13182 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13183 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
13184 integer_zero_node));
13185
13186 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
13187 bit, then fold the expression into A < 0 or A >= 0. */
13188 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
13189 if (tem)
13190 return tem;
13191
13192 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
13193 Similarly for NE_EXPR. */
13194 if (TREE_CODE (arg0) == BIT_AND_EXPR
13195 && TREE_CODE (arg1) == INTEGER_CST
13196 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13197 {
13198 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
13199 TREE_TYPE (TREE_OPERAND (arg0, 1)),
13200 TREE_OPERAND (arg0, 1));
13201 tree dandnotc
13202 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13203 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
13204 notc);
13205 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13206 if (integer_nonzerop (dandnotc))
13207 return omit_one_operand_loc (loc, type, rslt, arg0);
13208 }
13209
13210 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
13211 Similarly for NE_EXPR. */
13212 if (TREE_CODE (arg0) == BIT_IOR_EXPR
13213 && TREE_CODE (arg1) == INTEGER_CST
13214 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13215 {
13216 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
13217 tree candnotd
13218 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13219 TREE_OPERAND (arg0, 1),
13220 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
13221 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
13222 if (integer_nonzerop (candnotd))
13223 return omit_one_operand_loc (loc, type, rslt, arg0);
13224 }
13225
13226 /* If this is a comparison of a field, we may be able to simplify it. */
13227 if ((TREE_CODE (arg0) == COMPONENT_REF
13228 || TREE_CODE (arg0) == BIT_FIELD_REF)
13229 /* Handle the constant case even without -O
13230 to make sure the warnings are given. */
13231 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
13232 {
13233 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
13234 if (t1)
13235 return t1;
13236 }
13237
13238 /* Optimize comparisons of strlen vs zero to a compare of the
13239 first character of the string vs zero. To wit,
13240 strlen(ptr) == 0 => *ptr == 0
13241 strlen(ptr) != 0 => *ptr != 0
13242 Other cases should reduce to one of these two (or a constant)
13243 due to the return value of strlen being unsigned. */
13244 if (TREE_CODE (arg0) == CALL_EXPR
13245 && integer_zerop (arg1))
13246 {
13247 tree fndecl = get_callee_fndecl (arg0);
13248
13249 if (fndecl
13250 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
13251 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
13252 && call_expr_nargs (arg0) == 1
13253 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
13254 {
13255 tree iref = build_fold_indirect_ref_loc (loc,
13256 CALL_EXPR_ARG (arg0, 0));
13257 return fold_build2_loc (loc, code, type, iref,
13258 build_int_cst (TREE_TYPE (iref), 0));
13259 }
13260 }
13261
13262 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
13263 of X. Similarly fold (X >> C) == 0 into X >= 0. */
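/* E.g. for a 32-bit signed X, (X >> 31) != 0 becomes X < 0
and (X >> 31) == 0 becomes X >= 0. */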
13264 if (TREE_CODE (arg0) == RSHIFT_EXPR
13265 && integer_zerop (arg1)
13266 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13267 {
13268 tree arg00 = TREE_OPERAND (arg0, 0);
13269 tree arg01 = TREE_OPERAND (arg0, 1);
13270 tree itype = TREE_TYPE (arg00);
13271 if (TREE_INT_CST_HIGH (arg01) == 0
13272 && TREE_INT_CST_LOW (arg01)
13273 == (unsigned HOST_WIDE_INT) (TYPE_PRECISION (itype) - 1))
13274 {
13275 if (TYPE_UNSIGNED (itype))
13276 {
13277 itype = signed_type_for (itype);
13278 arg00 = fold_convert_loc (loc, itype, arg00);
13279 }
13280 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
13281 type, arg00, build_zero_cst (itype));
13282 }
13283 }
13284
13285 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
13286 if (integer_zerop (arg1)
13287 && TREE_CODE (arg0) == BIT_XOR_EXPR)
13288 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13289 TREE_OPERAND (arg0, 1));
13290
13291 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
13292 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13293 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
13294 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13295 build_zero_cst (TREE_TYPE (arg0)));
13296 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
13297 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13298 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13299 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
13300 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
13301 build_zero_cst (TREE_TYPE (arg0)));
13302
13303 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
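/* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */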
13304 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13305 && TREE_CODE (arg1) == INTEGER_CST
13306 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
13307 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
13308 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
13309 TREE_OPERAND (arg0, 1), arg1));
13310
13311 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
13312 (X & C) == 0 when C is a single bit. */
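/* E.g. (~x & 8) == 0 holds exactly when bit 3 of x is set,
   i.e. when (x & 8) != 0.  */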
13313 if (TREE_CODE (arg0) == BIT_AND_EXPR
13314 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
13315 && integer_zerop (arg1)
13316 && integer_pow2p (TREE_OPERAND (arg0, 1)))
13317 {
13318 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
13319 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
13320 TREE_OPERAND (arg0, 1));
13321 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
13322 type, tem,
13323 fold_convert_loc (loc, TREE_TYPE (arg0),
13324 arg1));
13325 }
13326
13327 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
13328 constant C is a power of two, i.e. a single bit. */
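/* E.g. ((x & 4) ^ 4) == 0 requires x & 4 == 4, i.e. (x & 4) != 0,
   since x & 4 can only be 0 or 4.  */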
13329 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13330 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13331 && integer_zerop (arg1)
13332 && integer_pow2p (TREE_OPERAND (arg0, 1))
13333 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13334 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13335 {
13336 tree arg00 = TREE_OPERAND (arg0, 0);
13337 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13338 arg00, build_int_cst (TREE_TYPE (arg00), 0));
13339 }
13340
13341 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
13342 when C is a power of two, i.e. a single bit. */
13343 if (TREE_CODE (arg0) == BIT_AND_EXPR
13344 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
13345 && integer_zerop (arg1)
13346 && integer_pow2p (TREE_OPERAND (arg0, 1))
13347 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13348 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
13349 {
13350 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
13351 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
13352 arg000, TREE_OPERAND (arg0, 1));
13353 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
13354 tem, build_int_cst (TREE_TYPE (tem), 0));
13355 }
13356
13357 if (integer_zerop (arg1)
13358 && tree_expr_nonzero_p (arg0))
13359 {
13360 tree res = constant_boolean_node (code == NE_EXPR, type);
13361 return omit_one_operand_loc (loc, type, res, arg0);
13362 }
13363
13364 /* Fold -X op -Y as X op Y, where op is eq/ne. */
13365 if (TREE_CODE (arg0) == NEGATE_EXPR
13366 && TREE_CODE (arg1) == NEGATE_EXPR)
13367 return fold_build2_loc (loc, code, type,
13368 TREE_OPERAND (arg0, 0),
13369 fold_convert_loc (loc, TREE_TYPE (arg0),
13370 TREE_OPERAND (arg1, 0)));
13371
13372 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
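/* E.g. (x & m) == (y & m) holds iff x and y agree on every bit of m,
   i.e. iff ((x ^ y) & m) == 0.  */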
13373 if (TREE_CODE (arg0) == BIT_AND_EXPR
13374 && TREE_CODE (arg1) == BIT_AND_EXPR)
13375 {
13376 tree arg00 = TREE_OPERAND (arg0, 0);
13377 tree arg01 = TREE_OPERAND (arg0, 1);
13378 tree arg10 = TREE_OPERAND (arg1, 0);
13379 tree arg11 = TREE_OPERAND (arg1, 1);
13380 tree itype = TREE_TYPE (arg0);
13381
13382 if (operand_equal_p (arg01, arg11, 0))
13383 return fold_build2_loc (loc, code, type,
13384 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13385 fold_build2_loc (loc,
13386 BIT_XOR_EXPR, itype,
13387 arg00, arg10),
13388 arg01),
13389 build_zero_cst (itype));
13390
13391 if (operand_equal_p (arg01, arg10, 0))
13392 return fold_build2_loc (loc, code, type,
13393 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13394 fold_build2_loc (loc,
13395 BIT_XOR_EXPR, itype,
13396 arg00, arg11),
13397 arg01),
13398 build_zero_cst (itype));
13399
13400 if (operand_equal_p (arg00, arg11, 0))
13401 return fold_build2_loc (loc, code, type,
13402 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13403 fold_build2_loc (loc,
13404 BIT_XOR_EXPR, itype,
13405 arg01, arg10),
13406 arg00),
13407 build_zero_cst (itype));
13408
13409 if (operand_equal_p (arg00, arg10, 0))
13410 return fold_build2_loc (loc, code, type,
13411 fold_build2_loc (loc, BIT_AND_EXPR, itype,
13412 fold_build2_loc (loc,
13413 BIT_XOR_EXPR, itype,
13414 arg01, arg11),
13415 arg00),
13416 build_zero_cst (itype));
13417 }
13418
13419 if (TREE_CODE (arg0) == BIT_XOR_EXPR
13420 && TREE_CODE (arg1) == BIT_XOR_EXPR)
13421 {
13422 tree arg00 = TREE_OPERAND (arg0, 0);
13423 tree arg01 = TREE_OPERAND (arg0, 1);
13424 tree arg10 = TREE_OPERAND (arg1, 0);
13425 tree arg11 = TREE_OPERAND (arg1, 1);
13426 tree itype = TREE_TYPE (arg0);
13427
13428 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
13429 operand_equal_p guarantees no side-effects so we don't need
13430 to use omit_one_operand on Z. */
13431 if (operand_equal_p (arg01, arg11, 0))
13432 return fold_build2_loc (loc, code, type, arg00,
13433 fold_convert_loc (loc, TREE_TYPE (arg00),
13434 arg10));
13435 if (operand_equal_p (arg01, arg10, 0))
13436 return fold_build2_loc (loc, code, type, arg00,
13437 fold_convert_loc (loc, TREE_TYPE (arg00),
13438 arg11));
13439 if (operand_equal_p (arg00, arg11, 0))
13440 return fold_build2_loc (loc, code, type, arg01,
13441 fold_convert_loc (loc, TREE_TYPE (arg01),
13442 arg10));
13443 if (operand_equal_p (arg00, arg10, 0))
13444 return fold_build2_loc (loc, code, type, arg01,
13445 fold_convert_loc (loc, TREE_TYPE (arg01),
13446 arg11));
13447
13448 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
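/* E.g. (x ^ 5) == (y ^ 3) becomes (x ^ 6) == y.  */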
13449 if (TREE_CODE (arg01) == INTEGER_CST
13450 && TREE_CODE (arg11) == INTEGER_CST)
13451 {
13452 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
13453 fold_convert_loc (loc, itype, arg11));
13454 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
13455 return fold_build2_loc (loc, code, type, tem,
13456 fold_convert_loc (loc, itype, arg10));
13457 }
13458 }
13459
13460 /* Attempt to simplify equality/inequality comparisons of complex
13461 values. Only lower the comparison if the result is known or
13462 can be simplified to a single scalar comparison. */
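/* E.g. x + 0i == y + 1i folds to false since the imaginary parts
   can never match, while x + 0i == y + 0i reduces to the scalar
   comparison x == y.  */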
13463 if ((TREE_CODE (arg0) == COMPLEX_EXPR
13464 || TREE_CODE (arg0) == COMPLEX_CST)
13465 && (TREE_CODE (arg1) == COMPLEX_EXPR
13466 || TREE_CODE (arg1) == COMPLEX_CST))
13467 {
13468 tree real0, imag0, real1, imag1;
13469 tree rcond, icond;
13470
13471 if (TREE_CODE (arg0) == COMPLEX_EXPR)
13472 {
13473 real0 = TREE_OPERAND (arg0, 0);
13474 imag0 = TREE_OPERAND (arg0, 1);
13475 }
13476 else
13477 {
13478 real0 = TREE_REALPART (arg0);
13479 imag0 = TREE_IMAGPART (arg0);
13480 }
13481
13482 if (TREE_CODE (arg1) == COMPLEX_EXPR)
13483 {
13484 real1 = TREE_OPERAND (arg1, 0);
13485 imag1 = TREE_OPERAND (arg1, 1);
13486 }
13487 else
13488 {
13489 real1 = TREE_REALPART (arg1);
13490 imag1 = TREE_IMAGPART (arg1);
13491 }
13492
13493 rcond = fold_binary_loc (loc, code, type, real0, real1);
13494 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
13495 {
13496 if (integer_zerop (rcond))
13497 {
13498 if (code == EQ_EXPR)
13499 return omit_two_operands_loc (loc, type, boolean_false_node,
13500 imag0, imag1);
13501 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
13502 }
13503 else
13504 {
13505 if (code == NE_EXPR)
13506 return omit_two_operands_loc (loc, type, boolean_true_node,
13507 imag0, imag1);
13508 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
13509 }
13510 }
13511
13512 icond = fold_binary_loc (loc, code, type, imag0, imag1);
13513 if (icond && TREE_CODE (icond) == INTEGER_CST)
13514 {
13515 if (integer_zerop (icond))
13516 {
13517 if (code == EQ_EXPR)
13518 return omit_two_operands_loc (loc, type, boolean_false_node,
13519 real0, real1);
13520 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
13521 }
13522 else
13523 {
13524 if (code == NE_EXPR)
13525 return omit_two_operands_loc (loc, type, boolean_true_node,
13526 real0, real1);
13527 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
13528 }
13529 }
13530 }
13531
13532 return NULL_TREE;
13533
13534 case LT_EXPR:
13535 case GT_EXPR:
13536 case LE_EXPR:
13537 case GE_EXPR:
13538 tem = fold_comparison (loc, code, type, op0, op1);
13539 if (tem != NULL_TREE)
13540 return tem;
13541
13542 /* Transform comparisons of the form X +- C CMP X. */
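/* With undefined signed overflow these have known results: e.g.
   x + 1 > x folds to true and x - 1 > x folds to false.  The folds
   below warn under -Wstrict-overflow when they rely on this.  */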
13543 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
13544 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
13545 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
13546 && !HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0))))
13547 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
13548 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
13549 {
13550 tree arg01 = TREE_OPERAND (arg0, 1);
13551 enum tree_code code0 = TREE_CODE (arg0);
13552 int is_positive;
13553
13554 if (TREE_CODE (arg01) == REAL_CST)
13555 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
13556 else
13557 is_positive = tree_int_cst_sgn (arg01);
13558
13559 /* (X - c) > X becomes false. */
13560 if (code == GT_EXPR
13561 && ((code0 == MINUS_EXPR && is_positive >= 0)
13562 || (code0 == PLUS_EXPR && is_positive <= 0)))
13563 {
13564 if (TREE_CODE (arg01) == INTEGER_CST
13565 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13566 fold_overflow_warning (("assuming signed overflow does not "
13567 "occur when assuming that (X - c) > X "
13568 "is always false"),
13569 WARN_STRICT_OVERFLOW_ALL);
13570 return constant_boolean_node (0, type);
13571 }
13572
13573 /* Likewise (X + c) < X becomes false. */
13574 if (code == LT_EXPR
13575 && ((code0 == PLUS_EXPR && is_positive >= 0)
13576 || (code0 == MINUS_EXPR && is_positive <= 0)))
13577 {
13578 if (TREE_CODE (arg01) == INTEGER_CST
13579 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13580 fold_overflow_warning (("assuming signed overflow does not "
13581 "occur when assuming that "
13582 "(X + c) < X is always false"),
13583 WARN_STRICT_OVERFLOW_ALL);
13584 return constant_boolean_node (0, type);
13585 }
13586
13587 /* Convert (X - c) <= X to true. */
13588 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13589 && code == LE_EXPR
13590 && ((code0 == MINUS_EXPR && is_positive >= 0)
13591 || (code0 == PLUS_EXPR && is_positive <= 0)))
13592 {
13593 if (TREE_CODE (arg01) == INTEGER_CST
13594 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13595 fold_overflow_warning (("assuming signed overflow does not "
13596 "occur when assuming that "
13597 "(X - c) <= X is always true"),
13598 WARN_STRICT_OVERFLOW_ALL);
13599 return constant_boolean_node (1, type);
13600 }
13601
13602 /* Convert (X + c) >= X to true. */
13603 if (!HONOR_NANS (TYPE_MODE (TREE_TYPE (arg1)))
13604 && code == GE_EXPR
13605 && ((code0 == PLUS_EXPR && is_positive >= 0)
13606 || (code0 == MINUS_EXPR && is_positive <= 0)))
13607 {
13608 if (TREE_CODE (arg01) == INTEGER_CST
13609 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13610 fold_overflow_warning (("assuming signed overflow does not "
13611 "occur when assuming that "
13612 "(X + c) >= X is always true"),
13613 WARN_STRICT_OVERFLOW_ALL);
13614 return constant_boolean_node (1, type);
13615 }
13616
13617 if (TREE_CODE (arg01) == INTEGER_CST)
13618 {
13619 /* Convert X + c > X and X - c < X to true for integers. */
13620 if (code == GT_EXPR
13621 && ((code0 == PLUS_EXPR && is_positive > 0)
13622 || (code0 == MINUS_EXPR && is_positive < 0)))
13623 {
13624 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13625 fold_overflow_warning (("assuming signed overflow does "
13626 "not occur when assuming that "
13627 "(X + c) > X is always true"),
13628 WARN_STRICT_OVERFLOW_ALL);
13629 return constant_boolean_node (1, type);
13630 }
13631
13632 if (code == LT_EXPR
13633 && ((code0 == MINUS_EXPR && is_positive > 0)
13634 || (code0 == PLUS_EXPR && is_positive < 0)))
13635 {
13636 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13637 fold_overflow_warning (("assuming signed overflow does "
13638 "not occur when assuming that "
13639 "(X - c) < X is always true"),
13640 WARN_STRICT_OVERFLOW_ALL);
13641 return constant_boolean_node (1, type);
13642 }
13643
13644 /* Convert X + c <= X and X - c >= X to false for integers. */
13645 if (code == LE_EXPR
13646 && ((code0 == PLUS_EXPR && is_positive > 0)
13647 || (code0 == MINUS_EXPR && is_positive < 0)))
13648 {
13649 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13650 fold_overflow_warning (("assuming signed overflow does "
13651 "not occur when assuming that "
13652 "(X + c) <= X is always false"),
13653 WARN_STRICT_OVERFLOW_ALL);
13654 return constant_boolean_node (0, type);
13655 }
13656
13657 if (code == GE_EXPR
13658 && ((code0 == MINUS_EXPR && is_positive > 0)
13659 || (code0 == PLUS_EXPR && is_positive < 0)))
13660 {
13661 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
13662 fold_overflow_warning (("assuming signed overflow does "
13663 "not occur when assuming that "
13664 "(X - c) >= X is always false"),
13665 WARN_STRICT_OVERFLOW_ALL);
13666 return constant_boolean_node (0, type);
13667 }
13668 }
13669 }
13670
13671 /* Comparisons with the highest or lowest possible integer of
13672 the specified precision will have known values. */
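/* E.g. for unsigned char X, X > 255 folds to false, X <= 255 to
   true, X >= 255 to X == 255 and X < 255 to X != 255.  */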
13673 {
13674 tree arg1_type = TREE_TYPE (arg1);
13675 unsigned int width = TYPE_PRECISION (arg1_type);
13676
13677 if (TREE_CODE (arg1) == INTEGER_CST
13678 && width <= HOST_BITS_PER_DOUBLE_INT
13679 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
13680 {
13681 HOST_WIDE_INT signed_max_hi;
13682 unsigned HOST_WIDE_INT signed_max_lo;
13683 unsigned HOST_WIDE_INT max_hi, max_lo, min_hi, min_lo;
13684
13685 if (width <= HOST_BITS_PER_WIDE_INT)
13686 {
13687 signed_max_lo = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13688 - 1;
13689 signed_max_hi = 0;
13690 max_hi = 0;
13691
13692 if (TYPE_UNSIGNED (arg1_type))
13693 {
13694 max_lo = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13695 min_lo = 0;
13696 min_hi = 0;
13697 }
13698 else
13699 {
13700 max_lo = signed_max_lo;
13701 min_lo = (HOST_WIDE_INT_M1U << (width - 1));
13702 min_hi = -1;
13703 }
13704 }
13705 else
13706 {
13707 width -= HOST_BITS_PER_WIDE_INT;
13708 signed_max_lo = -1;
13709 signed_max_hi = ((unsigned HOST_WIDE_INT) 1 << (width - 1))
13710 - 1;
13711 max_lo = -1;
13712 min_lo = 0;
13713
13714 if (TYPE_UNSIGNED (arg1_type))
13715 {
13716 max_hi = ((unsigned HOST_WIDE_INT) 2 << (width - 1)) - 1;
13717 min_hi = 0;
13718 }
13719 else
13720 {
13721 max_hi = signed_max_hi;
13722 min_hi = (HOST_WIDE_INT_M1U << (width - 1));
13723 }
13724 }
13725
13726 if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1) == max_hi
13727 && TREE_INT_CST_LOW (arg1) == max_lo)
13728 switch (code)
13729 {
13730 case GT_EXPR:
13731 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13732
13733 case GE_EXPR:
13734 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13735
13736 case LE_EXPR:
13737 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13738
13739 case LT_EXPR:
13740 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13741
13742 /* The GE_EXPR and LT_EXPR cases above are not normally
13743 reached because of previous transformations. */
13744
13745 default:
13746 break;
13747 }
13748 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13749 == max_hi
13750 && TREE_INT_CST_LOW (arg1) == max_lo - 1)
13751 switch (code)
13752 {
13753 case GT_EXPR:
13754 arg1 = const_binop (PLUS_EXPR, arg1,
13755 build_int_cst (TREE_TYPE (arg1), 1));
13756 return fold_build2_loc (loc, EQ_EXPR, type,
13757 fold_convert_loc (loc,
13758 TREE_TYPE (arg1), arg0),
13759 arg1);
13760 case LE_EXPR:
13761 arg1 = const_binop (PLUS_EXPR, arg1,
13762 build_int_cst (TREE_TYPE (arg1), 1));
13763 return fold_build2_loc (loc, NE_EXPR, type,
13764 fold_convert_loc (loc, TREE_TYPE (arg1),
13765 arg0),
13766 arg1);
13767 default:
13768 break;
13769 }
13770 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13771 == min_hi
13772 && TREE_INT_CST_LOW (arg1) == min_lo)
13773 switch (code)
13774 {
13775 case LT_EXPR:
13776 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13777
13778 case LE_EXPR:
13779 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13780
13781 case GE_EXPR:
13782 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13783
13784 case GT_EXPR:
13785 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13786
13787 default:
13788 break;
13789 }
13790 else if ((unsigned HOST_WIDE_INT) TREE_INT_CST_HIGH (arg1)
13791 == min_hi
13792 && TREE_INT_CST_LOW (arg1) == min_lo + 1)
13793 switch (code)
13794 {
13795 case GE_EXPR:
13796 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13797 return fold_build2_loc (loc, NE_EXPR, type,
13798 fold_convert_loc (loc,
13799 TREE_TYPE (arg1), arg0),
13800 arg1);
13801 case LT_EXPR:
13802 arg1 = const_binop (MINUS_EXPR, arg1, integer_one_node);
13803 return fold_build2_loc (loc, EQ_EXPR, type,
13804 fold_convert_loc (loc, TREE_TYPE (arg1),
13805 arg0),
13806 arg1);
13807 default:
13808 break;
13809 }
13810
13811 else if (TREE_INT_CST_HIGH (arg1) == signed_max_hi
13812 && TREE_INT_CST_LOW (arg1) == signed_max_lo
13813 && TYPE_UNSIGNED (arg1_type)
13814 /* We will flip the signedness of the comparison operator
13815 associated with the mode of arg1, so the sign bit is
13816 specified by this mode. Check that arg1 is the signed
13817 max associated with this sign bit. */
13818 && width == GET_MODE_BITSIZE (TYPE_MODE (arg1_type))
13819 /* signed_type does not work on pointer types. */
13820 && INTEGRAL_TYPE_P (arg1_type))
13821 {
13822 /* The following case also applies to X < signed_max+1
13823 and X >= signed_max+1 because of previous transformations. */
13824 if (code == LE_EXPR || code == GT_EXPR)
13825 {
13826 tree st = signed_type_for (arg1_type);
13827 return fold_build2_loc (loc,
13828 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13829 type, fold_convert_loc (loc, st, arg0),
13830 build_int_cst (st, 0));
13831 }
13832 }
13833 }
13834 }
13835
13836 /* If we are comparing an ABS_EXPR with a constant, we can
13837 convert all the cases into explicit comparisons, but they may
13838 well not be faster than doing the ABS and one comparison.
13839 But ABS (X) <= C is a range comparison, which becomes a subtraction
13840 and a comparison, and is probably faster. */
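/* E.g. ABS (x) <= 5 becomes x >= -5 && x <= 5.  */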
13841 if (code == LE_EXPR
13842 && TREE_CODE (arg1) == INTEGER_CST
13843 && TREE_CODE (arg0) == ABS_EXPR
13844 && ! TREE_SIDE_EFFECTS (arg0)
13845 && (0 != (tem = negate_expr (arg1)))
13846 && TREE_CODE (tem) == INTEGER_CST
13847 && !TREE_OVERFLOW (tem))
13848 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13849 build2 (GE_EXPR, type,
13850 TREE_OPERAND (arg0, 0), tem),
13851 build2 (LE_EXPR, type,
13852 TREE_OPERAND (arg0, 0), arg1));
13853
13854 /* Convert ABS_EXPR<x> >= 0 to true. */
13855 strict_overflow_p = false;
13856 if (code == GE_EXPR
13857 && (integer_zerop (arg1)
13858 || (! HONOR_NANS (TYPE_MODE (TREE_TYPE (arg0)))
13859 && real_zerop (arg1)))
13860 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13861 {
13862 if (strict_overflow_p)
13863 fold_overflow_warning (("assuming signed overflow does not occur "
13864 "when simplifying comparison of "
13865 "absolute value and zero"),
13866 WARN_STRICT_OVERFLOW_CONDITIONAL);
13867 return omit_one_operand_loc (loc, type,
13868 constant_boolean_node (true, type),
13869 arg0);
13870 }
13871
13872 /* Convert ABS_EXPR<x> < 0 to false. */
13873 strict_overflow_p = false;
13874 if (code == LT_EXPR
13875 && (integer_zerop (arg1) || real_zerop (arg1))
13876 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13877 {
13878 if (strict_overflow_p)
13879 fold_overflow_warning (("assuming signed overflow does not occur "
13880 "when simplifying comparison of "
13881 "absolute value and zero"),
13882 WARN_STRICT_OVERFLOW_CONDITIONAL);
13883 return omit_one_operand_loc (loc, type,
13884 constant_boolean_node (false, type),
13885 arg0);
13886 }
13887
13888 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13889 and similarly for >= into !=. */
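/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0, as x is
   less than 1 << y exactly when no bit at position y or above is
   set.  */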
13890 if ((code == LT_EXPR || code == GE_EXPR)
13891 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13892 && TREE_CODE (arg1) == LSHIFT_EXPR
13893 && integer_onep (TREE_OPERAND (arg1, 0)))
13894 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13895 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13896 TREE_OPERAND (arg1, 1)),
13897 build_zero_cst (TREE_TYPE (arg0)));
13898
13899 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13900 otherwise Y might be >= # of bits in X's type and thus e.g.
13901 (unsigned char) (1 << Y) for Y == 15 might be 0.
13902 If the cast is widening, then 1 << Y should have unsigned type,
13903 otherwise if Y is the number of bits in the signed shift type minus 1,
13904 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13905 Y == 31 might be 0xffffffff80000000. */
13906 if ((code == LT_EXPR || code == GE_EXPR)
13907 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13908 && CONVERT_EXPR_P (arg1)
13909 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13910 && (TYPE_PRECISION (TREE_TYPE (arg1))
13911 >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13912 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13913 || (TYPE_PRECISION (TREE_TYPE (arg1))
13914 == TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13915 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13916 {
13917 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13918 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13919 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13920 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13921 build_zero_cst (TREE_TYPE (arg0)));
13922 }
13923
13924 return NULL_TREE;
13925
13926 case UNORDERED_EXPR:
13927 case ORDERED_EXPR:
13928 case UNLT_EXPR:
13929 case UNLE_EXPR:
13930 case UNGT_EXPR:
13931 case UNGE_EXPR:
13932 case UNEQ_EXPR:
13933 case LTGT_EXPR:
13934 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13935 {
13936 t1 = fold_relational_const (code, type, arg0, arg1);
13937 if (t1 != NULL_TREE)
13938 return t1;
13939 }
13940
13941 /* If the first operand is NaN, the result is constant. */
13942 if (TREE_CODE (arg0) == REAL_CST
13943 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13944 && (code != LTGT_EXPR || ! flag_trapping_math))
13945 {
13946 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13947 ? integer_zero_node
13948 : integer_one_node;
13949 return omit_one_operand_loc (loc, type, t1, arg1);
13950 }
13951
13952 /* If the second operand is NaN, the result is constant. */
13953 if (TREE_CODE (arg1) == REAL_CST
13954 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13955 && (code != LTGT_EXPR || ! flag_trapping_math))
13956 {
13957 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13958 ? integer_zero_node
13959 : integer_one_node;
13960 return omit_one_operand_loc (loc, type, t1, arg0);
13961 }
13962
13963 /* Simplify unordered comparison of something with itself. */
13964 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13965 && operand_equal_p (arg0, arg1, 0))
13966 return constant_boolean_node (1, type);
13967
13968 if (code == LTGT_EXPR
13969 && !flag_trapping_math
13970 && operand_equal_p (arg0, arg1, 0))
13971 return constant_boolean_node (0, type);
13972
13973 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13974 {
13975 tree targ0 = strip_float_extensions (arg0);
13976 tree targ1 = strip_float_extensions (arg1);
13977 tree newtype = TREE_TYPE (targ0);
13978
13979 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13980 newtype = TREE_TYPE (targ1);
13981
13982 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13983 return fold_build2_loc (loc, code, type,
13984 fold_convert_loc (loc, newtype, targ0),
13985 fold_convert_loc (loc, newtype, targ1));
13986 }
13987
13988 return NULL_TREE;
13989
13990 case COMPOUND_EXPR:
13991 /* When pedantic, a compound expression can be neither an lvalue
13992 nor an integer constant expression. */
13993 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13994 return NULL_TREE;
13995 /* Don't let (0, 0) be a null pointer constant. */
13996 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13997 : fold_convert_loc (loc, type, arg1);
13998 return pedantic_non_lvalue_loc (loc, tem);
13999
14000 case COMPLEX_EXPR:
14001 if ((TREE_CODE (arg0) == REAL_CST
14002 && TREE_CODE (arg1) == REAL_CST)
14003 || (TREE_CODE (arg0) == INTEGER_CST
14004 && TREE_CODE (arg1) == INTEGER_CST))
14005 return build_complex (type, arg0, arg1);
14006 if (TREE_CODE (arg0) == REALPART_EXPR
14007 && TREE_CODE (arg1) == IMAGPART_EXPR
14008 && TREE_TYPE (TREE_OPERAND (arg0, 0)) == type
14009 && operand_equal_p (TREE_OPERAND (arg0, 0),
14010 TREE_OPERAND (arg1, 0), 0))
14011 return omit_one_operand_loc (loc, type, TREE_OPERAND (arg0, 0),
14012 TREE_OPERAND (arg1, 0));
14013 return NULL_TREE;
14014
14015 case ASSERT_EXPR:
14016 /* An ASSERT_EXPR should never be passed to fold_binary. */
14017 gcc_unreachable ();
14018
14019 case VEC_PACK_TRUNC_EXPR:
14020 case VEC_PACK_FIX_TRUNC_EXPR:
14021 {
14022 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14023 tree *elts;
14024
14025 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts / 2
14026 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2);
14027 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14028 return NULL_TREE;
14029
14030 elts = XALLOCAVEC (tree, nelts);
14031 if (!vec_cst_ctor_to_array (arg0, elts)
14032 || !vec_cst_ctor_to_array (arg1, elts + nelts / 2))
14033 return NULL_TREE;
14034
14035 for (i = 0; i < nelts; i++)
14036 {
14037 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
14038 ? NOP_EXPR : FIX_TRUNC_EXPR,
14039 TREE_TYPE (type), elts[i]);
14040 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
14041 return NULL_TREE;
14042 }
14043
14044 return build_vector (type, elts);
14045 }
14046
14047 case VEC_WIDEN_MULT_LO_EXPR:
14048 case VEC_WIDEN_MULT_HI_EXPR:
14049 case VEC_WIDEN_MULT_EVEN_EXPR:
14050 case VEC_WIDEN_MULT_ODD_EXPR:
14051 {
14052 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
14053 unsigned int out, ofs, scale;
14054 tree *elts;
14055
14056 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2
14057 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2);
14058 if (TREE_CODE (arg0) != VECTOR_CST || TREE_CODE (arg1) != VECTOR_CST)
14059 return NULL_TREE;
14060
14061 elts = XALLOCAVEC (tree, nelts * 4);
14062 if (!vec_cst_ctor_to_array (arg0, elts)
14063 || !vec_cst_ctor_to_array (arg1, elts + nelts * 2))
14064 return NULL_TREE;
14065
14066 if (code == VEC_WIDEN_MULT_LO_EXPR)
14067 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
14068 else if (code == VEC_WIDEN_MULT_HI_EXPR)
14069 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
14070 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
14071 scale = 1, ofs = 0;
14072 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
14073 scale = 1, ofs = 1;
14074
14075 for (out = 0; out < nelts; out++)
14076 {
14077 unsigned int in1 = (out << scale) + ofs;
14078 unsigned int in2 = in1 + nelts * 2;
14079 tree t1, t2;
14080
14081 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
14082 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
14083
14084 if (t1 == NULL_TREE || t2 == NULL_TREE)
14085 return NULL_TREE;
14086 elts[out] = const_binop (MULT_EXPR, t1, t2);
14087 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
14088 return NULL_TREE;
14089 }
14090
14091 return build_vector (type, elts);
14092 }
14093
14094 default:
14095 return NULL_TREE;
14096 } /* switch (code) */
14097 }
14098
14099 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
14100 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
14101 of GOTO_EXPR. */
14102
14103 static tree
14104 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
14105 {
14106 switch (TREE_CODE (*tp))
14107 {
14108 case LABEL_EXPR:
14109 return *tp;
14110
14111 case GOTO_EXPR:
14112 *walk_subtrees = 0;
14113
14114 /* ... fall through ... */
14115
14116 default:
14117 return NULL_TREE;
14118 }
14119 }
14120
14121 /* Return whether the sub-tree ST contains a label which is accessible from
14122 outside the sub-tree. */
14123
14124 static bool
14125 contains_label_p (tree st)
14126 {
14127 return
14128 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
14129 }
14130
14131 /* Fold a ternary expression of code CODE and type TYPE with operands
14132 OP0, OP1, and OP2. Return the folded expression if folding is
14133 successful. Otherwise, return NULL_TREE. */
14134
14135 tree
14136 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
14137 tree op0, tree op1, tree op2)
14138 {
14139 tree tem;
14140 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
14141 enum tree_code_class kind = TREE_CODE_CLASS (code);
14142
14143 gcc_assert (IS_EXPR_CODE_CLASS (kind)
14144 && TREE_CODE_LENGTH (code) == 3);
14145
14146 /* Strip any conversions that don't change the mode. This is safe
14147 for every expression, except for a comparison expression because
14148 its signedness is derived from its operands. So, in the latter
14149 case, only strip conversions that don't change the signedness.
14150
14151 Note that this is done as an internal manipulation within the
14152 constant folder, in order to find the simplest representation of
14153 the arguments so that their form can be studied. In any case,
14154 the appropriate type conversions should be put back in the tree
14155 that will get out of the constant folder. */
14156 if (op0)
14157 {
14158 arg0 = op0;
14159 STRIP_NOPS (arg0);
14160 }
14161
14162 if (op1)
14163 {
14164 arg1 = op1;
14165 STRIP_NOPS (arg1);
14166 }
14167
14168 if (op2)
14169 {
14170 arg2 = op2;
14171 STRIP_NOPS (arg2);
14172 }
14173
14174 switch (code)
14175 {
14176 case COMPONENT_REF:
14177 if (TREE_CODE (arg0) == CONSTRUCTOR
14178 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
14179 {
14180 unsigned HOST_WIDE_INT idx;
14181 tree field, value;
14182 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
14183 if (field == arg1)
14184 return value;
14185 }
14186 return NULL_TREE;
14187
14188 case COND_EXPR:
14189 case VEC_COND_EXPR:
14190 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
14191 so all simple results must be passed through pedantic_non_lvalue. */
14192 if (TREE_CODE (arg0) == INTEGER_CST)
14193 {
14194 tree unused_op = integer_zerop (arg0) ? op1 : op2;
14195 tem = integer_zerop (arg0) ? op2 : op1;
14196 /* Only optimize constant conditions when the selected branch
14197 has the same type as the COND_EXPR. This avoids optimizing
14198 away "c ? x : throw", where the throw has a void type.
14199 Avoid throwing away that operand which contains a label. */
14200 if ((!TREE_SIDE_EFFECTS (unused_op)
14201 || !contains_label_p (unused_op))
14202 && (! VOID_TYPE_P (TREE_TYPE (tem))
14203 || VOID_TYPE_P (type)))
14204 return pedantic_non_lvalue_loc (loc, tem);
14205 return NULL_TREE;
14206 }
14207 else if (TREE_CODE (arg0) == VECTOR_CST)
14208 {
14209 if (integer_all_onesp (arg0))
14210 return pedantic_omit_one_operand_loc (loc, type, arg1, arg2);
14211 if (integer_zerop (arg0))
14212 return pedantic_omit_one_operand_loc (loc, type, arg2, arg1);
14213
14214 if ((TREE_CODE (arg1) == VECTOR_CST
14215 || TREE_CODE (arg1) == CONSTRUCTOR)
14216 && (TREE_CODE (arg2) == VECTOR_CST
14217 || TREE_CODE (arg2) == CONSTRUCTOR))
14218 {
14219 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
14220 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14221 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
14222 for (i = 0; i < nelts; i++)
14223 {
14224 tree val = VECTOR_CST_ELT (arg0, i);
14225 if (integer_all_onesp (val))
14226 sel[i] = i;
14227 else if (integer_zerop (val))
14228 sel[i] = nelts + i;
14229 else /* Currently unreachable. */
14230 return NULL_TREE;
14231 }
14232 tree t = fold_vec_perm (type, arg1, arg2, sel);
14233 if (t != NULL_TREE)
14234 return t;
14235 }
14236 }
14237
14238 if (operand_equal_p (arg1, op2, 0))
14239 return pedantic_omit_one_operand_loc (loc, type, arg1, arg0);
14240
14241 /* If we have A op B ? A : C, we may be able to convert this to a
14242 simpler expression, depending on the operation and the values
14243 of B and C. Signed zeros prevent all of these transformations,
14244 for reasons given above each one.
14245
14246 Also try swapping the arguments and inverting the conditional. */
14247 if (COMPARISON_CLASS_P (arg0)
14248 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14249 arg1, TREE_OPERAND (arg0, 1))
14250 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (arg1))))
14251 {
14252 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
14253 if (tem)
14254 return tem;
14255 }
14256
14257 if (COMPARISON_CLASS_P (arg0)
14258 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
14259 op2,
14260 TREE_OPERAND (arg0, 1))
14261 && !HONOR_SIGNED_ZEROS (TYPE_MODE (TREE_TYPE (op2))))
14262 {
14263 location_t loc0 = expr_location_or (arg0, loc);
14264 tem = fold_invert_truthvalue (loc0, arg0);
14265 if (tem && COMPARISON_CLASS_P (tem))
14266 {
14267 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
14268 if (tem)
14269 return tem;
14270 }
14271 }
14272
14273 /* If the second operand is simpler than the third, swap them
14274 since that produces better jump optimization results. */
14275 if (truth_value_p (TREE_CODE (arg0))
14276 && tree_swap_operands_p (op1, op2, false))
14277 {
14278 location_t loc0 = expr_location_or (arg0, loc);
14279 /* See if this can be inverted. If it can't, possibly because
14280 it was a floating-point inequality comparison, don't do
14281 anything. */
14282 tem = fold_invert_truthvalue (loc0, arg0);
14283 if (tem)
14284 return fold_build3_loc (loc, code, type, tem, op2, op1);
14285 }
14286
14287 /* Convert A ? 1 : 0 to simply A. */
14288 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
14289 : (integer_onep (op1)
14290 && !VECTOR_TYPE_P (type)))
14291 && integer_zerop (op2)
14292 /* If we try to convert OP0 to our type, the
14293 call to fold will try to move the conversion inside
14294 a COND, which will recurse. In that case, the COND_EXPR
14295 is probably the best choice, so leave it alone. */
14296 && type == TREE_TYPE (arg0))
14297 return pedantic_non_lvalue_loc (loc, arg0);
14298
14299 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
14300 over COND_EXPR in cases such as floating point comparisons. */
14301 if (integer_zerop (op1)
14302 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
14303 : (integer_onep (op2)
14304 && !VECTOR_TYPE_P (type)))
14305 && truth_value_p (TREE_CODE (arg0)))
14306 return pedantic_non_lvalue_loc (loc,
14307 fold_convert_loc (loc, type,
14308 invert_truthvalue_loc (loc,
14309 arg0)));
14310
14311 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
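/* E.g. for a 32-bit A, A < 0 ? 0x80000000 : 0 yields exactly the
   sign bit of A, which is A & 0x80000000.  */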
14312 if (TREE_CODE (arg0) == LT_EXPR
14313 && integer_zerop (TREE_OPERAND (arg0, 1))
14314 && integer_zerop (op2)
14315 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
14316 {
14317 /* sign_bit_p looks through both zero and sign extensions,
14318 but for this optimization only sign extensions are
14319 usable. */
14320 tree tem2 = TREE_OPERAND (arg0, 0);
14321 while (tem != tem2)
14322 {
14323 if (TREE_CODE (tem2) != NOP_EXPR
14324 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
14325 {
14326 tem = NULL_TREE;
14327 break;
14328 }
14329 tem2 = TREE_OPERAND (tem2, 0);
14330 }
14331 /* sign_bit_p only checks ARG1 bits within A's precision.
14332 If <sign bit of A> has a wider type than A, bits outside
14333 of A's precision in <sign bit of A> need to be checked.
14334 If they are all 0, this optimization needs to be done
14335 in unsigned A's type; if they are all 1, in signed A's
14336 type; otherwise this can't be done. */
14337 if (tem
14338 && TYPE_PRECISION (TREE_TYPE (tem))
14339 < TYPE_PRECISION (TREE_TYPE (arg1))
14340 && TYPE_PRECISION (TREE_TYPE (tem))
14341 < TYPE_PRECISION (type))
14342 {
14343 unsigned HOST_WIDE_INT mask_lo;
14344 HOST_WIDE_INT mask_hi;
14345 int inner_width, outer_width;
14346 tree tem_type;
14347
14348 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
14349 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
14350 if (outer_width > TYPE_PRECISION (type))
14351 outer_width = TYPE_PRECISION (type);
14352
14353 if (outer_width > HOST_BITS_PER_WIDE_INT)
14354 {
14355 mask_hi = (HOST_WIDE_INT_M1U
14356 >> (HOST_BITS_PER_DOUBLE_INT - outer_width));
14357 mask_lo = -1;
14358 }
14359 else
14360 {
14361 mask_hi = 0;
14362 mask_lo = (HOST_WIDE_INT_M1U
14363 >> (HOST_BITS_PER_WIDE_INT - outer_width));
14364 }
14365 if (inner_width > HOST_BITS_PER_WIDE_INT)
14366 {
14367 mask_hi &= ~(HOST_WIDE_INT_M1U
14368 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14369 mask_lo = 0;
14370 }
14371 else
14372 mask_lo &= ~(HOST_WIDE_INT_M1U
14373 >> (HOST_BITS_PER_WIDE_INT - inner_width));
14374
14375 if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == mask_hi
14376 && (TREE_INT_CST_LOW (arg1) & mask_lo) == mask_lo)
14377 {
14378 tem_type = signed_type_for (TREE_TYPE (tem));
14379 tem = fold_convert_loc (loc, tem_type, tem);
14380 }
14381 else if ((TREE_INT_CST_HIGH (arg1) & mask_hi) == 0
14382 && (TREE_INT_CST_LOW (arg1) & mask_lo) == 0)
14383 {
14384 tem_type = unsigned_type_for (TREE_TYPE (tem));
14385 tem = fold_convert_loc (loc, tem_type, tem);
14386 }
14387 else
14388 tem = NULL;
14389 }
14390
14391 if (tem)
14392 return
14393 fold_convert_loc (loc, type,
14394 fold_build2_loc (loc, BIT_AND_EXPR,
14395 TREE_TYPE (tem), tem,
14396 fold_convert_loc (loc,
14397 TREE_TYPE (tem),
14398 arg1)));
14399 }
14400
14401 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
14402 already handled above. */
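/* E.g. ((a >> 3) & 1) ? 8 : 0 selects bit 3 of a, which is just
   a & 8.  */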
14403 if (TREE_CODE (arg0) == BIT_AND_EXPR
14404 && integer_onep (TREE_OPERAND (arg0, 1))
14405 && integer_zerop (op2)
14406 && integer_pow2p (arg1))
14407 {
14408 tree tem = TREE_OPERAND (arg0, 0);
14409 STRIP_NOPS (tem);
14410 if (TREE_CODE (tem) == RSHIFT_EXPR
14411 && TREE_CODE (TREE_OPERAND (tem, 1)) == INTEGER_CST
14412 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
14413 TREE_INT_CST_LOW (TREE_OPERAND (tem, 1)))
14414 return fold_build2_loc (loc, BIT_AND_EXPR, type,
14415 TREE_OPERAND (tem, 0), arg1);
14416 }
14417
14418 /* A & N ? N : 0 is simply A & N if N is a power of two. This
14419 is probably obsolete because the first operand should be a
14420 truth value (that's why we have the two cases above), but let's
14421 leave it in until we can confirm this for all front-ends. */
14422 if (integer_zerop (op2)
14423 && TREE_CODE (arg0) == NE_EXPR
14424 && integer_zerop (TREE_OPERAND (arg0, 1))
14425 && integer_pow2p (arg1)
14426 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
14427 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
14428 arg1, OEP_ONLY_CONST))
14429 return pedantic_non_lvalue_loc (loc,
14430 fold_convert_loc (loc, type,
14431 TREE_OPERAND (arg0, 0)));
14432
14433 /* Disable the transformations below for vectors, since
14434 fold_binary_op_with_conditional_arg may undo them immediately,
14435 yielding an infinite loop. */
14436 if (code == VEC_COND_EXPR)
14437 return NULL_TREE;
14438
14439 /* Convert A ? B : 0 into A && B if A and B are truth values. */
14440 if (integer_zerop (op2)
14441 && truth_value_p (TREE_CODE (arg0))
14442 && truth_value_p (TREE_CODE (arg1))
14443 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14444 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
14445 : TRUTH_ANDIF_EXPR,
14446 type, fold_convert_loc (loc, type, arg0), arg1);
14447
14448 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
14449 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
14450 && truth_value_p (TREE_CODE (arg0))
14451 && truth_value_p (TREE_CODE (arg1))
14452 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14453 {
14454 location_t loc0 = expr_location_or (arg0, loc);
14455 /* Only perform transformation if ARG0 is easily inverted. */
14456 tem = fold_invert_truthvalue (loc0, arg0);
14457 if (tem)
14458 return fold_build2_loc (loc, code == VEC_COND_EXPR
14459 ? BIT_IOR_EXPR
14460 : TRUTH_ORIF_EXPR,
14461 type, fold_convert_loc (loc, type, tem),
14462 arg1);
14463 }
14464
14465 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
14466 if (integer_zerop (arg1)
14467 && truth_value_p (TREE_CODE (arg0))
14468 && truth_value_p (TREE_CODE (op2))
14469 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14470 {
14471 location_t loc0 = expr_location_or (arg0, loc);
14472 /* Only perform transformation if ARG0 is easily inverted. */
14473 tem = fold_invert_truthvalue (loc0, arg0);
14474 if (tem)
14475 return fold_build2_loc (loc, code == VEC_COND_EXPR
14476 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
14477 type, fold_convert_loc (loc, type, tem),
14478 op2);
14479 }
14480
14481 /* Convert A ? 1 : B into A || B if A and B are truth values. */
14482 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
14483 && truth_value_p (TREE_CODE (arg0))
14484 && truth_value_p (TREE_CODE (op2))
14485 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
14486 return fold_build2_loc (loc, code == VEC_COND_EXPR
14487 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
14488 type, fold_convert_loc (loc, type, arg0), op2);
14489
14490 return NULL_TREE;
14491
14492 case CALL_EXPR:
14493 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
14494 of fold_ternary on them. */
14495 gcc_unreachable ();
14496
14497 case BIT_FIELD_REF:
14498 if ((TREE_CODE (arg0) == VECTOR_CST
14499 || (TREE_CODE (arg0) == CONSTRUCTOR
14500 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
14501 && (type == TREE_TYPE (TREE_TYPE (arg0))
14502 || (TREE_CODE (type) == VECTOR_TYPE
14503 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
14504 {
14505 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
14506 unsigned HOST_WIDE_INT width = tree_low_cst (TYPE_SIZE (eltype), 1);
14507 unsigned HOST_WIDE_INT n = tree_low_cst (arg1, 1);
14508 unsigned HOST_WIDE_INT idx = tree_low_cst (op2, 1);
14509
14510 if (n != 0
14511 && (idx % width) == 0
14512 && (n % width) == 0
14513 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
14514 {
14515 idx = idx / width;
14516 n = n / width;
14517
14518 if (TREE_CODE (arg0) == VECTOR_CST)
14519 {
14520 if (n == 1)
14521 return VECTOR_CST_ELT (arg0, idx);
14522
14523 tree *vals = XALLOCAVEC (tree, n);
14524 for (unsigned i = 0; i < n; ++i)
14525 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
14526 return build_vector (type, vals);
14527 }
14528
14529 /* Constructor elements can be subvectors. */
14530 unsigned HOST_WIDE_INT k = 1;
14531 if (CONSTRUCTOR_NELTS (arg0) != 0)
14532 {
14533 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
14534 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
14535 k = TYPE_VECTOR_SUBPARTS (cons_elem);
14536 }
14537
14538 /* We keep an exact subset of the constructor elements. */
14539 if ((idx % k) == 0 && (n % k) == 0)
14540 {
14541 if (CONSTRUCTOR_NELTS (arg0) == 0)
14542 return build_constructor (type, NULL);
14543 idx /= k;
14544 n /= k;
14545 if (n == 1)
14546 {
14547 if (idx < CONSTRUCTOR_NELTS (arg0))
14548 return CONSTRUCTOR_ELT (arg0, idx)->value;
14549 return build_zero_cst (type);
14550 }
14551
14552 vec<constructor_elt, va_gc> *vals;
14553 vec_alloc (vals, n);
14554 for (unsigned i = 0;
14555 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
14556 ++i)
14557 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
14558 CONSTRUCTOR_ELT
14559 (arg0, idx + i)->value);
14560 return build_constructor (type, vals);
14561 }
14562 /* The bitfield references a single constructor element. */
14563 else if (idx + n <= (idx / k + 1) * k)
14564 {
14565 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
14566 return build_zero_cst (type);
14567 else if (n == k)
14568 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
14569 else
14570 return fold_build3_loc (loc, code, type,
14571 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
14572 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
14573 }
14574 }
14575 }
14576
14577 /* A bit-field-ref that referenced the full argument can be stripped. */
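/* E.g. BIT_FIELD_REF <a, 32, 0> of a 32-bit integral a is simply
   a converted to TYPE.  */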
14578 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
14579 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_low_cst (arg1, 1)
14580 && integer_zerop (op2))
14581 return fold_convert_loc (loc, type, arg0);
14582
14583 /* On constants we can use native encode/interpret to constant
14584 fold (nearly) all BIT_FIELD_REFs. */
14585 if (CONSTANT_CLASS_P (arg0)
14586 && can_native_interpret_type_p (type)
14587 && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1)
14588 /* This limitation should not be necessary, we just need to
14589 round this up to mode size. */
14590 && tree_low_cst (op1, 1) % BITS_PER_UNIT == 0
14591 /* Need bit-shifting of the buffer to relax the following. */
14592 && tree_low_cst (op2, 1) % BITS_PER_UNIT == 0)
14593 {
14594 unsigned HOST_WIDE_INT bitpos = tree_low_cst (op2, 1);
14595 unsigned HOST_WIDE_INT bitsize = tree_low_cst (op1, 1);
14596 unsigned HOST_WIDE_INT clen;
14597 clen = tree_low_cst (TYPE_SIZE_UNIT (TREE_TYPE (arg0)), 1);
14598 /* ??? We cannot tell native_encode_expr to start at
14599 some random byte only. So limit us to a reasonable amount
14600 of work. */
14601 if (clen <= 4096)
14602 {
14603 unsigned char *b = XALLOCAVEC (unsigned char, clen);
14604 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
14605 if (len > 0
14606 && len * BITS_PER_UNIT >= bitpos + bitsize)
14607 {
14608 tree v = native_interpret_expr (type,
14609 b + bitpos / BITS_PER_UNIT,
14610 bitsize / BITS_PER_UNIT);
14611 if (v)
14612 return v;
14613 }
14614 }
14615 }
14616
14617 return NULL_TREE;
14618
14619 case FMA_EXPR:
14620 /* For integers we can decompose the FMA if possible. */
14621 if (TREE_CODE (arg0) == INTEGER_CST
14622 && TREE_CODE (arg1) == INTEGER_CST)
14623 return fold_build2_loc (loc, PLUS_EXPR, type,
14624 const_binop (MULT_EXPR, arg0, arg1), arg2);
14625 if (integer_zerop (arg2))
14626 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
14627
14628 return fold_fma (loc, type, arg0, arg1, arg2);
14629
14630 case VEC_PERM_EXPR:
14631 if (TREE_CODE (arg2) == VECTOR_CST)
14632 {
14633 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask;
14634 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
14635 tree t;
14636 bool need_mask_canon = false;
14637 bool all_in_vec0 = true;
14638 bool all_in_vec1 = true;
14639 bool maybe_identity = true;
14640 bool single_arg = (op0 == op1);
14641 bool changed = false;
14642
14643 mask = single_arg ? (nelts - 1) : (2 * nelts - 1);
14644 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
14645 for (i = 0; i < nelts; i++)
14646 {
14647 tree val = VECTOR_CST_ELT (arg2, i);
14648 if (TREE_CODE (val) != INTEGER_CST)
14649 return NULL_TREE;
14650
14651 sel[i] = TREE_INT_CST_LOW (val) & mask;
14652 if (TREE_INT_CST_HIGH (val)
14653 || ((unsigned HOST_WIDE_INT)
14654 TREE_INT_CST_LOW (val) != sel[i]))
14655 need_mask_canon = true;
14656
14657 if (sel[i] < nelts)
14658 all_in_vec1 = false;
14659 else
14660 all_in_vec0 = false;
14661
14662 if ((sel[i] & (nelts-1)) != i)
14663 maybe_identity = false;
14664 }
14665
14666 if (maybe_identity)
14667 {
14668 if (all_in_vec0)
14669 return op0;
14670 if (all_in_vec1)
14671 return op1;
14672 }
14673
14674 if (all_in_vec0)
14675 op1 = op0;
14676 else if (all_in_vec1)
14677 {
14678 op0 = op1;
14679 for (i = 0; i < nelts; i++)
14680 sel[i] -= nelts;
14681 need_mask_canon = true;
14682 }
14683
14684 if ((TREE_CODE (op0) == VECTOR_CST
14685 || TREE_CODE (op0) == CONSTRUCTOR)
14686 && (TREE_CODE (op1) == VECTOR_CST
14687 || TREE_CODE (op1) == CONSTRUCTOR))
14688 {
14689 t = fold_vec_perm (type, op0, op1, sel);
14690 if (t != NULL_TREE)
14691 return t;
14692 }
14693
14694 if (op0 == op1 && !single_arg)
14695 changed = true;
14696
14697 if (need_mask_canon && arg2 == op2)
14698 {
14699 tree *tsel = XALLOCAVEC (tree, nelts);
14700 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
14701 for (i = 0; i < nelts; i++)
14702 tsel[i] = build_int_cst (eltype, sel[i]);
14703 op2 = build_vector (TREE_TYPE (arg2), tsel);
14704 changed = true;
14705 }
14706
14707 if (changed)
14708 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
14709 }
14710 return NULL_TREE;
14711
14712 default:
14713 return NULL_TREE;
14714 } /* switch (code) */
14715 }
14716
14717 /* Perform constant folding and related simplification of EXPR.
14718 The related simplifications include x*1 => x, x*0 => 0, etc.,
14719 and application of the associative law.
14720 NOP_EXPR conversions may be removed freely (as long as we
14721 are careful not to change the type of the overall expression).
14722 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
14723 but we can constant-fold them if they have constant operands. */
14724
14725 #ifdef ENABLE_FOLD_CHECKING
14726 # define fold(x) fold_1 (x)
14727 static tree fold_1 (tree);
14728 static
14729 #endif
14730 tree
14731 fold (tree expr)
14732 {
14733 const tree t = expr;
14734 enum tree_code code = TREE_CODE (t);
14735 enum tree_code_class kind = TREE_CODE_CLASS (code);
14736 tree tem;
14737 location_t loc = EXPR_LOCATION (expr);
14738
14739 /* Return right away if a constant. */
14740 if (kind == tcc_constant)
14741 return t;
14742
14743 /* CALL_EXPR-like objects with variable numbers of operands are
14744 treated specially. */
14745 if (kind == tcc_vl_exp)
14746 {
14747 if (code == CALL_EXPR)
14748 {
14749 tem = fold_call_expr (loc, expr, false);
14750 return tem ? tem : expr;
14751 }
14752 return expr;
14753 }
14754
14755 if (IS_EXPR_CODE_CLASS (kind))
14756 {
14757 tree type = TREE_TYPE (t);
14758 tree op0, op1, op2;
14759
14760 switch (TREE_CODE_LENGTH (code))
14761 {
14762 case 1:
14763 op0 = TREE_OPERAND (t, 0);
14764 tem = fold_unary_loc (loc, code, type, op0);
14765 return tem ? tem : expr;
14766 case 2:
14767 op0 = TREE_OPERAND (t, 0);
14768 op1 = TREE_OPERAND (t, 1);
14769 tem = fold_binary_loc (loc, code, type, op0, op1);
14770 return tem ? tem : expr;
14771 case 3:
14772 op0 = TREE_OPERAND (t, 0);
14773 op1 = TREE_OPERAND (t, 1);
14774 op2 = TREE_OPERAND (t, 2);
14775 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14776 return tem ? tem : expr;
14777 default:
14778 break;
14779 }
14780 }
14781
14782 switch (code)
14783 {
14784 case ARRAY_REF:
14785 {
14786 tree op0 = TREE_OPERAND (t, 0);
14787 tree op1 = TREE_OPERAND (t, 1);
14788
14789 if (TREE_CODE (op1) == INTEGER_CST
14790 && TREE_CODE (op0) == CONSTRUCTOR
14791 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
14792 {
14793 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
14794 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
14795 unsigned HOST_WIDE_INT begin = 0;
14796
14797 /* Find a matching index by means of a binary search. */
14798 while (begin != end)
14799 {
14800 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
14801 tree index = (*elts)[middle].index;
14802
14803 if (TREE_CODE (index) == INTEGER_CST
14804 && tree_int_cst_lt (index, op1))
14805 begin = middle + 1;
14806 else if (TREE_CODE (index) == INTEGER_CST
14807 && tree_int_cst_lt (op1, index))
14808 end = middle;
14809 else if (TREE_CODE (index) == RANGE_EXPR
14810 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
14811 begin = middle + 1;
14812 else if (TREE_CODE (index) == RANGE_EXPR
14813 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
14814 end = middle;
14815 else
14816 return (*elts)[middle].value;
14817 }
14818 }
14819
14820 return t;
14821 }
14822
14823 /* Return a VECTOR_CST if possible. */
14824 case CONSTRUCTOR:
14825 {
14826 tree type = TREE_TYPE (t);
14827 if (TREE_CODE (type) != VECTOR_TYPE)
14828 return t;
14829
14830 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14831 unsigned HOST_WIDE_INT idx, pos = 0;
14832 tree value;
14833
14834 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14835 {
14836 if (!CONSTANT_CLASS_P (value))
14837 return t;
14838 if (TREE_CODE (value) == VECTOR_CST)
14839 {
14840 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14841 vec[pos++] = VECTOR_CST_ELT (value, i);
14842 }
14843 else
14844 vec[pos++] = value;
14845 }
14846 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14847 vec[pos] = build_zero_cst (TREE_TYPE (type));
14848
14849 return build_vector (type, vec);
14850 }
14851
14852 case CONST_DECL:
14853 return fold (DECL_INITIAL (t));
14854
14855 default:
14856 return t;
14857 } /* switch (code) */
14858 }
14859
14860 #ifdef ENABLE_FOLD_CHECKING
14861 #undef fold
14862
14863 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14864 hash_table <pointer_hash <tree_node> >);
14865 static void fold_check_failed (const_tree, const_tree);
14866 void print_fold_checksum (const_tree);
14867
14868 /* When --enable-checking=fold, compute a digest of expr before
14869 and after the actual fold call to verify that fold did not
14870 accidentally change the original expr. */
14871
14872 tree
14873 fold (tree expr)
14874 {
14875 tree ret;
14876 struct md5_ctx ctx;
14877 unsigned char checksum_before[16], checksum_after[16];
14878 hash_table <pointer_hash <tree_node> > ht;
14879
14880 ht.create (32);
14881 md5_init_ctx (&ctx);
14882 fold_checksum_tree (expr, &ctx, ht);
14883 md5_finish_ctx (&ctx, checksum_before);
14884 ht.empty ();
14885
14886 ret = fold_1 (expr);
14887
14888 md5_init_ctx (&ctx);
14889 fold_checksum_tree (expr, &ctx, ht);
14890 md5_finish_ctx (&ctx, checksum_after);
14891 ht.dispose ();
14892
14893 if (memcmp (checksum_before, checksum_after, 16))
14894 fold_check_failed (expr, ret);
14895
14896 return ret;
14897 }
14898
14899 void
14900 print_fold_checksum (const_tree expr)
14901 {
14902 struct md5_ctx ctx;
14903 unsigned char checksum[16], cnt;
14904 hash_table <pointer_hash <tree_node> > ht;
14905
14906 ht.create (32);
14907 md5_init_ctx (&ctx);
14908 fold_checksum_tree (expr, &ctx, ht);
14909 md5_finish_ctx (&ctx, checksum);
14910 ht.dispose ();
14911 for (cnt = 0; cnt < 16; ++cnt)
14912 fprintf (stderr, "%02x", checksum[cnt]);
14913 putc ('\n', stderr);
14914 }
14915
14916 static void
14917 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14918 {
14919 internal_error ("fold check: original tree changed by fold");
14920 }
14921
14922 static void
14923 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14924 hash_table <pointer_hash <tree_node> > ht)
14925 {
14926 tree_node **slot;
14927 enum tree_code code;
14928 union tree_node buf;
14929 int i, len;
14930
14931 recursive_label:
14932 if (expr == NULL)
14933 return;
14934 slot = ht.find_slot (expr, INSERT);
14935 if (*slot != NULL)
14936 return;
14937 *slot = CONST_CAST_TREE (expr);
14938 code = TREE_CODE (expr);
14939 if (TREE_CODE_CLASS (code) == tcc_declaration
14940 && DECL_ASSEMBLER_NAME_SET_P (expr))
14941 {
14942 /* Allow DECL_ASSEMBLER_NAME to be modified. */
14943 memcpy ((char *) &buf, expr, tree_size (expr));
14944 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14945 expr = (tree) &buf;
14946 }
14947 else if (TREE_CODE_CLASS (code) == tcc_type
14948 && (TYPE_POINTER_TO (expr)
14949 || TYPE_REFERENCE_TO (expr)
14950 || TYPE_CACHED_VALUES_P (expr)
14951 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14952 || TYPE_NEXT_VARIANT (expr)))
14953 {
14954 /* Allow these fields to be modified. */
14955 tree tmp;
14956 memcpy ((char *) &buf, expr, tree_size (expr));
14957 expr = tmp = (tree) &buf;
14958 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14959 TYPE_POINTER_TO (tmp) = NULL;
14960 TYPE_REFERENCE_TO (tmp) = NULL;
14961 TYPE_NEXT_VARIANT (tmp) = NULL;
14962 if (TYPE_CACHED_VALUES_P (tmp))
14963 {
14964 TYPE_CACHED_VALUES_P (tmp) = 0;
14965 TYPE_CACHED_VALUES (tmp) = NULL;
14966 }
14967 }
14968 md5_process_bytes (expr, tree_size (expr), ctx);
14969 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14970 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14971 if (TREE_CODE_CLASS (code) != tcc_type
14972 && TREE_CODE_CLASS (code) != tcc_declaration
14973 && code != TREE_LIST
14974 && code != SSA_NAME
14975 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14976 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14977 switch (TREE_CODE_CLASS (code))
14978 {
14979 case tcc_constant:
14980 switch (code)
14981 {
14982 case STRING_CST:
14983 md5_process_bytes (TREE_STRING_POINTER (expr),
14984 TREE_STRING_LENGTH (expr), ctx);
14985 break;
14986 case COMPLEX_CST:
14987 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14988 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14989 break;
14990 case VECTOR_CST:
14991 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14992 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14993 break;
14994 default:
14995 break;
14996 }
14997 break;
14998 case tcc_exceptional:
14999 switch (code)
15000 {
15001 case TREE_LIST:
15002 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
15003 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
15004 expr = TREE_CHAIN (expr);
15005 goto recursive_label;
15006 break;
15007 case TREE_VEC:
15008 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
15009 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
15010 break;
15011 default:
15012 break;
15013 }
15014 break;
15015 case tcc_expression:
15016 case tcc_reference:
15017 case tcc_comparison:
15018 case tcc_unary:
15019 case tcc_binary:
15020 case tcc_statement:
15021 case tcc_vl_exp:
15022 len = TREE_OPERAND_LENGTH (expr);
15023 for (i = 0; i < len; ++i)
15024 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
15025 break;
15026 case tcc_declaration:
15027 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
15028 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
15029 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
15030 {
15031 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
15032 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
15033 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
15034 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
15035 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
15036 }
15037 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_WITH_VIS))
15038 fold_checksum_tree (DECL_SECTION_NAME (expr), ctx, ht);
15039
15040 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
15041 {
15042 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
15043 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
15044 fold_checksum_tree (DECL_ARGUMENT_FLD (expr), ctx, ht);
15045 }
15046 break;
15047 case tcc_type:
15048 if (TREE_CODE (expr) == ENUMERAL_TYPE)
15049 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
15050 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
15051 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
15052 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
15053 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
15054 if (INTEGRAL_TYPE_P (expr)
15055 || SCALAR_FLOAT_TYPE_P (expr))
15056 {
15057 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
15058 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
15059 }
15060 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
15061 if (TREE_CODE (expr) == RECORD_TYPE
15062 || TREE_CODE (expr) == UNION_TYPE
15063 || TREE_CODE (expr) == QUAL_UNION_TYPE)
15064 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
15065 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
15066 break;
15067 default:
15068 break;
15069 }
15070 }
15071
15072 /* Helper function for outputting the checksum of a tree T. When
15073 debugging with gdb, you can "define mynext" to be "next" followed
15074 by "call debug_fold_checksum (op0)", then just trace down till the
15075 outputs differ. */
15076
15077 DEBUG_FUNCTION void
15078 debug_fold_checksum (const_tree t)
15079 {
15080 int i;
15081 unsigned char checksum[16];
15082 struct md5_ctx ctx;
15083 hash_table <pointer_hash <tree_node> > ht;
15084 ht.create (32);
15085
15086 md5_init_ctx (&ctx);
15087 fold_checksum_tree (t, &ctx, ht);
15088 md5_finish_ctx (&ctx, checksum);
15089 ht.empty ();
15090
15091 for (i = 0; i < 16; i++)
15092 fprintf (stderr, "%d ", checksum[i]);
15093
15094 fprintf (stderr, "\n");
15095 }
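
/* For instance, from gdb one can type

     (gdb) call debug_fold_checksum (op0)

   which prints the 16 bytes of the md5 digest of OP0 (in decimal) to
   stderr; identical trees print identical digests, so repeating the
   call while stepping pinpoints the statement that mutated the tree.  */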
15096
15097 #endif
15098
15099 /* Fold a unary tree expression with code CODE of type TYPE with an
15100 operand OP0. LOC is the location of the resulting expression.
15101 Return a folded expression if successful. Otherwise, return a tree
15102 expression with code CODE of type TYPE with an operand OP0. */
15103
15104 tree
15105 fold_build1_stat_loc (location_t loc,
15106 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
15107 {
15108 tree tem;
15109 #ifdef ENABLE_FOLD_CHECKING
15110 unsigned char checksum_before[16], checksum_after[16];
15111 struct md5_ctx ctx;
15112 hash_table <pointer_hash <tree_node> > ht;
15113
15114 ht.create (32);
15115 md5_init_ctx (&ctx);
15116 fold_checksum_tree (op0, &ctx, ht);
15117 md5_finish_ctx (&ctx, checksum_before);
15118 ht.empty ();
15119 #endif
15120
15121 tem = fold_unary_loc (loc, code, type, op0);
15122 if (!tem)
15123 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
15124
15125 #ifdef ENABLE_FOLD_CHECKING
15126 md5_init_ctx (&ctx);
15127 fold_checksum_tree (op0, &ctx, ht);
15128 md5_finish_ctx (&ctx, checksum_after);
15129 ht.dispose ();
15130
15131 if (memcmp (checksum_before, checksum_after, 16))
15132 fold_check_failed (op0, tem);
15133 #endif
15134 return tem;
15135 }
15136
15137 /* Fold a binary tree expression with code CODE of type TYPE with
15138 operands OP0 and OP1. LOC is the location of the resulting
15139 expression. Return a folded expression if successful. Otherwise,
15140 return a tree expression with code CODE of type TYPE with operands
15141 OP0 and OP1. */
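
/* For example (a sketch; assumes a location LOC is in scope):

     fold_build2_loc (loc, PLUS_EXPR, integer_type_node,
		      build_int_cst (integer_type_node, 2),
		      build_int_cst (integer_type_node, 3))

   returns the INTEGER_CST 5 directly instead of a PLUS_EXPR node.  */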
15142
15143 tree
15144 fold_build2_stat_loc (location_t loc,
15145 enum tree_code code, tree type, tree op0, tree op1
15146 MEM_STAT_DECL)
15147 {
15148 tree tem;
15149 #ifdef ENABLE_FOLD_CHECKING
15150 unsigned char checksum_before_op0[16],
15151 checksum_before_op1[16],
15152 checksum_after_op0[16],
15153 checksum_after_op1[16];
15154 struct md5_ctx ctx;
15155 hash_table <pointer_hash <tree_node> > ht;
15156
15157 ht.create (32);
15158 md5_init_ctx (&ctx);
15159 fold_checksum_tree (op0, &ctx, ht);
15160 md5_finish_ctx (&ctx, checksum_before_op0);
15161 ht.empty ();
15162
15163 md5_init_ctx (&ctx);
15164 fold_checksum_tree (op1, &ctx, ht);
15165 md5_finish_ctx (&ctx, checksum_before_op1);
15166 ht.empty ();
15167 #endif
15168
15169 tem = fold_binary_loc (loc, code, type, op0, op1);
15170 if (!tem)
15171 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
15172
15173 #ifdef ENABLE_FOLD_CHECKING
15174 md5_init_ctx (&ctx);
15175 fold_checksum_tree (op0, &ctx, ht);
15176 md5_finish_ctx (&ctx, checksum_after_op0);
15177 ht.empty ();
15178
15179 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15180 fold_check_failed (op0, tem);
15181
15182 md5_init_ctx (&ctx);
15183 fold_checksum_tree (op1, &ctx, ht);
15184 md5_finish_ctx (&ctx, checksum_after_op1);
15185 ht.dispose ();
15186
15187 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15188 fold_check_failed (op1, tem);
15189 #endif
15190 return tem;
15191 }
15192
15193 /* Fold a ternary tree expression with code CODE of type TYPE with
15194 operands OP0, OP1, and OP2. Return a folded expression if
15195 successful. Otherwise, return a tree expression with code CODE of
15196 type TYPE with operands OP0, OP1, and OP2. */
15197
15198 tree
15199 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
15200 tree op0, tree op1, tree op2 MEM_STAT_DECL)
15201 {
15202 tree tem;
15203 #ifdef ENABLE_FOLD_CHECKING
15204 unsigned char checksum_before_op0[16],
15205 checksum_before_op1[16],
15206 checksum_before_op2[16],
15207 checksum_after_op0[16],
15208 checksum_after_op1[16],
15209 checksum_after_op2[16];
15210 struct md5_ctx ctx;
15211 hash_table <pointer_hash <tree_node> > ht;
15212
15213 ht.create (32);
15214 md5_init_ctx (&ctx);
15215 fold_checksum_tree (op0, &ctx, ht);
15216 md5_finish_ctx (&ctx, checksum_before_op0);
15217 ht.empty ();
15218
15219 md5_init_ctx (&ctx);
15220 fold_checksum_tree (op1, &ctx, ht);
15221 md5_finish_ctx (&ctx, checksum_before_op1);
15222 ht.empty ();
15223
15224 md5_init_ctx (&ctx);
15225 fold_checksum_tree (op2, &ctx, ht);
15226 md5_finish_ctx (&ctx, checksum_before_op2);
15227 ht.empty ();
15228 #endif
15229
15230 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
15231 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
15232 if (!tem)
15233 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
15234
15235 #ifdef ENABLE_FOLD_CHECKING
15236 md5_init_ctx (&ctx);
15237 fold_checksum_tree (op0, &ctx, ht);
15238 md5_finish_ctx (&ctx, checksum_after_op0);
15239 ht.empty ();
15240
15241 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
15242 fold_check_failed (op0, tem);
15243
15244 md5_init_ctx (&ctx);
15245 fold_checksum_tree (op1, &ctx, ht);
15246 md5_finish_ctx (&ctx, checksum_after_op1);
15247 ht.empty ();
15248
15249 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
15250 fold_check_failed (op1, tem);
15251
15252 md5_init_ctx (&ctx);
15253 fold_checksum_tree (op2, &ctx, ht);
15254 md5_finish_ctx (&ctx, checksum_after_op2);
15255 ht.dispose ();
15256
15257 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
15258 fold_check_failed (op2, tem);
15259 #endif
15260 return tem;
15261 }
15262
/* Fold a CALL_EXPR expression of type TYPE with function FN, the NARGS
   arguments in ARGARRAY, and a null static chain.
15265 Return a folded expression if successful. Otherwise, return a CALL_EXPR
15266 of type TYPE from the given operands as constructed by build_call_array. */
15267
15268 tree
15269 fold_build_call_array_loc (location_t loc, tree type, tree fn,
15270 int nargs, tree *argarray)
15271 {
15272 tree tem;
15273 #ifdef ENABLE_FOLD_CHECKING
15274 unsigned char checksum_before_fn[16],
15275 checksum_before_arglist[16],
15276 checksum_after_fn[16],
15277 checksum_after_arglist[16];
15278 struct md5_ctx ctx;
15279 hash_table <pointer_hash <tree_node> > ht;
15280 int i;
15281
15282 ht.create (32);
15283 md5_init_ctx (&ctx);
15284 fold_checksum_tree (fn, &ctx, ht);
15285 md5_finish_ctx (&ctx, checksum_before_fn);
15286 ht.empty ();
15287
15288 md5_init_ctx (&ctx);
15289 for (i = 0; i < nargs; i++)
15290 fold_checksum_tree (argarray[i], &ctx, ht);
15291 md5_finish_ctx (&ctx, checksum_before_arglist);
15292 ht.empty ();
15293 #endif
15294
15295 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
15296
15297 #ifdef ENABLE_FOLD_CHECKING
15298 md5_init_ctx (&ctx);
15299 fold_checksum_tree (fn, &ctx, ht);
15300 md5_finish_ctx (&ctx, checksum_after_fn);
15301 ht.empty ();
15302
15303 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
15304 fold_check_failed (fn, tem);
15305
15306 md5_init_ctx (&ctx);
15307 for (i = 0; i < nargs; i++)
15308 fold_checksum_tree (argarray[i], &ctx, ht);
15309 md5_finish_ctx (&ctx, checksum_after_arglist);
15310 ht.dispose ();
15311
15312 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
15313 fold_check_failed (NULL_TREE, tem);
15314 #endif
15315 return tem;
15316 }
15317
/* Perform constant folding and related simplification of an initializer
   expression.  The following functions behave identically to the
   corresponding "fold_buildN" routines, but ignore potential run-time
   traps and exceptions that fold must otherwise preserve.  */
15321
15322 #define START_FOLD_INIT \
15323 int saved_signaling_nans = flag_signaling_nans;\
15324 int saved_trapping_math = flag_trapping_math;\
15325 int saved_rounding_math = flag_rounding_math;\
15326 int saved_trapv = flag_trapv;\
15327 int saved_folding_initializer = folding_initializer;\
15328 flag_signaling_nans = 0;\
15329 flag_trapping_math = 0;\
15330 flag_rounding_math = 0;\
15331 flag_trapv = 0;\
15332 folding_initializer = 1;
15333
15334 #define END_FOLD_INIT \
15335 flag_signaling_nans = saved_signaling_nans;\
15336 flag_trapping_math = saved_trapping_math;\
15337 flag_rounding_math = saved_rounding_math;\
15338 flag_trapv = saved_trapv;\
15339 folding_initializer = saved_folding_initializer;
15340
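/* For example, with -ftrapping-math in effect fold must leave a
   division such as (1.0 / 0.0) alone, since it may trap at run time;
   but there is no run time inside a static initializer, so

     static double d = 1.0 / 0.0;

   may be folded to +Inf by the entry points below, which temporarily
   clear the trapping/rounding flags around the fold.  */
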
15341 tree
15342 fold_build1_initializer_loc (location_t loc, enum tree_code code,
15343 tree type, tree op)
15344 {
15345 tree result;
15346 START_FOLD_INIT;
15347
15348 result = fold_build1_loc (loc, code, type, op);
15349
15350 END_FOLD_INIT;
15351 return result;
15352 }
15353
15354 tree
15355 fold_build2_initializer_loc (location_t loc, enum tree_code code,
15356 tree type, tree op0, tree op1)
15357 {
15358 tree result;
15359 START_FOLD_INIT;
15360
15361 result = fold_build2_loc (loc, code, type, op0, op1);
15362
15363 END_FOLD_INIT;
15364 return result;
15365 }
15366
15367 tree
15368 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
15369 int nargs, tree *argarray)
15370 {
15371 tree result;
15372 START_FOLD_INIT;
15373
15374 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
15375
15376 END_FOLD_INIT;
15377 return result;
15378 }
15379
15380 #undef START_FOLD_INIT
15381 #undef END_FOLD_INIT
15382
/* Determine if the first argument is a multiple of the second argument.
   Return 0 if it is not, or if we cannot easily determine it to be.
15385
15386 An example of the sort of thing we care about (at this point; this routine
15387 could surely be made more general, and expanded to do what the *_DIV_EXPR's
15388 fold cases do now) is discovering that
15389
15390 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15391
15392 is a multiple of
15393
15394 SAVE_EXPR (J * 8)
15395
15396 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
15397
15398 This code also handles discovering that
15399
15400 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
15401
15402 is a multiple of 8 so we don't have to worry about dealing with a
15403 possible remainder.
15404
15405 Note that we *look* inside a SAVE_EXPR only to determine how it was
15406 calculated; it is not safe for fold to do much of anything else with the
15407 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
15408 at run time. For example, the latter example above *cannot* be implemented
15409 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
15410 evaluation time of the original SAVE_EXPR is not necessarily the same at
15411 the time the new expression is evaluated. The only optimization of this
15412 sort that would be valid is changing
15413
15414 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
15415
15416 divided by 8 to
15417
15418 SAVE_EXPR (I) * SAVE_EXPR (J)
15419
15420 (where the same SAVE_EXPR (J) is used in the original and the
15421 transformed version). */
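
/* For instance, for SIZE = (N * 16 + 8) in sizetype,
   multiple_of_p (sizetype, SIZE, size_int (8)) returns 1, since both
   terms are multiples of 8, whereas asking about a factor of 16
   returns 0 because of the + 8 term.  */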
15422
15423 int
15424 multiple_of_p (tree type, const_tree top, const_tree bottom)
15425 {
15426 if (operand_equal_p (top, bottom, 0))
15427 return 1;
15428
15429 if (TREE_CODE (type) != INTEGER_TYPE)
15430 return 0;
15431
15432 switch (TREE_CODE (top))
15433 {
15434 case BIT_AND_EXPR:
15435 /* Bitwise and provides a power of two multiple. If the mask is
15436 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
15437 if (!integer_pow2p (bottom))
15438 return 0;
15439 /* FALLTHRU */
15440
15441 case MULT_EXPR:
15442 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15443 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15444
15445 case PLUS_EXPR:
15446 case MINUS_EXPR:
15447 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
15448 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
15449
15450 case LSHIFT_EXPR:
15451 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
15452 {
15453 tree op1, t1;
15454
15455 op1 = TREE_OPERAND (top, 1);
15456 /* const_binop may not detect overflow correctly,
15457 so check for it explicitly here. */
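	  /* E.g. (X << 3) is a multiple of 8, since 1 << 3 == 8.  */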
15458 if (TYPE_PRECISION (TREE_TYPE (size_one_node))
15459 > TREE_INT_CST_LOW (op1)
15460 && TREE_INT_CST_HIGH (op1) == 0
15461 && 0 != (t1 = fold_convert (type,
15462 const_binop (LSHIFT_EXPR,
15463 size_one_node,
15464 op1)))
15465 && !TREE_OVERFLOW (t1))
15466 return multiple_of_p (type, t1, bottom);
15467 }
15468 return 0;
15469
15470 case NOP_EXPR:
15471 /* Can't handle conversions from non-integral or wider integral type. */
15472 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
15473 || (TYPE_PRECISION (type)
15474 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
15475 return 0;
15476
15477 /* .. fall through ... */
15478
15479 case SAVE_EXPR:
15480 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
15481
15482 case COND_EXPR:
15483 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
15484 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
15485
15486 case INTEGER_CST:
15487 if (TREE_CODE (bottom) != INTEGER_CST
15488 || integer_zerop (bottom)
15489 || (TYPE_UNSIGNED (type)
15490 && (tree_int_cst_sgn (top) < 0
15491 || tree_int_cst_sgn (bottom) < 0)))
15492 return 0;
15493 return integer_zerop (int_const_binop (TRUNC_MOD_EXPR,
15494 top, bottom));
15495
15496 default:
15497 return 0;
15498 }
15499 }
15500
15501 /* Return true if CODE or TYPE is known to be non-negative. */
15502
15503 static bool
15504 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
15505 {
15506 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
15507 && truth_value_p (code))
    /* Truth values evaluate to 0 or 1, which is nonnegative unless we
       have a signed:1 type (where the values are -1 and 0).  */
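    /* E.g. a comparison (a < b) yields 0 or 1 and so is nonnegative,
       whereas a bit-field declared "int f : 1" holds only -1 and 0.  */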
15510 return true;
15511 return false;
15512 }
15513
15514 /* Return true if (CODE OP0) is known to be non-negative. If the return
15515 value is based on the assumption that signed overflow is undefined,
15516 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15517 *STRICT_OVERFLOW_P. */
15518
15519 bool
15520 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15521 bool *strict_overflow_p)
15522 {
15523 if (TYPE_UNSIGNED (type))
15524 return true;
15525
15526 switch (code)
15527 {
15528 case ABS_EXPR:
15529 /* We can't return 1 if flag_wrapv is set because
15530 ABS_EXPR<INT_MIN> = INT_MIN. */
15531 if (!INTEGRAL_TYPE_P (type))
15532 return true;
15533 if (TYPE_OVERFLOW_UNDEFINED (type))
15534 {
15535 *strict_overflow_p = true;
15536 return true;
15537 }
15538 break;
15539
15540 case NON_LVALUE_EXPR:
15541 case FLOAT_EXPR:
15542 case FIX_TRUNC_EXPR:
15543 return tree_expr_nonnegative_warnv_p (op0,
15544 strict_overflow_p);
15545
15546 case NOP_EXPR:
15547 {
15548 tree inner_type = TREE_TYPE (op0);
15549 tree outer_type = type;
15550
15551 if (TREE_CODE (outer_type) == REAL_TYPE)
15552 {
15553 if (TREE_CODE (inner_type) == REAL_TYPE)
15554 return tree_expr_nonnegative_warnv_p (op0,
15555 strict_overflow_p);
15556 if (INTEGRAL_TYPE_P (inner_type))
15557 {
15558 if (TYPE_UNSIGNED (inner_type))
15559 return true;
15560 return tree_expr_nonnegative_warnv_p (op0,
15561 strict_overflow_p);
15562 }
15563 }
15564 else if (INTEGRAL_TYPE_P (outer_type))
15565 {
15566 if (TREE_CODE (inner_type) == REAL_TYPE)
15567 return tree_expr_nonnegative_warnv_p (op0,
15568 strict_overflow_p);
15569 if (INTEGRAL_TYPE_P (inner_type))
15570 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
15571 && TYPE_UNSIGNED (inner_type);
15572 }
15573 }
15574 break;
15575
15576 default:
15577 return tree_simple_nonnegative_warnv_p (code, type);
15578 }
15579
  /* We don't know the sign of the expression, so be conservative and
     return false.  */
15581 return false;
15582 }
15583
15584 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
15585 value is based on the assumption that signed overflow is undefined,
15586 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15587 *STRICT_OVERFLOW_P. */
15588
15589 bool
15590 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
15591 tree op1, bool *strict_overflow_p)
15592 {
15593 if (TYPE_UNSIGNED (type))
15594 return true;
15595
15596 switch (code)
15597 {
15598 case POINTER_PLUS_EXPR:
15599 case PLUS_EXPR:
15600 if (FLOAT_TYPE_P (type))
15601 return (tree_expr_nonnegative_warnv_p (op0,
15602 strict_overflow_p)
15603 && tree_expr_nonnegative_warnv_p (op1,
15604 strict_overflow_p));
15605
15606 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
15607 both unsigned and at least 2 bits shorter than the result. */
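      /* E.g. assuming 32-bit int, the sum of two zero-extended
	 unsigned shorts, (int) us0 + (int) us1, has prec == 17 < 32
	 and therefore can never wrap negative.  */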
15608 if (TREE_CODE (type) == INTEGER_TYPE
15609 && TREE_CODE (op0) == NOP_EXPR
15610 && TREE_CODE (op1) == NOP_EXPR)
15611 {
15612 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
15613 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
15614 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
15615 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
15616 {
15617 unsigned int prec = MAX (TYPE_PRECISION (inner1),
15618 TYPE_PRECISION (inner2)) + 1;
15619 return prec < TYPE_PRECISION (type);
15620 }
15621 }
15622 break;
15623
15624 case MULT_EXPR:
15625 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
15626 {
15627 /* x * x is always non-negative for floating point x
15628 or without overflow. */
15629 if (operand_equal_p (op0, op1, 0)
15630 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
15631 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
15632 {
15633 if (TYPE_OVERFLOW_UNDEFINED (type))
15634 *strict_overflow_p = true;
15635 return true;
15636 }
15637 }
15638
      /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
	 both unsigned and their combined precision is less than that of
	 the result.  */
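      /* E.g. assuming 32-bit int, the product of two zero-extended
	 unsigned chars, (int) uc0 * (int) uc1, has 8 + 8 == 16 < 32
	 and so cannot wrap negative.  */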
15641 if (TREE_CODE (type) == INTEGER_TYPE
15642 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
15643 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
15644 {
15645 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
15646 ? TREE_TYPE (TREE_OPERAND (op0, 0))
15647 : TREE_TYPE (op0);
15648 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
15649 ? TREE_TYPE (TREE_OPERAND (op1, 0))
15650 : TREE_TYPE (op1);
15651
15652 bool unsigned0 = TYPE_UNSIGNED (inner0);
15653 bool unsigned1 = TYPE_UNSIGNED (inner1);
15654
15655 if (TREE_CODE (op0) == INTEGER_CST)
15656 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
15657
15658 if (TREE_CODE (op1) == INTEGER_CST)
15659 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
15660
15661 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
15662 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
15663 {
15664 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
15665 ? tree_int_cst_min_precision (op0, /*unsignedp=*/true)
15666 : TYPE_PRECISION (inner0);
15667
15668 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
15669 ? tree_int_cst_min_precision (op1, /*unsignedp=*/true)
15670 : TYPE_PRECISION (inner1);
15671
15672 return precision0 + precision1 < TYPE_PRECISION (type);
15673 }
15674 }
15675 return false;
15676
15677 case BIT_AND_EXPR:
15678 case MAX_EXPR:
15679 return (tree_expr_nonnegative_warnv_p (op0,
15680 strict_overflow_p)
15681 || tree_expr_nonnegative_warnv_p (op1,
15682 strict_overflow_p));
15683
15684 case BIT_IOR_EXPR:
15685 case BIT_XOR_EXPR:
15686 case MIN_EXPR:
15687 case RDIV_EXPR:
15688 case TRUNC_DIV_EXPR:
15689 case CEIL_DIV_EXPR:
15690 case FLOOR_DIV_EXPR:
15691 case ROUND_DIV_EXPR:
15692 return (tree_expr_nonnegative_warnv_p (op0,
15693 strict_overflow_p)
15694 && tree_expr_nonnegative_warnv_p (op1,
15695 strict_overflow_p));
15696
15697 case TRUNC_MOD_EXPR:
15698 case CEIL_MOD_EXPR:
15699 case FLOOR_MOD_EXPR:
15700 case ROUND_MOD_EXPR:
15701 return tree_expr_nonnegative_warnv_p (op0,
15702 strict_overflow_p);
15703 default:
15704 return tree_simple_nonnegative_warnv_p (code, type);
15705 }
15706
  /* We don't know the sign of the expression, so be conservative and
     return false.  */
15708 return false;
15709 }
15710
15711 /* Return true if T is known to be non-negative. If the return
15712 value is based on the assumption that signed overflow is undefined,
15713 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15714 *STRICT_OVERFLOW_P. */
15715
15716 bool
15717 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15718 {
15719 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15720 return true;
15721
15722 switch (TREE_CODE (t))
15723 {
15724 case INTEGER_CST:
15725 return tree_int_cst_sgn (t) >= 0;
15726
15727 case REAL_CST:
15728 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
15729
15730 case FIXED_CST:
15731 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
15732
15733 case COND_EXPR:
15734 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15735 strict_overflow_p)
15736 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
15737 strict_overflow_p));
15738 default:
15739 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15740 TREE_TYPE (t));
15741 }
15742 /* We don't know sign of `t', so be conservative and return false. */
15743 return false;
15744 }
15745
15746 /* Return true if T is known to be non-negative. If the return
15747 value is based on the assumption that signed overflow is undefined,
15748 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15749 *STRICT_OVERFLOW_P. */
15750
15751 bool
15752 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
15753 tree arg0, tree arg1, bool *strict_overflow_p)
15754 {
15755 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
15756 switch (DECL_FUNCTION_CODE (fndecl))
15757 {
15758 CASE_FLT_FN (BUILT_IN_ACOS):
15759 CASE_FLT_FN (BUILT_IN_ACOSH):
15760 CASE_FLT_FN (BUILT_IN_CABS):
15761 CASE_FLT_FN (BUILT_IN_COSH):
15762 CASE_FLT_FN (BUILT_IN_ERFC):
15763 CASE_FLT_FN (BUILT_IN_EXP):
15764 CASE_FLT_FN (BUILT_IN_EXP10):
15765 CASE_FLT_FN (BUILT_IN_EXP2):
15766 CASE_FLT_FN (BUILT_IN_FABS):
15767 CASE_FLT_FN (BUILT_IN_FDIM):
15768 CASE_FLT_FN (BUILT_IN_HYPOT):
15769 CASE_FLT_FN (BUILT_IN_POW10):
15770 CASE_INT_FN (BUILT_IN_FFS):
15771 CASE_INT_FN (BUILT_IN_PARITY):
15772 CASE_INT_FN (BUILT_IN_POPCOUNT):
15773 CASE_INT_FN (BUILT_IN_CLZ):
15774 CASE_INT_FN (BUILT_IN_CLRSB):
15775 case BUILT_IN_BSWAP32:
15776 case BUILT_IN_BSWAP64:
15777 /* Always true. */
15778 return true;
15779
15780 CASE_FLT_FN (BUILT_IN_SQRT):
15781 /* sqrt(-0.0) is -0.0. */
15782 if (!HONOR_SIGNED_ZEROS (TYPE_MODE (type)))
15783 return true;
15784 return tree_expr_nonnegative_warnv_p (arg0,
15785 strict_overflow_p);
15786
15787 CASE_FLT_FN (BUILT_IN_ASINH):
15788 CASE_FLT_FN (BUILT_IN_ATAN):
15789 CASE_FLT_FN (BUILT_IN_ATANH):
15790 CASE_FLT_FN (BUILT_IN_CBRT):
15791 CASE_FLT_FN (BUILT_IN_CEIL):
15792 CASE_FLT_FN (BUILT_IN_ERF):
15793 CASE_FLT_FN (BUILT_IN_EXPM1):
15794 CASE_FLT_FN (BUILT_IN_FLOOR):
15795 CASE_FLT_FN (BUILT_IN_FMOD):
15796 CASE_FLT_FN (BUILT_IN_FREXP):
15797 CASE_FLT_FN (BUILT_IN_ICEIL):
15798 CASE_FLT_FN (BUILT_IN_IFLOOR):
15799 CASE_FLT_FN (BUILT_IN_IRINT):
15800 CASE_FLT_FN (BUILT_IN_IROUND):
15801 CASE_FLT_FN (BUILT_IN_LCEIL):
15802 CASE_FLT_FN (BUILT_IN_LDEXP):
15803 CASE_FLT_FN (BUILT_IN_LFLOOR):
15804 CASE_FLT_FN (BUILT_IN_LLCEIL):
15805 CASE_FLT_FN (BUILT_IN_LLFLOOR):
15806 CASE_FLT_FN (BUILT_IN_LLRINT):
15807 CASE_FLT_FN (BUILT_IN_LLROUND):
15808 CASE_FLT_FN (BUILT_IN_LRINT):
15809 CASE_FLT_FN (BUILT_IN_LROUND):
15810 CASE_FLT_FN (BUILT_IN_MODF):
15811 CASE_FLT_FN (BUILT_IN_NEARBYINT):
15812 CASE_FLT_FN (BUILT_IN_RINT):
15813 CASE_FLT_FN (BUILT_IN_ROUND):
15814 CASE_FLT_FN (BUILT_IN_SCALB):
15815 CASE_FLT_FN (BUILT_IN_SCALBLN):
15816 CASE_FLT_FN (BUILT_IN_SCALBN):
15817 CASE_FLT_FN (BUILT_IN_SIGNBIT):
15818 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
15819 CASE_FLT_FN (BUILT_IN_SINH):
15820 CASE_FLT_FN (BUILT_IN_TANH):
15821 CASE_FLT_FN (BUILT_IN_TRUNC):
15822 /* True if the 1st argument is nonnegative. */
15823 return tree_expr_nonnegative_warnv_p (arg0,
15824 strict_overflow_p);
15825
15826 CASE_FLT_FN (BUILT_IN_FMAX):
15827 /* True if the 1st OR 2nd arguments are nonnegative. */
15828 return (tree_expr_nonnegative_warnv_p (arg0,
15829 strict_overflow_p)
15830 || (tree_expr_nonnegative_warnv_p (arg1,
15831 strict_overflow_p)));
15832
15833 CASE_FLT_FN (BUILT_IN_FMIN):
15834 /* True if the 1st AND 2nd arguments are nonnegative. */
15835 return (tree_expr_nonnegative_warnv_p (arg0,
15836 strict_overflow_p)
15837 && (tree_expr_nonnegative_warnv_p (arg1,
15838 strict_overflow_p)));
15839
15840 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15841 /* True if the 2nd argument is nonnegative. */
15842 return tree_expr_nonnegative_warnv_p (arg1,
15843 strict_overflow_p);
15844
15845 CASE_FLT_FN (BUILT_IN_POWI):
15846 /* True if the 1st argument is nonnegative or the second
15847 argument is an even integer. */
15848 if (TREE_CODE (arg1) == INTEGER_CST
15849 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15850 return true;
15851 return tree_expr_nonnegative_warnv_p (arg0,
15852 strict_overflow_p);
15853
15854 CASE_FLT_FN (BUILT_IN_POW):
15855 /* True if the 1st argument is nonnegative or the second
15856 argument is an even integer valued real. */
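	/* E.g. pow (x, 2.0) is nonnegative for every x, whereas
	   pow (x, 2.5) is known nonnegative only when x is.  */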
15857 if (TREE_CODE (arg1) == REAL_CST)
15858 {
15859 REAL_VALUE_TYPE c;
15860 HOST_WIDE_INT n;
15861
15862 c = TREE_REAL_CST (arg1);
15863 n = real_to_integer (&c);
15864 if ((n & 1) == 0)
15865 {
15866 REAL_VALUE_TYPE cint;
15867 real_from_integer (&cint, VOIDmode, n,
15868 n < 0 ? -1 : 0, 0);
15869 if (real_identical (&c, &cint))
15870 return true;
15871 }
15872 }
15873 return tree_expr_nonnegative_warnv_p (arg0,
15874 strict_overflow_p);
15875
15876 default:
15877 break;
15878 }
15879 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15880 type);
15881 }
15882
15883 /* Return true if T is known to be non-negative. If the return
15884 value is based on the assumption that signed overflow is undefined,
15885 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15886 *STRICT_OVERFLOW_P. */
15887
15888 static bool
15889 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15890 {
15891 enum tree_code code = TREE_CODE (t);
15892 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15893 return true;
15894
15895 switch (code)
15896 {
15897 case TARGET_EXPR:
15898 {
15899 tree temp = TARGET_EXPR_SLOT (t);
15900 t = TARGET_EXPR_INITIAL (t);
15901
15902 /* If the initializer is non-void, then it's a normal expression
15903 that will be assigned to the slot. */
15904 if (!VOID_TYPE_P (t))
15905 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15906
15907 /* Otherwise, the initializer sets the slot in some way. One common
15908 way is an assignment statement at the end of the initializer. */
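	/* E.g. if the initializer's last statement is "SLOT = x + 1",
	   we ask about the nonnegativity of (x + 1).  (Here SLOT stands
	   for the TARGET_EXPR_SLOT temporary.)  */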
15909 while (1)
15910 {
15911 if (TREE_CODE (t) == BIND_EXPR)
15912 t = expr_last (BIND_EXPR_BODY (t));
15913 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15914 || TREE_CODE (t) == TRY_CATCH_EXPR)
15915 t = expr_last (TREE_OPERAND (t, 0));
15916 else if (TREE_CODE (t) == STATEMENT_LIST)
15917 t = expr_last (t);
15918 else
15919 break;
15920 }
15921 if (TREE_CODE (t) == MODIFY_EXPR
15922 && TREE_OPERAND (t, 0) == temp)
15923 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15924 strict_overflow_p);
15925
15926 return false;
15927 }
15928
15929 case CALL_EXPR:
15930 {
15931 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15932 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15933
15934 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15935 get_callee_fndecl (t),
15936 arg0,
15937 arg1,
15938 strict_overflow_p);
15939 }
15940 case COMPOUND_EXPR:
15941 case MODIFY_EXPR:
15942 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15943 strict_overflow_p);
15944 case BIND_EXPR:
15945 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15946 strict_overflow_p);
15947 case SAVE_EXPR:
15948 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15949 strict_overflow_p);
15950
15951 default:
15952 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15953 TREE_TYPE (t));
15954 }
15955
15956 /* We don't know sign of `t', so be conservative and return false. */
15957 return false;
15958 }
15959
15960 /* Return true if T is known to be non-negative. If the return
15961 value is based on the assumption that signed overflow is undefined,
15962 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15963 *STRICT_OVERFLOW_P. */
15964
15965 bool
15966 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15967 {
15968 enum tree_code code;
15969 if (t == error_mark_node)
15970 return false;
15971
15972 code = TREE_CODE (t);
15973 switch (TREE_CODE_CLASS (code))
15974 {
15975 case tcc_binary:
15976 case tcc_comparison:
15977 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15978 TREE_TYPE (t),
15979 TREE_OPERAND (t, 0),
15980 TREE_OPERAND (t, 1),
15981 strict_overflow_p);
15982
15983 case tcc_unary:
15984 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15985 TREE_TYPE (t),
15986 TREE_OPERAND (t, 0),
15987 strict_overflow_p);
15988
15989 case tcc_constant:
15990 case tcc_declaration:
15991 case tcc_reference:
15992 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15993
15994 default:
15995 break;
15996 }
15997
15998 switch (code)
15999 {
16000 case TRUTH_AND_EXPR:
16001 case TRUTH_OR_EXPR:
16002 case TRUTH_XOR_EXPR:
16003 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
16004 TREE_TYPE (t),
16005 TREE_OPERAND (t, 0),
16006 TREE_OPERAND (t, 1),
16007 strict_overflow_p);
16008 case TRUTH_NOT_EXPR:
16009 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
16010 TREE_TYPE (t),
16011 TREE_OPERAND (t, 0),
16012 strict_overflow_p);
16013
16014 case COND_EXPR:
16015 case CONSTRUCTOR:
16016 case OBJ_TYPE_REF:
16017 case ASSERT_EXPR:
16018 case ADDR_EXPR:
16019 case WITH_SIZE_EXPR:
16020 case SSA_NAME:
16021 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
16022
16023 default:
16024 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
16025 }
16026 }
16027
16028 /* Return true if `t' is known to be non-negative. Handle warnings
16029 about undefined signed overflow. */
16030
16031 bool
16032 tree_expr_nonnegative_p (tree t)
16033 {
16034 bool ret, strict_overflow_p;
16035
16036 strict_overflow_p = false;
16037 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
16038 if (strict_overflow_p)
16039 fold_overflow_warning (("assuming signed overflow does not occur when "
16040 "determining that expression is always "
16041 "non-negative"),
16042 WARN_STRICT_OVERFLOW_MISC);
16043 return ret;
16044 }
16045
16046
16047 /* Return true when (CODE OP0) is an address and is known to be nonzero.
16048 For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.
16050
16051 If the return value is based on the assumption that signed overflow
16052 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16053 change *STRICT_OVERFLOW_P. */
16054
16055 bool
16056 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
16057 bool *strict_overflow_p)
16058 {
16059 switch (code)
16060 {
16061 case ABS_EXPR:
16062 return tree_expr_nonzero_warnv_p (op0,
16063 strict_overflow_p);
16064
16065 case NOP_EXPR:
16066 {
16067 tree inner_type = TREE_TYPE (op0);
16068 tree outer_type = type;
16069
16070 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
16071 && tree_expr_nonzero_warnv_p (op0,
16072 strict_overflow_p));
16073 }
16074 break;
16075
16076 case NON_LVALUE_EXPR:
16077 return tree_expr_nonzero_warnv_p (op0,
16078 strict_overflow_p);
16079
16080 default:
16081 break;
16082 }
16083
16084 return false;
16085 }
16086
16087 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
16088 For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.
16090
16091 If the return value is based on the assumption that signed overflow
16092 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16093 change *STRICT_OVERFLOW_P. */
16094
16095 bool
16096 tree_binary_nonzero_warnv_p (enum tree_code code,
16097 tree type,
16098 tree op0,
16099 tree op1, bool *strict_overflow_p)
16100 {
16101 bool sub_strict_overflow_p;
16102 switch (code)
16103 {
16104 case POINTER_PLUS_EXPR:
16105 case PLUS_EXPR:
16106 if (TYPE_OVERFLOW_UNDEFINED (type))
16107 {
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
16110 sub_strict_overflow_p = false;
16111 if (!tree_expr_nonnegative_warnv_p (op0,
16112 &sub_strict_overflow_p)
16113 || !tree_expr_nonnegative_warnv_p (op1,
16114 &sub_strict_overflow_p))
16115 return false;
	  /* One of the operands must be positive and the other non-negative.  */
16117 /* We don't set *STRICT_OVERFLOW_P here: even if this value
16118 overflows, on a twos-complement machine the sum of two
16119 nonnegative numbers can never be zero. */
16120 return (tree_expr_nonzero_warnv_p (op0,
16121 strict_overflow_p)
16122 || tree_expr_nonzero_warnv_p (op1,
16123 strict_overflow_p));
16124 }
16125 break;
16126
16127 case MULT_EXPR:
16128 if (TYPE_OVERFLOW_UNDEFINED (type))
16129 {
16130 if (tree_expr_nonzero_warnv_p (op0,
16131 strict_overflow_p)
16132 && tree_expr_nonzero_warnv_p (op1,
16133 strict_overflow_p))
16134 {
16135 *strict_overflow_p = true;
16136 return true;
16137 }
16138 }
16139 break;
16140
16141 case MIN_EXPR:
16142 sub_strict_overflow_p = false;
16143 if (tree_expr_nonzero_warnv_p (op0,
16144 &sub_strict_overflow_p)
16145 && tree_expr_nonzero_warnv_p (op1,
16146 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* The minimum of two nonzero operands is one of the two,
	     hence nonzero as well.  */
	  return true;
	}
16151 break;
16152
16153 case MAX_EXPR:
16154 sub_strict_overflow_p = false;
16155 if (tree_expr_nonzero_warnv_p (op0,
16156 &sub_strict_overflow_p))
16157 {
16158 if (sub_strict_overflow_p)
16159 *strict_overflow_p = true;
16160
16161 /* When both operands are nonzero, then MAX must be too. */
16162 if (tree_expr_nonzero_warnv_p (op1,
16163 strict_overflow_p))
16164 return true;
16165
16166 /* MAX where operand 0 is positive is positive. */
16167 return tree_expr_nonnegative_warnv_p (op0,
16168 strict_overflow_p);
16169 }
16170 /* MAX where operand 1 is positive is positive. */
16171 else if (tree_expr_nonzero_warnv_p (op1,
16172 &sub_strict_overflow_p)
16173 && tree_expr_nonnegative_warnv_p (op1,
16174 &sub_strict_overflow_p))
16175 {
16176 if (sub_strict_overflow_p)
16177 *strict_overflow_p = true;
16178 return true;
16179 }
16180 break;
16181
16182 case BIT_IOR_EXPR:
16183 return (tree_expr_nonzero_warnv_p (op1,
16184 strict_overflow_p)
16185 || tree_expr_nonzero_warnv_p (op0,
16186 strict_overflow_p));
16187
16188 default:
16189 break;
16190 }
16191
16192 return false;
16193 }
16194
16195 /* Return true when T is an address and is known to be nonzero.
16196 For floating point we further ensure that T is not denormal.
   Similar logic is present in nonzero_address_p in rtlanal.c.
16198
16199 If the return value is based on the assumption that signed overflow
16200 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
16201 change *STRICT_OVERFLOW_P. */
16202
16203 bool
16204 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
16205 {
16206 bool sub_strict_overflow_p;
16207 switch (TREE_CODE (t))
16208 {
16209 case INTEGER_CST:
16210 return !integer_zerop (t);
16211
16212 case ADDR_EXPR:
16213 {
16214 tree base = TREE_OPERAND (t, 0);
16215 if (!DECL_P (base))
16216 base = get_base_address (base);
16217
16218 if (!base)
16219 return false;
16220
	/* Weak declarations may link to NULL.  Other things may also be NULL
	   so protect with -fdelete-null-pointer-checks; but this is not
	   needed for variables allocated on the stack, whose address is
	   always nonzero.  */
16224 if (DECL_P (base)
16225 && (flag_delete_null_pointer_checks
16226 || (DECL_CONTEXT (base)
16227 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
16228 && auto_var_in_fn_p (base, DECL_CONTEXT (base)))))
16229 return !VAR_OR_FUNCTION_DECL_P (base) || !DECL_WEAK (base);
16230
16231 /* Constants are never weak. */
16232 if (CONSTANT_CLASS_P (base))
16233 return true;
16234
16235 return false;
16236 }
16237
16238 case COND_EXPR:
16239 sub_strict_overflow_p = false;
16240 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
16241 &sub_strict_overflow_p)
16242 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
16243 &sub_strict_overflow_p))
16244 {
16245 if (sub_strict_overflow_p)
16246 *strict_overflow_p = true;
16247 return true;
16248 }
16249 break;
16250
16251 default:
16252 break;
16253 }
16254 return false;
16255 }
16256
16257 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
16258 attempt to fold the expression to a constant without modifying TYPE,
16259 OP0 or OP1.
16260
16261 If the expression could be simplified to a constant, then return
16262 the constant. If the expression would not be simplified to a
16263 constant, then return NULL_TREE. */
16264
16265 tree
16266 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
16267 {
16268 tree tem = fold_binary (code, type, op0, op1);
16269 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16270 }
16271
16272 /* Given the components of a unary expression CODE, TYPE and OP0,
16273 attempt to fold the expression to a constant without modifying
16274 TYPE or OP0.
16275
16276 If the expression could be simplified to a constant, then return
16277 the constant. If the expression would not be simplified to a
16278 constant, then return NULL_TREE. */
16279
16280 tree
16281 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
16282 {
16283 tree tem = fold_unary (code, type, op0);
16284 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
16285 }
16286
16287 /* If EXP represents referencing an element in a constant string
16288 (either via pointer arithmetic or array indexing), return the
16289 tree representing the value accessed, otherwise return NULL. */
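
/* For example, both "abc"[1] and *("abc" + 1) read element 1 of a
   constant string and fold to the character constant 'b'.  */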
16290
16291 tree
16292 fold_read_from_constant_string (tree exp)
16293 {
16294 if ((TREE_CODE (exp) == INDIRECT_REF
16295 || TREE_CODE (exp) == ARRAY_REF)
16296 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
16297 {
16298 tree exp1 = TREE_OPERAND (exp, 0);
16299 tree index;
16300 tree string;
16301 location_t loc = EXPR_LOCATION (exp);
16302
16303 if (TREE_CODE (exp) == INDIRECT_REF)
16304 string = string_constant (exp1, &index);
16305 else
16306 {
16307 tree low_bound = array_ref_low_bound (exp);
16308 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
16309
16310 /* Optimize the special-case of a zero lower bound.
16311
16312 We convert the low_bound to sizetype to avoid some problems
16313 with constant folding. (E.g. suppose the lower bound is 1,
	       and its mode is QI.  Without the conversion, (ARRAY
16315 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
16316 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
16317 if (! integer_zerop (low_bound))
16318 index = size_diffop_loc (loc, index,
16319 fold_convert_loc (loc, sizetype, low_bound));
16320
16321 string = exp1;
16322 }
16323
16324 if (string
16325 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
16326 && TREE_CODE (string) == STRING_CST
16327 && TREE_CODE (index) == INTEGER_CST
16328 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
16329 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
16330 == MODE_INT)
16331 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
16332 return build_int_cst_type (TREE_TYPE (exp),
16333 (TREE_STRING_POINTER (string)
16334 [TREE_INT_CST_LOW (index)]));
16335 }
16336 return NULL;
16337 }
16338
16339 /* Return the tree for neg (ARG0) when ARG0 is known to be either
16340 an integer constant, real, or fixed-point constant.
16341
16342 TYPE is the type of the result. */
16343
16344 static tree
16345 fold_negate_const (tree arg0, tree type)
16346 {
16347 tree t = NULL_TREE;
16348
16349 switch (TREE_CODE (arg0))
16350 {
16351 case INTEGER_CST:
16352 {
16353 double_int val = tree_to_double_int (arg0);
16354 bool overflow;
16355 val = val.neg_with_overflow (&overflow);
16356 t = force_fit_type_double (type, val, 1,
16357 (overflow | TREE_OVERFLOW (arg0))
16358 && !TYPE_UNSIGNED (type));
16359 break;
16360 }
16361
16362 case REAL_CST:
16363 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16364 break;
16365
16366 case FIXED_CST:
16367 {
16368 FIXED_VALUE_TYPE f;
16369 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
16370 &(TREE_FIXED_CST (arg0)), NULL,
16371 TYPE_SATURATING (type));
16372 t = build_fixed (type, f);
16373 /* Propagate overflow flags. */
16374 if (overflow_p | TREE_OVERFLOW (arg0))
16375 TREE_OVERFLOW (t) = 1;
16376 break;
16377 }
16378
16379 default:
16380 gcc_unreachable ();
16381 }
16382
16383 return t;
16384 }
16385
16386 /* Return the tree for abs (ARG0) when ARG0 is known to be either
16387 an integer constant or real constant.
16388
16389 TYPE is the type of the result. */
16390
16391 tree
16392 fold_abs_const (tree arg0, tree type)
16393 {
16394 tree t = NULL_TREE;
16395
16396 switch (TREE_CODE (arg0))
16397 {
16398 case INTEGER_CST:
16399 {
16400 double_int val = tree_to_double_int (arg0);
16401
16402 /* If the value is unsigned or non-negative, then the absolute value
16403 is the same as the ordinary value. */
16404 if (TYPE_UNSIGNED (type)
16405 || !val.is_negative ())
16406 t = arg0;
16407
16408 /* If the value is negative, then the absolute value is
16409 its negation. */
16410 else
16411 {
16412 bool overflow;
16413 val = val.neg_with_overflow (&overflow);
16414 t = force_fit_type_double (type, val, -1,
16415 overflow | TREE_OVERFLOW (arg0));
16416 }
16417 }
16418 break;
16419
16420 case REAL_CST:
16421 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
16422 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
16423 else
16424 t = arg0;
16425 break;
16426
16427 default:
16428 gcc_unreachable ();
16429 }
16430
16431 return t;
16432 }
16433
16434 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
16435 constant. TYPE is the type of the result. */
16436
16437 static tree
16438 fold_not_const (const_tree arg0, tree type)
16439 {
16440 double_int val;
16441
16442 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
16443
16444 val = ~tree_to_double_int (arg0);
16445 return force_fit_type_double (type, val, 0, TREE_OVERFLOW (arg0));
16446 }
16447
16448 /* Given CODE, a relational operator, the target type, TYPE and two
16449 constant operands OP0 and OP1, return the result of the
16450 relational operation. If the result is not a compile time
16451 constant, then return NULL_TREE. */
16452
16453 static tree
16454 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
16455 {
16456 int result, invert;
16457
16458 /* From here on, the only cases we handle are when the result is
16459 known to be a constant. */
16460
16461 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
16462 {
16463 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
16464 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
16465
16466 /* Handle the cases where either operand is a NaN. */
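      /* E.g. (NaN == x) folds to 0 and (NaN != x) folds to 1, while an
	 ordering test such as (NaN < x) is left unfolded when
	 -ftrapping-math is on, since evaluating it may raise an
	 invalid operand exception.  */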
16467 if (real_isnan (c0) || real_isnan (c1))
16468 {
16469 switch (code)
16470 {
16471 case EQ_EXPR:
16472 case ORDERED_EXPR:
16473 result = 0;
16474 break;
16475
16476 case NE_EXPR:
16477 case UNORDERED_EXPR:
16478 case UNLT_EXPR:
16479 case UNLE_EXPR:
16480 case UNGT_EXPR:
16481 case UNGE_EXPR:
16482 case UNEQ_EXPR:
16483 result = 1;
16484 break;
16485
16486 case LT_EXPR:
16487 case LE_EXPR:
16488 case GT_EXPR:
16489 case GE_EXPR:
16490 case LTGT_EXPR:
16491 if (flag_trapping_math)
16492 return NULL_TREE;
16493 result = 0;
16494 break;
16495
16496 default:
16497 gcc_unreachable ();
16498 }
16499
16500 return constant_boolean_node (result, type);
16501 }
16502
16503 return constant_boolean_node (real_compare (code, c0, c1), type);
16504 }
16505
16506 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
16507 {
16508 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
16509 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
16510 return constant_boolean_node (fixed_compare (code, c0, c1), type);
16511 }
16512
16513 /* Handle equality/inequality of complex constants. */
16514 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
16515 {
16516 tree rcond = fold_relational_const (code, type,
16517 TREE_REALPART (op0),
16518 TREE_REALPART (op1));
16519 tree icond = fold_relational_const (code, type,
16520 TREE_IMAGPART (op0),
16521 TREE_IMAGPART (op1));
16522 if (code == EQ_EXPR)
16523 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
16524 else if (code == NE_EXPR)
16525 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
16526 else
16527 return NULL_TREE;
16528 }
16529
16530 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
16531 {
16532 unsigned count = VECTOR_CST_NELTS (op0);
16533 tree *elts = XALLOCAVEC (tree, count);
16534 gcc_assert (VECTOR_CST_NELTS (op1) == count
16535 && TYPE_VECTOR_SUBPARTS (type) == count);
16536
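      /* Fold each lane separately; e.g. {1, 2} < {2, 2} yields the
	 mask {-1, 0}, since vector comparisons encode true as all
	 ones.  */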
16537 for (unsigned i = 0; i < count; i++)
16538 {
16539 tree elem_type = TREE_TYPE (type);
16540 tree elem0 = VECTOR_CST_ELT (op0, i);
16541 tree elem1 = VECTOR_CST_ELT (op1, i);
16542
16543 tree tem = fold_relational_const (code, elem_type,
16544 elem0, elem1);
16545
16546 if (tem == NULL_TREE)
16547 return NULL_TREE;
16548
16549 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
16550 }
16551
16552 return build_vector (type, elts);
16553 }
16554
16555 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
16556
16557 To compute GT, swap the arguments and do LT.
16558 To compute GE, do LT and invert the result.
16559 To compute LE, swap the arguments, do LT and invert the result.
16560 To compute NE, do EQ and invert the result.
16561
16562 Therefore, the code below must handle only EQ and LT. */
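  /* E.g. (a >= b) is evaluated as !(a < b), and (a <= b) as
     !(b < a).  */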
16563
16564 if (code == LE_EXPR || code == GT_EXPR)
16565 {
16566 tree tem = op0;
16567 op0 = op1;
16568 op1 = tem;
16569 code = swap_tree_comparison (code);
16570 }
16571
16572 /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
16574
16575 invert = 0;
16576 if (code == NE_EXPR || code == GE_EXPR)
16577 {
16578 invert = 1;
16579 code = invert_tree_comparison (code, false);
16580 }
16581
  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
16584 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
16585 {
16586 if (code == EQ_EXPR)
16587 result = tree_int_cst_equal (op0, op1);
16588 else if (TYPE_UNSIGNED (TREE_TYPE (op0)))
16589 result = INT_CST_LT_UNSIGNED (op0, op1);
16590 else
16591 result = INT_CST_LT (op0, op1);
16592 }
16593 else
16594 return NULL_TREE;
16595
16596 if (invert)
16597 result ^= 1;
16598 return constant_boolean_node (result, type);
16599 }
16600
16601 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
16602 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
16603 itself. */
16604
16605 tree
16606 fold_build_cleanup_point_expr (tree type, tree expr)
16607 {
16608 /* If the expression does not have side effects then we don't have to wrap
16609 it with a cleanup point expression. */
16610 if (!TREE_SIDE_EFFECTS (expr))
16611 return expr;
16612
  /* If the expression is a return, check whether the expression inside the
     return, or the right-hand side of the modify expression inside the
     return, has no side effects.  If either has no side effects, we don't
     need to wrap the expression in a cleanup point expression.  Note we
     don't check the left-hand side of the modify because it should always
     be a return decl.  */
16618 if (TREE_CODE (expr) == RETURN_EXPR)
16619 {
16620 tree op = TREE_OPERAND (expr, 0);
16621 if (!op || !TREE_SIDE_EFFECTS (op))
16622 return expr;
16623 op = TREE_OPERAND (op, 1);
16624 if (!TREE_SIDE_EFFECTS (op))
16625 return expr;
16626 }
16627
16628 return build1 (CLEANUP_POINT_EXPR, type, expr);
16629 }
16630
16631 /* Given a pointer value OP0 and a type TYPE, return a simplified version
16632 of an indirection through OP0, or NULL_TREE if no simplification is
16633 possible. */
16634
16635 tree
16636 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
16637 {
16638 tree sub = op0;
16639 tree subtype;
16640
16641 STRIP_NOPS (sub);
16642 subtype = TREE_TYPE (sub);
16643 if (!POINTER_TYPE_P (subtype))
16644 return NULL_TREE;
16645
16646 if (TREE_CODE (sub) == ADDR_EXPR)
16647 {
16648 tree op = TREE_OPERAND (sub, 0);
16649 tree optype = TREE_TYPE (op);
      /* *&CONST_DECL => the value of the const decl.  */
16651 if (TREE_CODE (op) == CONST_DECL)
16652 return DECL_INITIAL (op);
16653 /* *&p => p; make sure to handle *&"str"[cst] here. */
16654 if (type == optype)
16655 {
16656 tree fop = fold_read_from_constant_string (op);
16657 if (fop)
16658 return fop;
16659 else
16660 return op;
16661 }
16662 /* *(foo *)&fooarray => fooarray[0] */
16663 else if (TREE_CODE (optype) == ARRAY_TYPE
16664 && type == TREE_TYPE (optype)
16665 && (!in_gimple_form
16666 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16667 {
16668 tree type_domain = TYPE_DOMAIN (optype);
16669 tree min_val = size_zero_node;
16670 if (type_domain && TYPE_MIN_VALUE (type_domain))
16671 min_val = TYPE_MIN_VALUE (type_domain);
16672 if (in_gimple_form
16673 && TREE_CODE (min_val) != INTEGER_CST)
16674 return NULL_TREE;
16675 return build4_loc (loc, ARRAY_REF, type, op, min_val,
16676 NULL_TREE, NULL_TREE);
16677 }
16678 /* *(foo *)&complexfoo => __real__ complexfoo */
16679 else if (TREE_CODE (optype) == COMPLEX_TYPE
16680 && type == TREE_TYPE (optype))
16681 return fold_build1_loc (loc, REALPART_EXPR, type, op);
16682 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
16683 else if (TREE_CODE (optype) == VECTOR_TYPE
16684 && type == TREE_TYPE (optype))
16685 {
16686 tree part_width = TYPE_SIZE (type);
16687 tree index = bitsize_int (0);
16688 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
16689 }
16690 }
16691
16692 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
16693 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
16694 {
16695 tree op00 = TREE_OPERAND (sub, 0);
16696 tree op01 = TREE_OPERAND (sub, 1);
16697
16698 STRIP_NOPS (op00);
16699 if (TREE_CODE (op00) == ADDR_EXPR)
16700 {
16701 tree op00type;
16702 op00 = TREE_OPERAND (op00, 0);
16703 op00type = TREE_TYPE (op00);
16704
16705 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
16706 if (TREE_CODE (op00type) == VECTOR_TYPE
16707 && type == TREE_TYPE (op00type))
16708 {
16709 HOST_WIDE_INT offset = tree_low_cst (op01, 0);
16710 tree part_width = TYPE_SIZE (type);
16711 unsigned HOST_WIDE_INT part_widthi = tree_low_cst (part_width, 0)/BITS_PER_UNIT;
16712 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
16713 tree index = bitsize_int (indexi);
16714
	      if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
16716 return fold_build3_loc (loc,
16717 BIT_FIELD_REF, type, op00,
16718 part_width, index);
16719
16720 }
16721 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
16722 else if (TREE_CODE (op00type) == COMPLEX_TYPE
16723 && type == TREE_TYPE (op00type))
16724 {
16725 tree size = TYPE_SIZE_UNIT (type);
16726 if (tree_int_cst_equal (size, op01))
16727 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
16728 }
16729 /* ((foo *)&fooarray)[1] => fooarray[1] */
16730 else if (TREE_CODE (op00type) == ARRAY_TYPE
16731 && type == TREE_TYPE (op00type))
16732 {
16733 tree type_domain = TYPE_DOMAIN (op00type);
16734 tree min_val = size_zero_node;
16735 if (type_domain && TYPE_MIN_VALUE (type_domain))
16736 min_val = TYPE_MIN_VALUE (type_domain);
16737 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
16738 TYPE_SIZE_UNIT (type));
16739 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
16740 return build4_loc (loc, ARRAY_REF, type, op00, op01,
16741 NULL_TREE, NULL_TREE);
16742 }
16743 }
16744 }
16745
16746 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
16747 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
16748 && type == TREE_TYPE (TREE_TYPE (subtype))
16749 && (!in_gimple_form
16750 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
16751 {
16752 tree type_domain;
16753 tree min_val = size_zero_node;
16754 sub = build_fold_indirect_ref_loc (loc, sub);
16755 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
16756 if (type_domain && TYPE_MIN_VALUE (type_domain))
16757 min_val = TYPE_MIN_VALUE (type_domain);
16758 if (in_gimple_form
16759 && TREE_CODE (min_val) != INTEGER_CST)
16760 return NULL_TREE;
16761 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
16762 NULL_TREE);
16763 }
16764
16765 return NULL_TREE;
16766 }
16767
16768 /* Builds an expression for an indirection through T, simplifying some
16769 cases. */
16770
tree
build_fold_indirect_ref_loc (location_t loc, tree t)
{
  tree type = TREE_TYPE (TREE_TYPE (t));
  tree sub = fold_indirect_ref_1 (loc, type, t);

  if (sub)
    return sub;

  return build1_loc (loc, INDIRECT_REF, type, t);
}

/* Given an INDIRECT_REF T, return either T or a simplified version.  */

tree
fold_indirect_ref_loc (location_t loc, tree t)
{
  tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));

  if (sub)
    return sub;
  else
    return t;
}

/* Strip non-trapping, non-side-effecting tree nodes from an expression
   whose result is ignored.  The type of the returned tree need not be
   the same as that of the original expression.  */

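/* For example (illustrative): for T == COMPOUND_EXPR <f (), x + 1>
   the side-effect-free addition is dropped and f () is returned,
   while a COND_EXPR whose arms are both free of side effects is
   reduced to its condition.  */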
tree
fold_ignored_result (tree t)
{
  if (!TREE_SIDE_EFFECTS (t))
    return integer_zero_node;

  for (;;)
    switch (TREE_CODE_CLASS (TREE_CODE (t)))
      {
      case tcc_unary:
        t = TREE_OPERAND (t, 0);
        break;

      case tcc_binary:
      case tcc_comparison:
        if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
          t = TREE_OPERAND (t, 0);
        else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
          t = TREE_OPERAND (t, 1);
        else
          return t;
        break;

      case tcc_expression:
        switch (TREE_CODE (t))
          {
          case COMPOUND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          case COND_EXPR:
            if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
                || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
              return t;
            t = TREE_OPERAND (t, 0);
            break;

          default:
            return t;
          }
        break;

      default:
        return t;
      }
}

/* Return the value of VALUE, rounded up to a multiple of DIVISOR.
   This can only be applied to objects of a sizetype.  */

tree
round_up_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Skip this check when VALUE is a constant: for a
     constant, the check costs more than simply performing the
     rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
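  /* E.g. for DIVISOR == 8 the non-constant path below computes
     (VALUE + 7) & -8, so a VALUE of 13 rounds up to 16.  */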
  if (divisor == (divisor & -divisor))
    {
      if (TREE_CODE (value) == INTEGER_CST)
        {
          double_int val = tree_to_double_int (value);
          bool overflow_p;

          if ((val.low & (divisor - 1)) == 0)
            return value;

          overflow_p = TREE_OVERFLOW (value);
          val.low &= ~(divisor - 1);
          val.low += divisor;
          if (val.low == 0)
            {
              val.high++;
              if (val.high == 0)
                overflow_p = true;
            }

          return force_fit_type_double (TREE_TYPE (value), val,
                                        -1, overflow_p);
        }
      else
        {
          tree t;

          t = build_int_cst (TREE_TYPE (value), divisor - 1);
          value = size_binop_loc (loc, PLUS_EXPR, value, t);
          t = build_int_cst (TREE_TYPE (value), -divisor);
          value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
        }
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Likewise, but round down.  */

tree
round_down_loc (location_t loc, tree value, int divisor)
{
  tree div = NULL_TREE;

  gcc_assert (divisor > 0);
  if (divisor == 1)
    return value;

  /* See if VALUE is already a multiple of DIVISOR.  If so, there is
     nothing to do.  Skip this check when VALUE is a constant: for a
     constant, the check costs more than simply performing the
     rounding.  */
  if (TREE_CODE (value) != INTEGER_CST)
    {
      div = build_int_cst (TREE_TYPE (value), divisor);

      if (multiple_of_p (TREE_TYPE (value), value, div))
        return value;
    }

  /* If divisor is a power of two, simplify this to bit manipulation.  */
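  /* E.g. for DIVISOR == 8 this computes VALUE & -8, so a VALUE of 13
     rounds down to 8.  */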
  if (divisor == (divisor & -divisor))
    {
      tree t;

      t = build_int_cst (TREE_TYPE (value), -divisor);
      value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
    }
  else
    {
      if (!div)
        div = build_int_cst (TREE_TYPE (value), divisor);
      value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
      value = size_binop_loc (loc, MULT_EXPR, value, div);
    }

  return value;
}

/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it in
   PBITPOS (the constant bit offset) and POFFSET (any variable part of
   the offset, or NULL_TREE).  */

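/* For example (illustrative, assuming BITS_PER_UNIT == 8): for
   EXP == &s.f, where field f lives at byte 4 of s, the returned core
   is &s, *PBITPOS is 32 and *POFFSET is NULL_TREE; a variable array
   index would show up in *POFFSET instead.  */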
static tree
split_address_to_core_and_offset (tree exp,
                                  HOST_WIDE_INT *pbitpos, tree *poffset)
{
  tree core;
  enum machine_mode mode;
  int unsignedp, volatilep;
  HOST_WIDE_INT bitsize;
  location_t loc = EXPR_LOCATION (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    {
      core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
                                  poffset, &mode, &unsignedp, &volatilep,
                                  false);
      core = build_fold_addr_expr_loc (loc, core);
    }
  else
    {
      core = exp;
      *pbitpos = 0;
      *poffset = NULL_TREE;
    }

  return core;
}

/* Returns true if addresses of E1 and E2 differ by a constant, false
   otherwise.  If they do, E1 - E2 is stored in *DIFF.  */

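/* For example (illustrative): for E1 == &a[2] and E2 == &a[0], with a
   an array of 4-byte elements, *DIFF is set to 8 and true is
   returned; if either address involves a variable offset, the
   difference is not constant and false is returned.  */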
bool
ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
{
  tree core1, core2;
  HOST_WIDE_INT bitpos1, bitpos2;
  tree toffset1, toffset2, tdiff, type;

  core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
  core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);

  if (bitpos1 % BITS_PER_UNIT != 0
      || bitpos2 % BITS_PER_UNIT != 0
      || !operand_equal_p (core1, core2, 0))
    return false;

  if (toffset1 && toffset2)
    {
      type = TREE_TYPE (toffset1);
      if (type != TREE_TYPE (toffset2))
        toffset2 = fold_convert (type, toffset2);

      tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
      if (!cst_and_fits_in_hwi (tdiff))
        return false;

      *diff = int_cst_value (tdiff);
    }
  else if (toffset1 || toffset2)
    {
      /* If only one of the offsets is non-constant, the difference cannot
         be a constant.  */
      return false;
    }
  else
    *diff = 0;

  *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
  return true;
}

/* Simplify the floating point expression EXP when the sign of the
   result is not significant.  Return NULL_TREE if no simplification
   is possible.  */

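/* For example (illustrative): for EXP == -x * y the negation is
   stripped and x * y is returned, and copysign (x, y) is replaced by
   its first argument, with any side effects of y preserved by
   omit_one_operand_loc.  */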
tree
fold_strip_sign_ops (tree exp)
{
  tree arg0, arg1;
  location_t loc = EXPR_LOCATION (exp);

  switch (TREE_CODE (exp))
    {
    case ABS_EXPR:
    case NEGATE_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      return arg0 ? arg0 : TREE_OPERAND (exp, 0);

    case MULT_EXPR:
    case RDIV_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (TYPE_MODE (TREE_TYPE (exp))))
        return NULL_TREE;
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg0 != NULL_TREE || arg1 != NULL_TREE)
        return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
                                arg0 ? arg0 : TREE_OPERAND (exp, 0),
                                arg1 ? arg1 : TREE_OPERAND (exp, 1));
      break;

    case COMPOUND_EXPR:
      arg0 = TREE_OPERAND (exp, 0);
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      if (arg1)
        return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp),
                                arg0, arg1);
      break;

    case COND_EXPR:
      arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
      arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
      if (arg0 || arg1)
        return fold_build3_loc (loc,
                                COND_EXPR, TREE_TYPE (exp),
                                TREE_OPERAND (exp, 0),
                                arg0 ? arg0 : TREE_OPERAND (exp, 1),
                                arg1 ? arg1 : TREE_OPERAND (exp, 2));
      break;

    case CALL_EXPR:
      {
        const enum built_in_function fcode = builtin_mathfn_code (exp);
        switch (fcode)
          {
          CASE_FLT_FN (BUILT_IN_COPYSIGN):
            /* Strip copysign function call, return the 1st argument.  */
            arg0 = CALL_EXPR_ARG (exp, 0);
            arg1 = CALL_EXPR_ARG (exp, 1);
            return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);

          default:
            /* Strip sign ops from the argument of "odd" math functions.  */
            if (negate_mathfn_p (fcode))
              {
                arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
                if (arg0)
                  return build_call_expr_loc (loc, get_callee_fndecl (exp),
                                              1, arg0);
              }
            break;
          }
      }
      break;

    default:
      break;
    }
  return NULL_TREE;
}