1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24    @@    warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "hash-set.h"
49 #include "machmode.h"
50 #include "vec.h"
51 #include "double-int.h"
52 #include "input.h"
53 #include "alias.h"
54 #include "symtab.h"
55 #include "wide-int.h"
56 #include "inchash.h"
57 #include "tree.h"
58 #include "fold-const.h"
59 #include "stor-layout.h"
60 #include "calls.h"
61 #include "tree-iterator.h"
62 #include "realmpfr.h"
63 #include "rtl.h"
64 #include "hashtab.h"
65 #include "hard-reg-set.h"
66 #include "function.h"
67 #include "statistics.h"
68 #include "real.h"
69 #include "fixed-value.h"
70 #include "insn-config.h"
71 #include "expmed.h"
72 #include "dojump.h"
73 #include "explow.h"
74 #include "emit-rtl.h"
75 #include "varasm.h"
76 #include "stmt.h"
77 #include "expr.h"
78 #include "tm_p.h"
79 #include "target.h"
80 #include "diagnostic-core.h"
81 #include "intl.h"
82 #include "langhooks.h"
83 #include "md5.h"
84 #include "predict.h"
85 #include "basic-block.h"
86 #include "tree-ssa-alias.h"
87 #include "internal-fn.h"
88 #include "tree-eh.h"
89 #include "gimple-expr.h"
90 #include "is-a.h"
91 #include "gimple.h"
92 #include "gimplify.h"
93 #include "tree-dfa.h"
94 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
95 #include "builtins.h"
96 #include "hash-map.h"
97 #include "plugin-api.h"
98 #include "ipa-ref.h"
99 #include "cgraph.h"
100 #include "generic-match.h"
101 #include "optabs.h"
102
103 /* Nonzero if we are folding constants inside an initializer; zero
104 otherwise. */
105 int folding_initializer = 0;
106
107 /* The following constants represent a bit based encoding of GCC's
108 comparison operators. This encoding simplifies transformations
109 on relational comparison operators, such as AND and OR. */
110 enum comparison_code {
111 COMPCODE_FALSE = 0,
112 COMPCODE_LT = 1,
113 COMPCODE_EQ = 2,
114 COMPCODE_LE = 3,
115 COMPCODE_GT = 4,
116 COMPCODE_LTGT = 5,
117 COMPCODE_GE = 6,
118 COMPCODE_ORD = 7,
119 COMPCODE_UNORD = 8,
120 COMPCODE_UNLT = 9,
121 COMPCODE_UNEQ = 10,
122 COMPCODE_UNLE = 11,
123 COMPCODE_UNGT = 12,
124 COMPCODE_NE = 13,
125 COMPCODE_UNGE = 14,
126 COMPCODE_TRUE = 15
127 };
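/* For illustration (an observation about the encoding, not from the
   original sources): bit 0 of a compcode encodes "less than", bit 1
   "equal", bit 2 "greater than" and bit 3 "unordered", so combined
   comparisons fold with plain bitwise operators.  E.g. for
   (a < b || a == b):

     COMPCODE_LT | COMPCODE_EQ == (1 | 2) == 3 == COMPCODE_LE

   and for (a < b || a > b || isunordered (a, b)):

     COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD
       == (1 | 4 | 8) == 13 == COMPCODE_NE  */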
128
129 static bool negate_mathfn_p (enum built_in_function);
130 static bool negate_expr_p (tree);
131 static tree negate_expr (tree);
132 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
133 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
134 static enum comparison_code comparison_to_compcode (enum tree_code);
135 static enum tree_code compcode_to_comparison (enum comparison_code);
136 static int operand_equal_for_comparison_p (tree, tree, tree);
137 static int twoval_comparison_p (tree, tree *, tree *, int *);
138 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
139 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
140 static tree make_bit_field_ref (location_t, tree, tree,
141 HOST_WIDE_INT, HOST_WIDE_INT, int);
142 static tree optimize_bit_field_compare (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
145 HOST_WIDE_INT *,
146 machine_mode *, int *, int *,
147 tree *, tree *);
148 static int simple_operand_p (const_tree);
149 static bool simple_operand_p_2 (tree);
150 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
151 static tree range_predecessor (tree);
152 static tree range_successor (tree);
153 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
155 static tree unextend (tree, int, int, tree);
156 static tree optimize_minmax_comparison (location_t, enum tree_code,
157 tree, tree, tree);
158 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
159 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
160 static tree fold_binary_op_with_conditional_arg (location_t,
161 enum tree_code, tree,
162 tree, tree,
163 tree, tree, int);
164 static tree fold_mathfn_compare (location_t,
165 enum built_in_function, enum tree_code,
166 tree, tree, tree);
167 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
168 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
169 static bool reorder_operands_p (const_tree, const_tree);
170 static tree fold_negate_const (tree, tree);
171 static tree fold_not_const (const_tree, tree);
172 static tree fold_relational_const (enum tree_code, tree, tree, tree);
173 static tree fold_convert_const (enum tree_code, tree, tree);
174 static tree fold_view_convert_expr (tree, tree);
175 static bool vec_cst_ctor_to_array (tree, tree *);
176
177
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
180
181 static location_t
182 expr_location_or (tree t, location_t loc)
183 {
184 location_t tloc = EXPR_LOCATION (t);
185 return tloc == UNKNOWN_LOCATION ? loc : tloc;
186 }
187
188 /* Similar to protected_set_expr_location, but never modify x in place;
189    if the location can and needs to be set, unshare it first.  */
190
191 static inline tree
192 protected_set_expr_location_unshare (tree x, location_t loc)
193 {
194 if (CAN_HAVE_LOCATION_P (x)
195 && EXPR_LOCATION (x) != loc
196 && !(TREE_CODE (x) == SAVE_EXPR
197 || TREE_CODE (x) == TARGET_EXPR
198 || TREE_CODE (x) == BIND_EXPR))
199 {
200 x = copy_node (x);
201 SET_EXPR_LOCATION (x, loc);
202 }
203 return x;
204 }
205 \f
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
208 NULL_TREE. */
209
210 tree
211 div_if_zero_remainder (const_tree arg1, const_tree arg2)
212 {
213 widest_int quo;
214
215 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
216 SIGNED, &quo))
217 return wide_int_to_tree (TREE_TYPE (arg1), quo);
218
219 return NULL_TREE;
220 }
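/* For example (illustrative, not part of the original file): with
   INTEGER_CST arguments, div_if_zero_remainder (12, 4) yields the
   INTEGER_CST 3, while div_if_zero_remainder (13, 4) yields
   NULL_TREE because the division leaves remainder 1.  */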
221 \f
222 /* This is nonzero if we should defer warnings about undefined
223 overflow. This facility exists because these warnings are a
224 special case. The code to estimate loop iterations does not want
225 to issue any warnings, since it works with expressions which do not
226 occur in user code. Various bits of cleanup code call fold(), but
227 only use the result if it has certain characteristics (e.g., is a
228 constant); that code only wants to issue a warning if the result is
229 used. */
230
231 static int fold_deferring_overflow_warnings;
232
233 /* If a warning about undefined overflow is deferred, this is the
234 warning. Note that this may cause us to turn two warnings into
235 one, but that is fine since it is sufficient to only give one
236 warning per expression. */
237
238 static const char* fold_deferred_overflow_warning;
239
240 /* If a warning about undefined overflow is deferred, this is the
241 level at which the warning should be emitted. */
242
243 static enum warn_strict_overflow_code fold_deferred_overflow_code;
244
245 /* Start deferring overflow warnings. We could use a stack here to
246 permit nested calls, but at present it is not necessary. */
247
248 void
249 fold_defer_overflow_warnings (void)
250 {
251 ++fold_deferring_overflow_warnings;
252 }
253
254 /* Stop deferring overflow warnings. If there is a pending warning,
255 and ISSUE is true, then issue the warning if appropriate. STMT is
256 the statement with which the warning should be associated (used for
257 location information); STMT may be NULL. CODE is the level of the
258 warning--a warn_strict_overflow_code value. This function will use
259 the smaller of CODE and the deferred code when deciding whether to
260 issue the warning. CODE may be zero to mean to always use the
261 deferred code. */
262
263 void
264 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
265 {
266 const char *warnmsg;
267 location_t locus;
268
269 gcc_assert (fold_deferring_overflow_warnings > 0);
270 --fold_deferring_overflow_warnings;
271 if (fold_deferring_overflow_warnings > 0)
272 {
273 if (fold_deferred_overflow_warning != NULL
274 && code != 0
275 && code < (int) fold_deferred_overflow_code)
276 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
277 return;
278 }
279
280 warnmsg = fold_deferred_overflow_warning;
281 fold_deferred_overflow_warning = NULL;
282
283 if (!issue || warnmsg == NULL)
284 return;
285
286 if (gimple_no_warning_p (stmt))
287 return;
288
289 /* Use the smallest code level when deciding to issue the
290 warning. */
291 if (code == 0 || code > (int) fold_deferred_overflow_code)
292 code = fold_deferred_overflow_code;
293
294 if (!issue_strict_overflow_warning (code))
295 return;
296
297 if (stmt == NULL)
298 locus = input_location;
299 else
300 locus = gimple_location (stmt);
301 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
302 }
303
304 /* Stop deferring overflow warnings, ignoring any deferred
305 warnings. */
306
307 void
308 fold_undefer_and_ignore_overflow_warnings (void)
309 {
310 fold_undefer_overflow_warnings (false, NULL, 0);
311 }
312
313 /* Whether we are deferring overflow warnings. */
314
315 bool
316 fold_deferring_overflow_warnings_p (void)
317 {
318 return fold_deferring_overflow_warnings > 0;
319 }
320
321 /* This is called when we fold something based on the fact that signed
322 overflow is undefined. */
323
324 static void
325 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
326 {
327 if (fold_deferring_overflow_warnings > 0)
328 {
329 if (fold_deferred_overflow_warning == NULL
330 || wc < fold_deferred_overflow_code)
331 {
332 fold_deferred_overflow_warning = gmsgid;
333 fold_deferred_overflow_code = wc;
334 }
335 }
336 else if (issue_strict_overflow_warning (wc))
337 warning (OPT_Wstrict_overflow, gmsgid);
338 }
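/* A typical caller pattern (an illustrative sketch, not a call site
   in this file; USED_P and STMT are placeholders).  The cleanup code
   mentioned above, which only uses certain fold results, works this
   way:

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, a, b);
     ... decide whether RES will actually be used ...
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   so that a -Wstrict-overflow diagnostic is emitted only if the
   folded result is really used.  */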
339 \f
340 /* Return true if the built-in mathematical function specified by CODE
341 is odd, i.e. -f(x) == f(-x). */
342
343 static bool
344 negate_mathfn_p (enum built_in_function code)
345 {
346 switch (code)
347 {
348 CASE_FLT_FN (BUILT_IN_ASIN):
349 CASE_FLT_FN (BUILT_IN_ASINH):
350 CASE_FLT_FN (BUILT_IN_ATAN):
351 CASE_FLT_FN (BUILT_IN_ATANH):
352 CASE_FLT_FN (BUILT_IN_CASIN):
353 CASE_FLT_FN (BUILT_IN_CASINH):
354 CASE_FLT_FN (BUILT_IN_CATAN):
355 CASE_FLT_FN (BUILT_IN_CATANH):
356 CASE_FLT_FN (BUILT_IN_CBRT):
357 CASE_FLT_FN (BUILT_IN_CPROJ):
358 CASE_FLT_FN (BUILT_IN_CSIN):
359 CASE_FLT_FN (BUILT_IN_CSINH):
360 CASE_FLT_FN (BUILT_IN_CTAN):
361 CASE_FLT_FN (BUILT_IN_CTANH):
362 CASE_FLT_FN (BUILT_IN_ERF):
363 CASE_FLT_FN (BUILT_IN_LLROUND):
364 CASE_FLT_FN (BUILT_IN_LROUND):
365 CASE_FLT_FN (BUILT_IN_ROUND):
366 CASE_FLT_FN (BUILT_IN_SIN):
367 CASE_FLT_FN (BUILT_IN_SINH):
368 CASE_FLT_FN (BUILT_IN_TAN):
369 CASE_FLT_FN (BUILT_IN_TANH):
370 CASE_FLT_FN (BUILT_IN_TRUNC):
371 return true;
372
373 CASE_FLT_FN (BUILT_IN_LLRINT):
374 CASE_FLT_FN (BUILT_IN_LRINT):
375 CASE_FLT_FN (BUILT_IN_NEARBYINT):
376 CASE_FLT_FN (BUILT_IN_RINT):
377 return !flag_rounding_math;
378
379 default:
380 break;
381 }
382 return false;
383 }
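/* For example: sin is odd (-sin (x) == sin (-x)), so BUILT_IN_SIN
   appears above, while cos is even (cos (-x) == cos (x)) and is
   rightly absent.  The rint family is odd only when the dynamic
   rounding mode cannot matter, hence the !flag_rounding_math guard:
   under FE_DOWNWARD, rint (-0.5) == -1.0 but -rint (0.5) == -0.0.  */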
384
385 /* Check whether we may negate an integer constant T without causing
386 overflow. */
387
388 bool
389 may_negate_without_overflow_p (const_tree t)
390 {
391 tree type;
392
393 gcc_assert (TREE_CODE (t) == INTEGER_CST);
394
395 type = TREE_TYPE (t);
396 if (TYPE_UNSIGNED (type))
397 return false;
398
399 return !wi::only_sign_bit_p (t);
400 }
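/* E.g. for a 32-bit int, INT_MIN == -2147483648 has no representable
   negation; its bit pattern 0x80000000 consists of the sign bit
   alone, which is exactly what wi::only_sign_bit_p detects.  Every
   other value of a signed type negates without overflow.  */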
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 return true;
430
431 case NEGATE_EXPR:
432 return !TYPE_OVERFLOW_SANITIZED (type);
433
434 case REAL_CST:
435 /* We want to canonicalize to positive real constants. Pretend
436 that only negative ones can be easily negated. */
437 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
438
439 case COMPLEX_CST:
440 return negate_expr_p (TREE_REALPART (t))
441 && negate_expr_p (TREE_IMAGPART (t));
442
443 case VECTOR_CST:
444 {
445 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
446 return true;
447
448 int count = TYPE_VECTOR_SUBPARTS (type), i;
449
450 for (i = 0; i < count; i++)
451 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
452 return false;
453
454 return true;
455 }
456
457 case COMPLEX_EXPR:
458 return negate_expr_p (TREE_OPERAND (t, 0))
459 && negate_expr_p (TREE_OPERAND (t, 1));
460
461 case CONJ_EXPR:
462 return negate_expr_p (TREE_OPERAND (t, 0));
463
464 case PLUS_EXPR:
465 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
466 || HONOR_SIGNED_ZEROS (element_mode (type)))
467 return false;
468 /* -(A + B) -> (-B) - A. */
469 if (negate_expr_p (TREE_OPERAND (t, 1))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1)))
472 return true;
473 /* -(A + B) -> (-A) - B. */
474 return negate_expr_p (TREE_OPERAND (t, 0));
475
476 case MINUS_EXPR:
477 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
478 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
479 && !HONOR_SIGNED_ZEROS (element_mode (type))
480 && reorder_operands_p (TREE_OPERAND (t, 0),
481 TREE_OPERAND (t, 1));
482
483 case MULT_EXPR:
484 if (TYPE_UNSIGNED (TREE_TYPE (t)))
485 break;
486
487 /* Fall through. */
488
489 case RDIV_EXPR:
490 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
491 return negate_expr_p (TREE_OPERAND (t, 1))
492 || negate_expr_p (TREE_OPERAND (t, 0));
493 break;
494
495 case TRUNC_DIV_EXPR:
496 case ROUND_DIV_EXPR:
497 case EXACT_DIV_EXPR:
498 /* In general we can't negate A / B, because if A is INT_MIN and
499 B is 1, we may turn this into INT_MIN / -1 which is undefined
500 and actually traps on some architectures. But if overflow is
501 undefined, we can negate, because - (INT_MIN / 1) is an
502 overflow. */
503 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
504 {
505 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
506 break;
507 /* If overflow is undefined then we have to be careful because
508 we ask whether it's ok to associate the negate with the
509 division which is not ok for example for
510 -((a - b) / c) where (-(a - b)) / c may invoke undefined
511 overflow because of negating INT_MIN. So do not use
512 negate_expr_p here but open-code the two important cases. */
513 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
514 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
515 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
516 return true;
517 }
518 else if (negate_expr_p (TREE_OPERAND (t, 0)))
519 return true;
520 return negate_expr_p (TREE_OPERAND (t, 1));
521
522 case NOP_EXPR:
523 /* Negate -((double)float) as (double)(-float). */
524 if (TREE_CODE (type) == REAL_TYPE)
525 {
526 tree tem = strip_float_extensions (t);
527 if (tem != t)
528 return negate_expr_p (tem);
529 }
530 break;
531
532 case CALL_EXPR:
533 /* Negate -f(x) as f(-x). */
534 if (negate_mathfn_p (builtin_mathfn_code (t)))
535 return negate_expr_p (CALL_EXPR_ARG (t, 0));
536 break;
537
538 case RSHIFT_EXPR:
539 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
540 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
541 {
542 tree op1 = TREE_OPERAND (t, 1);
543 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
544 return true;
545 }
546 break;
547
548 default:
549 break;
550 }
551 return false;
552 }
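/* For example (illustrative), -(a + 5) is considered cheaply
   negatable: operand 1 is the INTEGER_CST 5, which can absorb the
   sign, so the whole expression can become (-5) - a with no extra
   operations (the matching transform lives in fold_negate_expr
   below).  */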
553
554 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
555 simplification is possible.
556 If negate_expr_p would return true for T, NULL_TREE will never be
557 returned. */
558
559 static tree
560 fold_negate_expr (location_t loc, tree t)
561 {
562 tree type = TREE_TYPE (t);
563 tree tem;
564
565 switch (TREE_CODE (t))
566 {
567 /* Convert - (~A) to A + 1. */
568 case BIT_NOT_EXPR:
569 if (INTEGRAL_TYPE_P (type))
570 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
571 build_one_cst (type));
572 break;
573
574 case INTEGER_CST:
575 tem = fold_negate_const (t, type);
576 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
577 || (ANY_INTEGRAL_TYPE_P (type)
578 && !TYPE_OVERFLOW_TRAPS (type)
579 && TYPE_OVERFLOW_WRAPS (type))
580 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
581 return tem;
582 break;
583
584 case REAL_CST:
585 tem = fold_negate_const (t, type);
586 return tem;
587
588 case FIXED_CST:
589 tem = fold_negate_const (t, type);
590 return tem;
591
592 case COMPLEX_CST:
593 {
594 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
595 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
596 if (rpart && ipart)
597 return build_complex (type, rpart, ipart);
598 }
599 break;
600
601 case VECTOR_CST:
602 {
603 int count = TYPE_VECTOR_SUBPARTS (type), i;
604 tree *elts = XALLOCAVEC (tree, count);
605
606 for (i = 0; i < count; i++)
607 {
608 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
609 if (elts[i] == NULL_TREE)
610 return NULL_TREE;
611 }
612
613 return build_vector (type, elts);
614 }
615
616 case COMPLEX_EXPR:
617 if (negate_expr_p (t))
618 return fold_build2_loc (loc, COMPLEX_EXPR, type,
619 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
620 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
621 break;
622
623 case CONJ_EXPR:
624 if (negate_expr_p (t))
625 return fold_build1_loc (loc, CONJ_EXPR, type,
626 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
627 break;
628
629 case NEGATE_EXPR:
630 if (!TYPE_OVERFLOW_SANITIZED (type))
631 return TREE_OPERAND (t, 0);
632 break;
633
634 case PLUS_EXPR:
635 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
636 && !HONOR_SIGNED_ZEROS (element_mode (type)))
637 {
638 /* -(A + B) -> (-B) - A. */
639 if (negate_expr_p (TREE_OPERAND (t, 1))
640 && reorder_operands_p (TREE_OPERAND (t, 0),
641 TREE_OPERAND (t, 1)))
642 {
643 tem = negate_expr (TREE_OPERAND (t, 1));
644 return fold_build2_loc (loc, MINUS_EXPR, type,
645 tem, TREE_OPERAND (t, 0));
646 }
647
648 /* -(A + B) -> (-A) - B. */
649 if (negate_expr_p (TREE_OPERAND (t, 0)))
650 {
651 tem = negate_expr (TREE_OPERAND (t, 0));
652 return fold_build2_loc (loc, MINUS_EXPR, type,
653 tem, TREE_OPERAND (t, 1));
654 }
655 }
656 break;
657
658 case MINUS_EXPR:
659 /* - (A - B) -> B - A */
660 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
661 && !HONOR_SIGNED_ZEROS (element_mode (type))
662 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
663 return fold_build2_loc (loc, MINUS_EXPR, type,
664 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
665 break;
666
667 case MULT_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670
671 /* Fall through. */
672
673 case RDIV_EXPR:
674 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
675 {
676 tem = TREE_OPERAND (t, 1);
677 if (negate_expr_p (tem))
678 return fold_build2_loc (loc, TREE_CODE (t), type,
679 TREE_OPERAND (t, 0), negate_expr (tem));
680 tem = TREE_OPERAND (t, 0);
681 if (negate_expr_p (tem))
682 return fold_build2_loc (loc, TREE_CODE (t), type,
683 negate_expr (tem), TREE_OPERAND (t, 1));
684 }
685 break;
686
687 case TRUNC_DIV_EXPR:
688 case ROUND_DIV_EXPR:
689 case EXACT_DIV_EXPR:
690 /* In general we can't negate A / B, because if A is INT_MIN and
691 B is 1, we may turn this into INT_MIN / -1 which is undefined
692 and actually traps on some architectures. But if overflow is
693 undefined, we can negate, because - (INT_MIN / 1) is an
694 overflow. */
695 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
696 {
697 const char * const warnmsg = G_("assuming signed overflow does not "
698 "occur when negating a division");
699 tem = TREE_OPERAND (t, 1);
700 if (negate_expr_p (tem))
701 {
702 if (INTEGRAL_TYPE_P (type)
703 && (TREE_CODE (tem) != INTEGER_CST
704 || integer_onep (tem)))
705 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
706 return fold_build2_loc (loc, TREE_CODE (t), type,
707 TREE_OPERAND (t, 0), negate_expr (tem));
708 }
709 /* If overflow is undefined then we have to be careful because
710 we ask whether it's ok to associate the negate with the
711 division which is not ok for example for
712 -((a - b) / c) where (-(a - b)) / c may invoke undefined
713 overflow because of negating INT_MIN. So do not use
714 negate_expr_p here but open-code the two important cases. */
715 tem = TREE_OPERAND (t, 0);
716 if ((INTEGRAL_TYPE_P (type)
717 && (TREE_CODE (tem) == NEGATE_EXPR
718 || (TREE_CODE (tem) == INTEGER_CST
719 && may_negate_without_overflow_p (tem))))
720 || !INTEGRAL_TYPE_P (type))
721 return fold_build2_loc (loc, TREE_CODE (t), type,
722 negate_expr (tem), TREE_OPERAND (t, 1));
723 }
724 break;
725
726 case NOP_EXPR:
727 /* Convert -((double)float) into (double)(-float). */
728 if (TREE_CODE (type) == REAL_TYPE)
729 {
730 tem = strip_float_extensions (t);
731 if (tem != t && negate_expr_p (tem))
732 return fold_convert_loc (loc, type, negate_expr (tem));
733 }
734 break;
735
736 case CALL_EXPR:
737 /* Negate -f(x) as f(-x). */
738 if (negate_mathfn_p (builtin_mathfn_code (t))
739 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
740 {
741 tree fndecl, arg;
742
743 fndecl = get_callee_fndecl (t);
744 arg = negate_expr (CALL_EXPR_ARG (t, 0));
745 return build_call_expr_loc (loc, fndecl, 1, arg);
746 }
747 break;
748
749 case RSHIFT_EXPR:
750 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
751 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
752 {
753 tree op1 = TREE_OPERAND (t, 1);
754 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
755 {
756 tree ntype = TYPE_UNSIGNED (type)
757 ? signed_type_for (type)
758 : unsigned_type_for (type);
759 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
760 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
761 return fold_convert_loc (loc, type, temp);
762 }
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 return NULL_TREE;
771 }
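/* Worked example for the RSHIFT_EXPR case above (illustrative): for
   32-bit int, (int) x >> 31 is the arithmetic shift, i.e. 0 or -1,
   so its negation is 0 or 1, which is exactly the logical shift
   (unsigned) x >> 31.  Flipping the signedness of the shifted
   operand therefore performs the negation for free.  */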
772
773 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
774    negated in a simpler way.  Also allow for T to be NULL_TREE, in which case
775    return NULL_TREE.  */
776
777 static tree
778 negate_expr (tree t)
779 {
780 tree type, tem;
781 location_t loc;
782
783 if (t == NULL_TREE)
784 return NULL_TREE;
785
786 loc = EXPR_LOCATION (t);
787 type = TREE_TYPE (t);
788 STRIP_SIGN_NOPS (t);
789
790 tem = fold_negate_expr (loc, t);
791 if (!tem)
792 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
793 return fold_convert_loc (loc, type, tem);
794 }
795 \f
796 /* Split a tree IN into constant, literal and variable parts that could be
797 combined with CODE to make IN. "constant" means an expression with
798 TREE_CONSTANT but that isn't an actual constant. CODE must be a
799 commutative arithmetic operation. Store the constant part into *CONP,
800 the literal in *LITP and return the variable part. If a part isn't
801 present, set it to null. If the tree does not decompose in this way,
802 return the entire tree as the variable part and the other parts as null.
803
804 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
805    case, we negate an operand that was subtracted, except if it is a
806    literal, in which case we use *MINUS_LITP instead.
807
808 If NEGATE_P is true, we are negating all of IN, again except a literal
809 for which we use *MINUS_LITP instead.
810
811 If IN is itself a literal or constant, return it as appropriate.
812
813 Note that we do not guarantee that any of the three values will be the
814 same type as IN, but they will have the same signedness and mode. */
815
816 static tree
817 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
818 tree *minus_litp, int negate_p)
819 {
820 tree var = 0;
821
822 *conp = 0;
823 *litp = 0;
824 *minus_litp = 0;
825
826 /* Strip any conversions that don't change the machine mode or signedness. */
827 STRIP_SIGN_NOPS (in);
828
829 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
830 || TREE_CODE (in) == FIXED_CST)
831 *litp = in;
832 else if (TREE_CODE (in) == code
833 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
834 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
835 /* We can associate addition and subtraction together (even
836 though the C standard doesn't say so) for integers because
837 the value is not affected. For reals, the value might be
838 affected, so we can't. */
839 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
840 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
841 {
842 tree op0 = TREE_OPERAND (in, 0);
843 tree op1 = TREE_OPERAND (in, 1);
844 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
845 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
846
847 /* First see if either of the operands is a literal, then a constant. */
848 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
849 || TREE_CODE (op0) == FIXED_CST)
850 *litp = op0, op0 = 0;
851 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
852 || TREE_CODE (op1) == FIXED_CST)
853 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
854
855 if (op0 != 0 && TREE_CONSTANT (op0))
856 *conp = op0, op0 = 0;
857 else if (op1 != 0 && TREE_CONSTANT (op1))
858 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
859
860 /* If we haven't dealt with either operand, this is not a case we can
861 decompose. Otherwise, VAR is either of the ones remaining, if any. */
862 if (op0 != 0 && op1 != 0)
863 var = in;
864 else if (op0 != 0)
865 var = op0;
866 else
867 var = op1, neg_var_p = neg1_p;
868
869 /* Now do any needed negations. */
870 if (neg_litp_p)
871 *minus_litp = *litp, *litp = 0;
872 if (neg_conp_p)
873 *conp = negate_expr (*conp);
874 if (neg_var_p)
875 var = negate_expr (var);
876 }
877 else if (TREE_CODE (in) == BIT_NOT_EXPR
878 && code == PLUS_EXPR)
879 {
880       /* -X - 1 is folded to ~X; undo that here.  */
881 *minus_litp = build_one_cst (TREE_TYPE (in));
882 var = negate_expr (TREE_OPERAND (in, 0));
883 }
884 else if (TREE_CONSTANT (in))
885 *conp = in;
886 else
887 var = in;
888
889 if (negate_p)
890 {
891 if (*litp)
892 *minus_litp = *litp, *litp = 0;
893 else if (*minus_litp)
894 *litp = *minus_litp, *minus_litp = 0;
895 *conp = negate_expr (*conp);
896 var = negate_expr (var);
897 }
898
899 return var;
900 }
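/* Worked example (illustrative): splitting IN = a - 5 with
   CODE == PLUS_EXPR and NEGATE_P == 0 finds the literal 5 on the
   subtracted side, so *LITP stays null, *MINUS_LITP becomes 5, *CONP
   stays null and the variable part a is returned.  Splitting
   IN = ~x undoes the ~x == -x - 1 folding: *MINUS_LITP becomes 1
   and the variable part is -x.  */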
901
902 /* Re-associate trees split by the above function. T1 and T2 are
903 either expressions to associate or null. Return the new
904 expression, if any. LOC is the location of the new expression. If
905 we build an operation, do it in TYPE and with CODE. */
906
907 static tree
908 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 {
910 if (t1 == 0)
911 return t2;
912 else if (t2 == 0)
913 return t1;
914
915 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
916 try to fold this since we will have infinite recursion. But do
917 deal with any NEGATE_EXPRs. */
918 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
919 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
920 {
921 if (code == PLUS_EXPR)
922 {
923 if (TREE_CODE (t1) == NEGATE_EXPR)
924 return build2_loc (loc, MINUS_EXPR, type,
925 fold_convert_loc (loc, type, t2),
926 fold_convert_loc (loc, type,
927 TREE_OPERAND (t1, 0)));
928 else if (TREE_CODE (t2) == NEGATE_EXPR)
929 return build2_loc (loc, MINUS_EXPR, type,
930 fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type,
932 TREE_OPERAND (t2, 0)));
933 else if (integer_zerop (t2))
934 return fold_convert_loc (loc, type, t1);
935 }
936 else if (code == MINUS_EXPR)
937 {
938 if (integer_zerop (t2))
939 return fold_convert_loc (loc, type, t1);
940 }
941
942 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
944 }
945
946 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
947 fold_convert_loc (loc, type, t2));
948 }
949 \f
950 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
951 for use in int_const_binop, size_binop and size_diffop. */
952
953 static bool
954 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
955 {
956 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
957 return false;
958 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
959 return false;
960
961 switch (code)
962 {
963 case LSHIFT_EXPR:
964 case RSHIFT_EXPR:
965 case LROTATE_EXPR:
966 case RROTATE_EXPR:
967 return true;
968
969 default:
970 break;
971 }
972
973 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
974 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
975 && TYPE_MODE (type1) == TYPE_MODE (type2);
976 }
977
978
979 /* Combine two integer constants ARG1 and ARG2 under operation CODE
980 to produce a new constant. Return NULL_TREE if we don't know how
981 to evaluate CODE at compile-time. */
982
983 static tree
984 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
985 int overflowable)
986 {
987 wide_int res;
988 tree t;
989 tree type = TREE_TYPE (arg1);
990 signop sign = TYPE_SIGN (type);
991 bool overflow = false;
992
993 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
994 TYPE_SIGN (TREE_TYPE (parg2)));
995
996 switch (code)
997 {
998 case BIT_IOR_EXPR:
999 res = wi::bit_or (arg1, arg2);
1000 break;
1001
1002 case BIT_XOR_EXPR:
1003 res = wi::bit_xor (arg1, arg2);
1004 break;
1005
1006 case BIT_AND_EXPR:
1007 res = wi::bit_and (arg1, arg2);
1008 break;
1009
1010 case RSHIFT_EXPR:
1011 case LSHIFT_EXPR:
1012 if (wi::neg_p (arg2))
1013 {
1014 arg2 = -arg2;
1015 if (code == RSHIFT_EXPR)
1016 code = LSHIFT_EXPR;
1017 else
1018 code = RSHIFT_EXPR;
1019 }
1020
1021 if (code == RSHIFT_EXPR)
1022 /* It's unclear from the C standard whether shifts can overflow.
1023 The following code ignores overflow; perhaps a C standard
1024 interpretation ruling is needed. */
1025 res = wi::rshift (arg1, arg2, sign);
1026 else
1027 res = wi::lshift (arg1, arg2);
1028 break;
1029
1030 case RROTATE_EXPR:
1031 case LROTATE_EXPR:
1032 if (wi::neg_p (arg2))
1033 {
1034 arg2 = -arg2;
1035 if (code == RROTATE_EXPR)
1036 code = LROTATE_EXPR;
1037 else
1038 code = RROTATE_EXPR;
1039 }
1040
1041 if (code == RROTATE_EXPR)
1042 res = wi::rrotate (arg1, arg2);
1043 else
1044 res = wi::lrotate (arg1, arg2);
1045 break;
1046
1047 case PLUS_EXPR:
1048 res = wi::add (arg1, arg2, sign, &overflow);
1049 break;
1050
1051 case MINUS_EXPR:
1052 res = wi::sub (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case MULT_EXPR:
1056 res = wi::mul (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case MULT_HIGHPART_EXPR:
1060 res = wi::mul_high (arg1, arg2, sign);
1061 break;
1062
1063 case TRUNC_DIV_EXPR:
1064 case EXACT_DIV_EXPR:
1065 if (arg2 == 0)
1066 return NULL_TREE;
1067 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1068 break;
1069
1070 case FLOOR_DIV_EXPR:
1071 if (arg2 == 0)
1072 return NULL_TREE;
1073 res = wi::div_floor (arg1, arg2, sign, &overflow);
1074 break;
1075
1076 case CEIL_DIV_EXPR:
1077 if (arg2 == 0)
1078 return NULL_TREE;
1079 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1080 break;
1081
1082 case ROUND_DIV_EXPR:
1083 if (arg2 == 0)
1084 return NULL_TREE;
1085 res = wi::div_round (arg1, arg2, sign, &overflow);
1086 break;
1087
1088 case TRUNC_MOD_EXPR:
1089 if (arg2 == 0)
1090 return NULL_TREE;
1091 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1092 break;
1093
1094 case FLOOR_MOD_EXPR:
1095 if (arg2 == 0)
1096 return NULL_TREE;
1097 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1098 break;
1099
1100 case CEIL_MOD_EXPR:
1101 if (arg2 == 0)
1102 return NULL_TREE;
1103 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1104 break;
1105
1106 case ROUND_MOD_EXPR:
1107 if (arg2 == 0)
1108 return NULL_TREE;
1109 res = wi::mod_round (arg1, arg2, sign, &overflow);
1110 break;
1111
1112 case MIN_EXPR:
1113 res = wi::min (arg1, arg2, sign);
1114 break;
1115
1116 case MAX_EXPR:
1117 res = wi::max (arg1, arg2, sign);
1118 break;
1119
1120 default:
1121 return NULL_TREE;
1122 }
1123
1124 t = force_fit_type (type, res, overflowable,
1125 (((sign == SIGNED || overflowable == -1)
1126 && overflow)
1127 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1128
1129 return t;
1130 }
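/* For example (illustrative): on a 32-bit signed type,
   int_const_binop_1 (PLUS_EXPR, INT_MAX, 1, 1) wraps to INT_MIN;
   wi::add reports the overflow and, the sign being SIGNED,
   force_fit_type marks the result with TREE_OVERFLOW.  With
   OVERFLOWABLE == -1, as size_binop uses below, even unsigned
   wraparound is flagged.  */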
1131
1132 tree
1133 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1134 {
1135 return int_const_binop_1 (code, arg1, arg2, 1);
1136 }
1137
1138 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1139 constant. We assume ARG1 and ARG2 have the same data type, or at least
1140 are the same kind of constant and the same machine mode. Return zero if
1141 combining the constants is not allowed in the current operating mode. */
1142
1143 static tree
1144 const_binop (enum tree_code code, tree arg1, tree arg2)
1145 {
1146 /* Sanity check for the recursive cases. */
1147 if (!arg1 || !arg2)
1148 return NULL_TREE;
1149
1150 STRIP_NOPS (arg1);
1151 STRIP_NOPS (arg2);
1152
1153 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1154 {
1155 if (code == POINTER_PLUS_EXPR)
1156 return int_const_binop (PLUS_EXPR,
1157 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1158
1159 return int_const_binop (code, arg1, arg2);
1160 }
1161
1162 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1163 {
1164 machine_mode mode;
1165 REAL_VALUE_TYPE d1;
1166 REAL_VALUE_TYPE d2;
1167 REAL_VALUE_TYPE value;
1168 REAL_VALUE_TYPE result;
1169 bool inexact;
1170 tree t, type;
1171
1172 /* The following codes are handled by real_arithmetic. */
1173 switch (code)
1174 {
1175 case PLUS_EXPR:
1176 case MINUS_EXPR:
1177 case MULT_EXPR:
1178 case RDIV_EXPR:
1179 case MIN_EXPR:
1180 case MAX_EXPR:
1181 break;
1182
1183 default:
1184 return NULL_TREE;
1185 }
1186
1187 d1 = TREE_REAL_CST (arg1);
1188 d2 = TREE_REAL_CST (arg2);
1189
1190 type = TREE_TYPE (arg1);
1191 mode = TYPE_MODE (type);
1192
1193 /* Don't perform operation if we honor signaling NaNs and
1194 either operand is a NaN. */
1195 if (HONOR_SNANS (mode)
1196 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1197 return NULL_TREE;
1198
1199 /* Don't perform operation if it would raise a division
1200 by zero exception. */
1201 if (code == RDIV_EXPR
1202 && REAL_VALUES_EQUAL (d2, dconst0)
1203 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1204 return NULL_TREE;
1205
1206 /* If either operand is a NaN, just return it. Otherwise, set up
1207 for floating-point trap; we return an overflow. */
1208 if (REAL_VALUE_ISNAN (d1))
1209 return arg1;
1210 else if (REAL_VALUE_ISNAN (d2))
1211 return arg2;
1212
1213 inexact = real_arithmetic (&value, code, &d1, &d2);
1214 real_convert (&result, mode, &value);
1215
1216 /* Don't constant fold this floating point operation if
1217 the result has overflowed and flag_trapping_math. */
1218 if (flag_trapping_math
1219 && MODE_HAS_INFINITIES (mode)
1220 && REAL_VALUE_ISINF (result)
1221 && !REAL_VALUE_ISINF (d1)
1222 && !REAL_VALUE_ISINF (d2))
1223 return NULL_TREE;
1224
1225 /* Don't constant fold this floating point operation if the
1226      result may depend upon the run-time rounding mode and
1227 flag_rounding_math is set, or if GCC's software emulation
1228 is unable to accurately represent the result. */
1229 if ((flag_rounding_math
1230 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1231 && (inexact || !real_identical (&result, &value)))
1232 return NULL_TREE;
1233
1234 t = build_real (type, result);
1235
1236 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1237 return t;
1238 }
1239
1240 if (TREE_CODE (arg1) == FIXED_CST)
1241 {
1242 FIXED_VALUE_TYPE f1;
1243 FIXED_VALUE_TYPE f2;
1244 FIXED_VALUE_TYPE result;
1245 tree t, type;
1246 int sat_p;
1247 bool overflow_p;
1248
1249 /* The following codes are handled by fixed_arithmetic. */
1250 switch (code)
1251 {
1252 case PLUS_EXPR:
1253 case MINUS_EXPR:
1254 case MULT_EXPR:
1255 case TRUNC_DIV_EXPR:
1256 if (TREE_CODE (arg2) != FIXED_CST)
1257 return NULL_TREE;
1258 f2 = TREE_FIXED_CST (arg2);
1259 break;
1260
1261 case LSHIFT_EXPR:
1262 case RSHIFT_EXPR:
1263 {
1264 if (TREE_CODE (arg2) != INTEGER_CST)
1265 return NULL_TREE;
1266 wide_int w2 = arg2;
1267 f2.data.high = w2.elt (1);
1268 f2.data.low = w2.elt (0);
1269 f2.mode = SImode;
1270 }
1271 break;
1272
1273 default:
1274 return NULL_TREE;
1275 }
1276
1277 f1 = TREE_FIXED_CST (arg1);
1278 type = TREE_TYPE (arg1);
1279 sat_p = TYPE_SATURATING (type);
1280 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1281 t = build_fixed (type, result);
1282 /* Propagate overflow flags. */
1283 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1284 TREE_OVERFLOW (t) = 1;
1285 return t;
1286 }
1287
1288 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1289 {
1290 tree type = TREE_TYPE (arg1);
1291 tree r1 = TREE_REALPART (arg1);
1292 tree i1 = TREE_IMAGPART (arg1);
1293 tree r2 = TREE_REALPART (arg2);
1294 tree i2 = TREE_IMAGPART (arg2);
1295 tree real, imag;
1296
1297 switch (code)
1298 {
1299 case PLUS_EXPR:
1300 case MINUS_EXPR:
1301 real = const_binop (code, r1, r2);
1302 imag = const_binop (code, i1, i2);
1303 break;
1304
1305 case MULT_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_mul);
1310
1311 real = const_binop (MINUS_EXPR,
1312 const_binop (MULT_EXPR, r1, r2),
1313 const_binop (MULT_EXPR, i1, i2));
1314 imag = const_binop (PLUS_EXPR,
1315 const_binop (MULT_EXPR, r1, i2),
1316 const_binop (MULT_EXPR, i1, r2));
1317 break;
1318
1319 case RDIV_EXPR:
1320 if (COMPLEX_FLOAT_TYPE_P (type))
1321 return do_mpc_arg2 (arg1, arg2, type,
1322 /* do_nonfinite= */ folding_initializer,
1323 mpc_div);
1324 /* Fallthru ... */
1325 case TRUNC_DIV_EXPR:
1326 case CEIL_DIV_EXPR:
1327 case FLOOR_DIV_EXPR:
1328 case ROUND_DIV_EXPR:
1329 if (flag_complex_method == 0)
1330 {
1331 /* Keep this algorithm in sync with
1332 tree-complex.c:expand_complex_div_straight().
1333
1334 Expand complex division to scalars, straightforward algorithm.
1335 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1336 t = br*br + bi*bi
1337 */
1338 tree magsquared
1339 = const_binop (PLUS_EXPR,
1340 const_binop (MULT_EXPR, r2, r2),
1341 const_binop (MULT_EXPR, i2, i2));
1342 tree t1
1343 = const_binop (PLUS_EXPR,
1344 const_binop (MULT_EXPR, r1, r2),
1345 const_binop (MULT_EXPR, i1, i2));
1346 tree t2
1347 = const_binop (MINUS_EXPR,
1348 const_binop (MULT_EXPR, i1, r2),
1349 const_binop (MULT_EXPR, r1, i2));
1350
1351 real = const_binop (code, t1, magsquared);
1352 imag = const_binop (code, t2, magsquared);
1353 }
1354 else
1355 {
1356 /* Keep this algorithm in sync with
1357 tree-complex.c:expand_complex_div_wide().
1358
1359 Expand complex division to scalars, modified algorithm to minimize
1360 overflow with wide input ranges. */
1361 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1362 fold_abs_const (r2, TREE_TYPE (type)),
1363 fold_abs_const (i2, TREE_TYPE (type)));
1364
1365 if (integer_nonzerop (compare))
1366 {
1367 /* In the TRUE branch, we compute
1368 ratio = br/bi;
1369 div = (br * ratio) + bi;
1370 tr = (ar * ratio) + ai;
1371 ti = (ai * ratio) - ar;
1372 tr = tr / div;
1373 ti = ti / div; */
1374 tree ratio = const_binop (code, r2, i2);
1375 tree div = const_binop (PLUS_EXPR, i2,
1376 const_binop (MULT_EXPR, r2, ratio));
1377 real = const_binop (MULT_EXPR, r1, ratio);
1378 real = const_binop (PLUS_EXPR, real, i1);
1379 real = const_binop (code, real, div);
1380
1381 imag = const_binop (MULT_EXPR, i1, ratio);
1382 imag = const_binop (MINUS_EXPR, imag, r1);
1383 imag = const_binop (code, imag, div);
1384 }
1385 else
1386 {
1387 /* In the FALSE branch, we compute
1388 ratio = bi/br;
1389 div = (bi * ratio) + br;
1390 tr = (ai * ratio) + ar;
1391 ti = ai - (ar * ratio);
1392 tr = tr / div;
1393 ti = ti / div;  */
1394 tree ratio = const_binop (code, i2, r2);
1395 tree div = const_binop (PLUS_EXPR, r2,
1396 const_binop (MULT_EXPR, i2, ratio));
1397
1398 real = const_binop (MULT_EXPR, i1, ratio);
1399 real = const_binop (PLUS_EXPR, real, r1);
1400 real = const_binop (code, real, div);
1401
1402 imag = const_binop (MULT_EXPR, r1, ratio);
1403 imag = const_binop (MINUS_EXPR, i1, imag);
1404 imag = const_binop (code, imag, div);
1405 }
1406 }
1407 break;
1408
1409 default:
1410 return NULL_TREE;
1411 }
1412
1413 if (real && imag)
1414 return build_complex (type, real, imag);
1415 }
1416
1417 if (TREE_CODE (arg1) == VECTOR_CST
1418 && TREE_CODE (arg2) == VECTOR_CST)
1419 {
1420 tree type = TREE_TYPE (arg1);
1421 int count = TYPE_VECTOR_SUBPARTS (type), i;
1422 tree *elts = XALLOCAVEC (tree, count);
1423
1424 for (i = 0; i < count; i++)
1425 {
1426 tree elem1 = VECTOR_CST_ELT (arg1, i);
1427 tree elem2 = VECTOR_CST_ELT (arg2, i);
1428
1429 elts[i] = const_binop (code, elem1, elem2);
1430
1431 /* It is possible that const_binop cannot handle the given
1432      code and returns NULL_TREE.  */
1433 if (elts[i] == NULL_TREE)
1434 return NULL_TREE;
1435 }
1436
1437 return build_vector (type, elts);
1438 }
1439
1440 /* Shifts allow a scalar offset for a vector. */
1441 if (TREE_CODE (arg1) == VECTOR_CST
1442 && TREE_CODE (arg2) == INTEGER_CST)
1443 {
1444 tree type = TREE_TYPE (arg1);
1445 int count = TYPE_VECTOR_SUBPARTS (type), i;
1446 tree *elts = XALLOCAVEC (tree, count);
1447
1448 for (i = 0; i < count; i++)
1449 {
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1451
1452 elts[i] = const_binop (code, elem1, arg2);
1453
1454 /* It is possible that const_binop cannot handle the given
1455      code and returns NULL_TREE.  */
1456 if (elts[i] == NULL_TREE)
1457 return NULL_TREE;
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1467
1468 tree
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1470 {
1471 if (TREE_CODE_CLASS (code) == tcc_comparison)
1472 return fold_relational_const (code, type, arg1, arg2);
1473
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */
1476 switch (code)
1477 {
1478 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2);
1484 return NULL_TREE;
1485
1486 case VEC_PACK_TRUNC_EXPR:
1487 case VEC_PACK_FIX_TRUNC_EXPR:
1488 {
1489 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1490 tree *elts;
1491
1492 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1493 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1494 if (TREE_CODE (arg1) != VECTOR_CST
1495 || TREE_CODE (arg2) != VECTOR_CST)
1496 return NULL_TREE;
1497
1498 elts = XALLOCAVEC (tree, nelts);
1499 if (!vec_cst_ctor_to_array (arg1, elts)
1500 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1501 return NULL_TREE;
1502
1503 for (i = 0; i < nelts; i++)
1504 {
1505 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1506 ? NOP_EXPR : FIX_TRUNC_EXPR,
1507 TREE_TYPE (type), elts[i]);
1508 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1509 return NULL_TREE;
1510 }
1511
1512 return build_vector (type, elts);
1513 }
1514
1515 case VEC_WIDEN_MULT_LO_EXPR:
1516 case VEC_WIDEN_MULT_HI_EXPR:
1517 case VEC_WIDEN_MULT_EVEN_EXPR:
1518 case VEC_WIDEN_MULT_ODD_EXPR:
1519 {
1520 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1521 unsigned int out, ofs, scale;
1522 tree *elts;
1523
1524 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1525 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1526 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1527 return NULL_TREE;
1528
1529 elts = XALLOCAVEC (tree, nelts * 4);
1530 if (!vec_cst_ctor_to_array (arg1, elts)
1531 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1532 return NULL_TREE;
1533
1534 if (code == VEC_WIDEN_MULT_LO_EXPR)
1535 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1536 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1537 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1538 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1539 scale = 1, ofs = 0;
1540 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1541 scale = 1, ofs = 1;
1542
1543 for (out = 0; out < nelts; out++)
1544 {
1545 unsigned int in1 = (out << scale) + ofs;
1546 unsigned int in2 = in1 + nelts * 2;
1547 tree t1, t2;
1548
1549 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1550 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1551
1552 if (t1 == NULL_TREE || t2 == NULL_TREE)
1553 return NULL_TREE;
1554 elts[out] = const_binop (MULT_EXPR, t1, t2);
1555 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1556 return NULL_TREE;
1557 }
1558
1559 return build_vector (type, elts);
1560 }
1561
1562 default:;
1563 }
1564
1565 if (TREE_CODE_CLASS (code) != tcc_binary)
1566 return NULL_TREE;
1567
1568 /* Make sure type and arg0 have the same saturating flag. */
1569 gcc_checking_assert (TYPE_SATURATING (type)
1570 == TYPE_SATURATING (TREE_TYPE (arg1)));
1571
1572 return const_binop (code, arg1, arg2);
1573 }
1574
1575 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1576 Return zero if computing the constants is not possible. */
1577
1578 tree
1579 const_unop (enum tree_code code, tree type, tree arg0)
1580 {
1581 switch (code)
1582 {
1583 CASE_CONVERT:
1584 case FLOAT_EXPR:
1585 case FIX_TRUNC_EXPR:
1586 case FIXED_CONVERT_EXPR:
1587 return fold_convert_const (code, type, arg0);
1588
1589 case ADDR_SPACE_CONVERT_EXPR:
1590 if (integer_zerop (arg0))
1591 return fold_convert_const (code, type, arg0);
1592 break;
1593
1594 case VIEW_CONVERT_EXPR:
1595 return fold_view_convert_expr (type, arg0);
1596
1597 case NEGATE_EXPR:
1598 {
1599 /* Can't call fold_negate_const directly here as that doesn't
1600 handle all cases and we might not be able to negate some
1601 constants. */
1602 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1603 if (tem && CONSTANT_CLASS_P (tem))
1604 return tem;
1605 break;
1606 }
1607
1608 case ABS_EXPR:
1609 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1610 return fold_abs_const (arg0, type);
1611 break;
1612
1613 case CONJ_EXPR:
1614 if (TREE_CODE (arg0) == COMPLEX_CST)
1615 {
1616 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1617 TREE_TYPE (type));
1618 return build_complex (type, TREE_REALPART (arg0), ipart);
1619 }
1620 break;
1621
1622 case BIT_NOT_EXPR:
1623 if (TREE_CODE (arg0) == INTEGER_CST)
1624 return fold_not_const (arg0, type);
1625 /* Perform BIT_NOT_EXPR on each element individually. */
1626 else if (TREE_CODE (arg0) == VECTOR_CST)
1627 {
1628 tree *elements;
1629 tree elem;
1630 unsigned count = VECTOR_CST_NELTS (arg0), i;
1631
1632 elements = XALLOCAVEC (tree, count);
1633 for (i = 0; i < count; i++)
1634 {
1635 elem = VECTOR_CST_ELT (arg0, i);
1636 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1637 if (elem == NULL_TREE)
1638 break;
1639 elements[i] = elem;
1640 }
1641 if (i == count)
1642 return build_vector (type, elements);
1643 }
1644 break;
1645
1646 case TRUTH_NOT_EXPR:
1647 if (TREE_CODE (arg0) == INTEGER_CST)
1648 return constant_boolean_node (integer_zerop (arg0), type);
1649 break;
1650
1651 case REALPART_EXPR:
1652 if (TREE_CODE (arg0) == COMPLEX_CST)
1653 return fold_convert (type, TREE_REALPART (arg0));
1654 break;
1655
1656 case IMAGPART_EXPR:
1657 if (TREE_CODE (arg0) == COMPLEX_CST)
1658 return fold_convert (type, TREE_IMAGPART (arg0));
1659 break;
1660
1661 case VEC_UNPACK_LO_EXPR:
1662 case VEC_UNPACK_HI_EXPR:
1663 case VEC_UNPACK_FLOAT_LO_EXPR:
1664 case VEC_UNPACK_FLOAT_HI_EXPR:
1665 {
1666 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1667 tree *elts;
1668 enum tree_code subcode;
1669
1670 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1671 if (TREE_CODE (arg0) != VECTOR_CST)
1672 return NULL_TREE;
1673
1674 elts = XALLOCAVEC (tree, nelts * 2);
1675 if (!vec_cst_ctor_to_array (arg0, elts))
1676 return NULL_TREE;
1677
1678 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1679 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1680 elts += nelts;
1681
1682 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1683 subcode = NOP_EXPR;
1684 else
1685 subcode = FLOAT_EXPR;
1686
1687 for (i = 0; i < nelts; i++)
1688 {
1689 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1690 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1691 return NULL_TREE;
1692 }
1693
1694 return build_vector (type, elts);
1695 }
1696
1697 case REDUC_MIN_EXPR:
1698 case REDUC_MAX_EXPR:
1699 case REDUC_PLUS_EXPR:
1700 {
1701 unsigned int nelts, i;
1702 tree *elts;
1703 enum tree_code subcode;
1704
1705 if (TREE_CODE (arg0) != VECTOR_CST)
1706 return NULL_TREE;
1707 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1708
1709 elts = XALLOCAVEC (tree, nelts);
1710 if (!vec_cst_ctor_to_array (arg0, elts))
1711 return NULL_TREE;
1712
1713 switch (code)
1714 {
1715 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1716 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1717 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1718 default: gcc_unreachable ();
1719 }
1720
1721 for (i = 1; i < nelts; i++)
1722 {
1723 elts[0] = const_binop (subcode, elts[0], elts[i]);
1724 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1725 return NULL_TREE;
1726 }
1727
1728 return elts[0];
1729 }
1730
1731 default:
1732 break;
1733 }
1734
1735 return NULL_TREE;
1736 }
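/* For example (illustrative; V is a placeholder VECTOR_CST):
   const_unop (REDUC_PLUS_EXPR, type, v) on the vector {1, 2, 3, 4}
   repeatedly applies const_binop with PLUS_EXPR to the elements and
   folds to the scalar 10.  */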
1737
1738 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1739 indicates which particular sizetype to create. */
1740
1741 tree
1742 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1743 {
1744 return build_int_cst (sizetype_tab[(int) kind], number);
1745 }
1746 \f
1747 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1748 is a tree code. The type of the result is taken from the operands.
1749 Both must be equivalent integer types, ala int_binop_types_match_p.
1750 If the operands are constant, so is the result. */
1751
1752 tree
1753 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1754 {
1755 tree type = TREE_TYPE (arg0);
1756
1757 if (arg0 == error_mark_node || arg1 == error_mark_node)
1758 return error_mark_node;
1759
1760 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1761 TREE_TYPE (arg1)));
1762
1763 /* Handle the special case of two integer constants faster. */
1764 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1765 {
1766 /* And some specific cases even faster than that. */
1767 if (code == PLUS_EXPR)
1768 {
1769 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1770 return arg1;
1771 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1772 return arg0;
1773 }
1774 else if (code == MINUS_EXPR)
1775 {
1776 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1777 return arg0;
1778 }
1779 else if (code == MULT_EXPR)
1780 {
1781 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1782 return arg1;
1783 }
1784
1785 /* Handle general case of two integer constants. For sizetype
1786 constant calculations we always want to know about overflow,
1787 even in the unsigned case. */
1788 return int_const_binop_1 (code, arg0, arg1, -1);
1789 }
1790
1791 return fold_build2_loc (loc, code, type, arg0, arg1);
1792 }
1793
1794 /* Given two values, either both of sizetype or both of bitsizetype,
1795 compute the difference between the two values. Return the value
1796 in signed type corresponding to the type of the operands. */
1797
1798 tree
1799 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1800 {
1801 tree type = TREE_TYPE (arg0);
1802 tree ctype;
1803
1804 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1805 TREE_TYPE (arg1)));
1806
1807 /* If the type is already signed, just do the simple thing. */
1808 if (!TYPE_UNSIGNED (type))
1809 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1810
1811 if (type == sizetype)
1812 ctype = ssizetype;
1813 else if (type == bitsizetype)
1814 ctype = sbitsizetype;
1815 else
1816 ctype = signed_type_for (type);
1817
1818 /* If either operand is not a constant, do the conversions to the signed
1819 type and subtract. The hardware will do the right thing with any
1820 overflow in the subtraction. */
1821 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1822 return size_binop_loc (loc, MINUS_EXPR,
1823 fold_convert_loc (loc, ctype, arg0),
1824 fold_convert_loc (loc, ctype, arg1));
1825
1826 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1827 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1828 overflow) and negate (which can't either). Special-case a result
1829 of zero while we're here. */
1830 if (tree_int_cst_equal (arg0, arg1))
1831 return build_int_cst (ctype, 0);
1832 else if (tree_int_cst_lt (arg1, arg0))
1833 return fold_convert_loc (loc, ctype,
1834 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1835 else
1836 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1837 fold_convert_loc (loc, ctype,
1838 size_binop_loc (loc,
1839 MINUS_EXPR,
1840 arg1, arg0)));
1841 }
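/* Worked example (illustrative): for sizetype constants ARG0 == 2
   and ARG1 == 5, the result type is ssizetype; since ARG1 > ARG0 we
   compute 5 - 2 == 3 in the unsigned type, convert to ssizetype and
   negate, yielding -3 without an out-of-range intermediate.  */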
1842 \f
1843 /* A subroutine of fold_convert_const handling conversions of an
1844 INTEGER_CST to another integer type. */
1845
1846 static tree
1847 fold_convert_const_int_from_int (tree type, const_tree arg1)
1848 {
1849 /* Given an integer constant, make new constant with new type,
1850 appropriately sign-extended or truncated. Use widest_int
1851      so that any extension is done according to ARG1's type.  */
1852 return force_fit_type (type, wi::to_widest (arg1),
1853 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1854 TREE_OVERFLOW (arg1));
1855 }
1856
1857 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1858 to an integer type. */
1859
1860 static tree
1861 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1862 {
1863 bool overflow = false;
1864 tree t;
1865
1866 /* The following code implements the floating point to integer
1867 conversion rules required by the Java Language Specification,
1868 that IEEE NaNs are mapped to zero and values that overflow
1869 the target precision saturate, i.e. values greater than
1870 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1871 are mapped to INT_MIN. These semantics are allowed by the
1872 C and C++ standards that simply state that the behavior of
1873 FP-to-integer conversion is unspecified upon overflow. */
1874
1875 wide_int val;
1876 REAL_VALUE_TYPE r;
1877 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1878
1879 switch (code)
1880 {
1881 case FIX_TRUNC_EXPR:
1882 real_trunc (&r, VOIDmode, &x);
1883 break;
1884
1885 default:
1886 gcc_unreachable ();
1887 }
1888
1889 /* If R is NaN, return zero and show we have an overflow. */
1890 if (REAL_VALUE_ISNAN (r))
1891 {
1892 overflow = true;
1893 val = wi::zero (TYPE_PRECISION (type));
1894 }
1895
1896 /* See if R is less than the lower bound or greater than the
1897 upper bound. */
1898
1899 if (! overflow)
1900 {
1901 tree lt = TYPE_MIN_VALUE (type);
1902 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1903 if (REAL_VALUES_LESS (r, l))
1904 {
1905 overflow = true;
1906 val = lt;
1907 }
1908 }
1909
1910 if (! overflow)
1911 {
1912 tree ut = TYPE_MAX_VALUE (type);
1913 if (ut)
1914 {
1915 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1916 if (REAL_VALUES_LESS (u, r))
1917 {
1918 overflow = true;
1919 val = ut;
1920 }
1921 }
1922 }
1923
1924 if (! overflow)
1925 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1926
1927 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1928 return t;
1929 }
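
/* Editorial sketch (not part of GCC): the saturating conversion rules
   above, written against plain C types. A NaN maps to zero and
   out-of-range values clamp to the integer type's bounds; the helper
   name is illustrative only.  */
#if 0
#include <limits.h>
#include <math.h>

static int
saturating_ftoi_sketch (double r)
{
  if (isnan (r))
    return 0;			/* NaN is mapped to zero.  */
  if (r < (double) INT_MIN)
    return INT_MIN;		/* Below the lower bound: saturate.  */
  if (r > (double) INT_MAX)
    return INT_MAX;		/* Above the upper bound: saturate.  */
  return (int) r;		/* In range: truncate toward zero.  */
}
#endif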
1930
1931 /* A subroutine of fold_convert_const handling conversions of a
1932 FIXED_CST to an integer type. */
1933
1934 static tree
1935 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1936 {
1937 tree t;
1938 double_int temp, temp_trunc;
1939 unsigned int mode;
1940
1941 /* Right shift FIXED_CST to temp by fbit. */
1942 temp = TREE_FIXED_CST (arg1).data;
1943 mode = TREE_FIXED_CST (arg1).mode;
1944 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1945 {
1946 temp = temp.rshift (GET_MODE_FBIT (mode),
1947 HOST_BITS_PER_DOUBLE_INT,
1948 SIGNED_FIXED_POINT_MODE_P (mode));
1949
1950 /* Left shift temp to temp_trunc by fbit. */
1951 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1952 HOST_BITS_PER_DOUBLE_INT,
1953 SIGNED_FIXED_POINT_MODE_P (mode));
1954 }
1955 else
1956 {
1957 temp = double_int_zero;
1958 temp_trunc = double_int_zero;
1959 }
1960
1961 /* If FIXED_CST is negative, we need to round the value toward zero:
1962 if any fractional bits were dropped by the shift, add 1 to temp. */
1963 if (SIGNED_FIXED_POINT_MODE_P (mode)
1964 && temp_trunc.is_negative ()
1965 && TREE_FIXED_CST (arg1).data != temp_trunc)
1966 temp += double_int_one;
1967
1968 /* Given a fixed-point constant, make a new constant with the new type,
1969 appropriately sign-extended or truncated. */
1970 t = force_fit_type (type, temp, -1,
1971 (temp.is_negative ()
1972 && (TYPE_UNSIGNED (type)
1973 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1974 | TREE_OVERFLOW (arg1));
1975
1976 return t;
1977 }
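
/* Editorial sketch (not part of GCC): the round-toward-zero correction
   above, shown for a hypothetical signed Q16.16 fixed-point format.
   The arithmetic right shift rounds toward negative infinity, so
   negative values with a nonzero fraction need a +1 adjustment.  */
#if 0
static int
fixed_to_int_sketch (int fx)	/* Signed Q16.16 value.  */
{
  int t = fx >> 16;		/* Shift out the fractional bits.  */
  if (fx < 0 && (fx & 0xffff) != 0)
    t += 1;			/* Fraction dropped: round toward zero.  */
  return t;
}
#endif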
1978
1979 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1980 to another floating-point type. */
1981
1982 static tree
1983 fold_convert_const_real_from_real (tree type, const_tree arg1)
1984 {
1985 REAL_VALUE_TYPE value;
1986 tree t;
1987
1988 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1989 t = build_real (type, value);
1990
1991 /* If converting an infinity or NAN to a representation that doesn't
1992 have one, set the overflow bit so that we can produce some kind of
1993 error message at the appropriate point if necessary. It's not the
1994 most user-friendly message, but it's better than nothing. */
1995 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1996 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1997 TREE_OVERFLOW (t) = 1;
1998 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1999 && !MODE_HAS_NANS (TYPE_MODE (type)))
2000 TREE_OVERFLOW (t) = 1;
2001 /* Regular overflow: the conversion produced an infinity in a mode
2002 that can't represent infinities. */
2003 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2004 && REAL_VALUE_ISINF (value)
2005 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2006 TREE_OVERFLOW (t) = 1;
2007 else
2008 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2009 return t;
2010 }
2011
2012 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2013 to a floating-point type. */
2014
2015 static tree
2016 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2017 {
2018 REAL_VALUE_TYPE value;
2019 tree t;
2020
2021 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2022 t = build_real (type, value);
2023
2024 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 return t;
2026 }
2027
2028 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2029 to another fixed-point type. */
2030
2031 static tree
2032 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2033 {
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037
2038 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2039 TYPE_SATURATING (type));
2040 t = build_fixed (type, value);
2041
2042 /* Propagate overflow flags. */
2043 if (overflow_p | TREE_OVERFLOW (arg1))
2044 TREE_OVERFLOW (t) = 1;
2045 return t;
2046 }
2047
2048 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2049 to a fixed-point type. */
2050
2051 static tree
2052 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2053 {
2054 FIXED_VALUE_TYPE value;
2055 tree t;
2056 bool overflow_p;
2057 double_int di;
2058
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2060
2061 di.low = TREE_INT_CST_ELT (arg1, 0);
2062 if (TREE_INT_CST_NUNITS (arg1) == 1)
2063 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2064 else
2065 di.high = TREE_INT_CST_ELT (arg1, 1);
2066
2067 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2071
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2076 }
2077
2078 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2079 to a fixed-point type. */
2080
2081 static tree
2082 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2083 {
2084 FIXED_VALUE_TYPE value;
2085 tree t;
2086 bool overflow_p;
2087
2088 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2089 &TREE_REAL_CST (arg1),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2092
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2097 }
2098
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2101
2102 static tree
2103 fold_convert_const (enum tree_code code, tree type, tree arg1)
2104 {
2105 if (TREE_TYPE (arg1) == type)
2106 return arg1;
2107
2108 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2109 || TREE_CODE (type) == OFFSET_TYPE)
2110 {
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_int_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_int_from_real (code, type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_int_from_fixed (type, arg1);
2117 }
2118 else if (TREE_CODE (type) == REAL_TYPE)
2119 {
2120 if (TREE_CODE (arg1) == INTEGER_CST)
2121 return build_real_from_int_cst (type, arg1);
2122 else if (TREE_CODE (arg1) == REAL_CST)
2123 return fold_convert_const_real_from_real (type, arg1);
2124 else if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_real_from_fixed (type, arg1);
2126 }
2127 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2128 {
2129 if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_fixed_from_fixed (type, arg1);
2131 else if (TREE_CODE (arg1) == INTEGER_CST)
2132 return fold_convert_const_fixed_from_int (type, arg1);
2133 else if (TREE_CODE (arg1) == REAL_CST)
2134 return fold_convert_const_fixed_from_real (type, arg1);
2135 }
2136 return NULL_TREE;
2137 }
2138
2139 /* Construct a vector of vector type TYPE whose elements are all zero. */
2140
2141 static tree
2142 build_zero_vector (tree type)
2143 {
2144 tree t;
2145
2146 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2147 return build_vector_from_val (type, t);
2148 }
2149
2150 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2151
2152 bool
2153 fold_convertible_p (const_tree type, const_tree arg)
2154 {
2155 tree orig = TREE_TYPE (arg);
2156
2157 if (type == orig)
2158 return true;
2159
2160 if (TREE_CODE (arg) == ERROR_MARK
2161 || TREE_CODE (type) == ERROR_MARK
2162 || TREE_CODE (orig) == ERROR_MARK)
2163 return false;
2164
2165 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2166 return true;
2167
2168 switch (TREE_CODE (type))
2169 {
2170 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2171 case POINTER_TYPE: case REFERENCE_TYPE:
2172 case OFFSET_TYPE:
2173 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == OFFSET_TYPE)
2175 return true;
2176 return (TREE_CODE (orig) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2178
2179 case REAL_TYPE:
2180 case FIXED_POINT_TYPE:
2181 case COMPLEX_TYPE:
2182 case VECTOR_TYPE:
2183 case VOID_TYPE:
2184 return TREE_CODE (type) == TREE_CODE (orig);
2185
2186 default:
2187 return false;
2188 }
2189 }
2190
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2193
2194 tree
2195 fold_convert_loc (location_t loc, tree type, tree arg)
2196 {
2197 tree orig = TREE_TYPE (arg);
2198 tree tem;
2199
2200 if (type == orig)
2201 return arg;
2202
2203 if (TREE_CODE (arg) == ERROR_MARK
2204 || TREE_CODE (type) == ERROR_MARK
2205 || TREE_CODE (orig) == ERROR_MARK)
2206 return error_mark_node;
2207
2208 switch (TREE_CODE (type))
2209 {
2210 case POINTER_TYPE:
2211 case REFERENCE_TYPE:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2216 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2217 /* fall through */
2218
2219 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2220 case OFFSET_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2222 {
2223 tem = fold_convert_const (NOP_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2226 }
2227 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2228 || TREE_CODE (orig) == OFFSET_TYPE)
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 if (TREE_CODE (orig) == COMPLEX_TYPE)
2231 return fold_convert_loc (loc, type,
2232 fold_build1_loc (loc, REALPART_EXPR,
2233 TREE_TYPE (orig), arg));
2234 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2236 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2237
2238 case REAL_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2240 {
2241 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2244 }
2245 else if (TREE_CODE (arg) == REAL_CST)
2246 {
2247 tem = fold_convert_const (NOP_EXPR, type, arg);
2248 if (tem != NULL_TREE)
2249 return tem;
2250 }
2251 else if (TREE_CODE (arg) == FIXED_CST)
2252 {
2253 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257
2258 switch (TREE_CODE (orig))
2259 {
2260 case INTEGER_TYPE:
2261 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2262 case POINTER_TYPE: case REFERENCE_TYPE:
2263 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2264
2265 case REAL_TYPE:
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2267
2268 case FIXED_POINT_TYPE:
2269 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2270
2271 case COMPLEX_TYPE:
2272 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2273 return fold_convert_loc (loc, type, tem);
2274
2275 default:
2276 gcc_unreachable ();
2277 }
2278
2279 case FIXED_POINT_TYPE:
2280 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2281 || TREE_CODE (arg) == REAL_CST)
2282 {
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 goto fold_convert_exit;
2286 }
2287
2288 switch (TREE_CODE (orig))
2289 {
2290 case FIXED_POINT_TYPE:
2291 case INTEGER_TYPE:
2292 case ENUMERAL_TYPE:
2293 case BOOLEAN_TYPE:
2294 case REAL_TYPE:
2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2296
2297 case COMPLEX_TYPE:
2298 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2299 return fold_convert_loc (loc, type, tem);
2300
2301 default:
2302 gcc_unreachable ();
2303 }
2304
2305 case COMPLEX_TYPE:
2306 switch (TREE_CODE (orig))
2307 {
2308 case INTEGER_TYPE:
2309 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2310 case POINTER_TYPE: case REFERENCE_TYPE:
2311 case REAL_TYPE:
2312 case FIXED_POINT_TYPE:
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2314 fold_convert_loc (loc, TREE_TYPE (type), arg),
2315 fold_convert_loc (loc, TREE_TYPE (type),
2316 integer_zero_node));
2317 case COMPLEX_TYPE:
2318 {
2319 tree rpart, ipart;
2320
2321 if (TREE_CODE (arg) == COMPLEX_EXPR)
2322 {
2323 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2324 TREE_OPERAND (arg, 0));
2325 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2326 TREE_OPERAND (arg, 1));
2327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2328 }
2329
2330 arg = save_expr (arg);
2331 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2332 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2334 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2336 }
2337
2338 default:
2339 gcc_unreachable ();
2340 }
2341
2342 case VECTOR_TYPE:
2343 if (integer_zerop (arg))
2344 return build_zero_vector (type);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2347 || TREE_CODE (orig) == VECTOR_TYPE);
2348 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2349
2350 case VOID_TYPE:
2351 tem = fold_ignored_result (arg);
2352 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2353
2354 default:
2355 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2356 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2357 gcc_unreachable ();
2358 }
2359 fold_convert_exit:
2360 protected_set_expr_location_unshare (tem, loc);
2361 return tem;
2362 }
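
/* Editorial sketch (not part of GCC): the scalar/complex conversion
   rules above, in C99 terms. Converting a scalar to a complex type
   pairs it with a zero imaginary part (COMPLEX_EXPR <x, 0>); converting
   a complex value to a scalar keeps only the real part (REALPART_EXPR).
   The helper names are illustrative only.  */
#if 0
#include <complex.h>

static double complex
scalar_to_complex_sketch (double x)
{
  return x + 0.0 * I;		/* Imaginary part is zero.  */
}

static double
complex_to_scalar_sketch (double complex z)
{
  return creal (z);		/* Imaginary part is dropped.  */
}
#endif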
2363 \f
2364 /* Return false if expr can be assumed not to be an lvalue, true
2365 otherwise. */
2366
2367 static bool
2368 maybe_lvalue_p (const_tree x)
2369 {
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x))
2372 {
2373 case VAR_DECL:
2374 case PARM_DECL:
2375 case RESULT_DECL:
2376 case LABEL_DECL:
2377 case FUNCTION_DECL:
2378 case SSA_NAME:
2379
2380 case COMPONENT_REF:
2381 case MEM_REF:
2382 case INDIRECT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case BIT_FIELD_REF:
2386 case OBJ_TYPE_REF:
2387
2388 case REALPART_EXPR:
2389 case IMAGPART_EXPR:
2390 case PREINCREMENT_EXPR:
2391 case PREDECREMENT_EXPR:
2392 case SAVE_EXPR:
2393 case TRY_CATCH_EXPR:
2394 case WITH_CLEANUP_EXPR:
2395 case COMPOUND_EXPR:
2396 case MODIFY_EXPR:
2397 case TARGET_EXPR:
2398 case COND_EXPR:
2399 case BIND_EXPR:
2400 break;
2401
2402 default:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2405 break;
2406 return false;
2407 }
2408
2409 return true;
2410 }
2411
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2413
2414 tree
2415 non_lvalue_loc (location_t loc, tree x)
2416 {
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2418 us. */
2419 if (in_gimple_form)
2420 return x;
2421
2422 if (! maybe_lvalue_p (x))
2423 return x;
2424 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2425 }
2426
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2429
2430 static tree
2431 pedantic_non_lvalue_loc (location_t loc, tree x)
2432 {
2433 return protected_set_expr_location_unshare (x, loc);
2434 }
2435 \f
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
2440
2441 enum tree_code
2442 invert_tree_comparison (enum tree_code code, bool honor_nans)
2443 {
2444 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2445 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2446 return ERROR_MARK;
2447
2448 switch (code)
2449 {
2450 case EQ_EXPR:
2451 return NE_EXPR;
2452 case NE_EXPR:
2453 return EQ_EXPR;
2454 case GT_EXPR:
2455 return honor_nans ? UNLE_EXPR : LE_EXPR;
2456 case GE_EXPR:
2457 return honor_nans ? UNLT_EXPR : LT_EXPR;
2458 case LT_EXPR:
2459 return honor_nans ? UNGE_EXPR : GE_EXPR;
2460 case LE_EXPR:
2461 return honor_nans ? UNGT_EXPR : GT_EXPR;
2462 case LTGT_EXPR:
2463 return UNEQ_EXPR;
2464 case UNEQ_EXPR:
2465 return LTGT_EXPR;
2466 case UNGT_EXPR:
2467 return LE_EXPR;
2468 case UNGE_EXPR:
2469 return LT_EXPR;
2470 case UNLT_EXPR:
2471 return GE_EXPR;
2472 case UNLE_EXPR:
2473 return GT_EXPR;
2474 case ORDERED_EXPR:
2475 return UNORDERED_EXPR;
2476 case UNORDERED_EXPR:
2477 return ORDERED_EXPR;
2478 default:
2479 gcc_unreachable ();
2480 }
2481 }
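
/* Editorial sketch (not part of GCC): why the inverse of a float
   comparison must be an unordered form when NaNs are honored. With
   x = NaN both (x < y) and (x >= y) are false, so !(x < y) corresponds
   to UNGE_EXPR -- "unordered or greater-equal" -- not to plain >=.  */
#if 0
#include <math.h>

static int
inverted_lt_sketch (double x, double y)
{
  /* Equivalent to !(x < y), i.e. UNGE_EXPR (x, y).  */
  return isunordered (x, y) || isgreaterequal (x, y);
}
#endif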
2482
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
2485
2486 enum tree_code
2487 swap_tree_comparison (enum tree_code code)
2488 {
2489 switch (code)
2490 {
2491 case EQ_EXPR:
2492 case NE_EXPR:
2493 case ORDERED_EXPR:
2494 case UNORDERED_EXPR:
2495 case LTGT_EXPR:
2496 case UNEQ_EXPR:
2497 return code;
2498 case GT_EXPR:
2499 return LT_EXPR;
2500 case GE_EXPR:
2501 return LE_EXPR;
2502 case LT_EXPR:
2503 return GT_EXPR;
2504 case LE_EXPR:
2505 return GE_EXPR;
2506 case UNGT_EXPR:
2507 return UNLT_EXPR;
2508 case UNGE_EXPR:
2509 return UNLE_EXPR;
2510 case UNLT_EXPR:
2511 return UNGT_EXPR;
2512 case UNLE_EXPR:
2513 return UNGE_EXPR;
2514 default:
2515 gcc_unreachable ();
2516 }
2517 }
2518
2519
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2523
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code)
2526 {
2527 switch (code)
2528 {
2529 case LT_EXPR:
2530 return COMPCODE_LT;
2531 case EQ_EXPR:
2532 return COMPCODE_EQ;
2533 case LE_EXPR:
2534 return COMPCODE_LE;
2535 case GT_EXPR:
2536 return COMPCODE_GT;
2537 case NE_EXPR:
2538 return COMPCODE_NE;
2539 case GE_EXPR:
2540 return COMPCODE_GE;
2541 case ORDERED_EXPR:
2542 return COMPCODE_ORD;
2543 case UNORDERED_EXPR:
2544 return COMPCODE_UNORD;
2545 case UNLT_EXPR:
2546 return COMPCODE_UNLT;
2547 case UNEQ_EXPR:
2548 return COMPCODE_UNEQ;
2549 case UNLE_EXPR:
2550 return COMPCODE_UNLE;
2551 case UNGT_EXPR:
2552 return COMPCODE_UNGT;
2553 case LTGT_EXPR:
2554 return COMPCODE_LTGT;
2555 case UNGE_EXPR:
2556 return COMPCODE_UNGE;
2557 default:
2558 gcc_unreachable ();
2559 }
2560 }
2561
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2565
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code)
2568 {
2569 switch (code)
2570 {
2571 case COMPCODE_LT:
2572 return LT_EXPR;
2573 case COMPCODE_EQ:
2574 return EQ_EXPR;
2575 case COMPCODE_LE:
2576 return LE_EXPR;
2577 case COMPCODE_GT:
2578 return GT_EXPR;
2579 case COMPCODE_NE:
2580 return NE_EXPR;
2581 case COMPCODE_GE:
2582 return GE_EXPR;
2583 case COMPCODE_ORD:
2584 return ORDERED_EXPR;
2585 case COMPCODE_UNORD:
2586 return UNORDERED_EXPR;
2587 case COMPCODE_UNLT:
2588 return UNLT_EXPR;
2589 case COMPCODE_UNEQ:
2590 return UNEQ_EXPR;
2591 case COMPCODE_UNLE:
2592 return UNLE_EXPR;
2593 case COMPCODE_UNGT:
2594 return UNGT_EXPR;
2595 case COMPCODE_LTGT:
2596 return LTGT_EXPR;
2597 case COMPCODE_UNGE:
2598 return UNGE_EXPR;
2599 default:
2600 gcc_unreachable ();
2601 }
2602 }
2603
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
2609
2610 tree
2611 combine_comparisons (location_t loc,
2612 enum tree_code code, enum tree_code lcode,
2613 enum tree_code rcode, tree truth_type,
2614 tree ll_arg, tree lr_arg)
2615 {
2616 bool honor_nans = HONOR_NANS (ll_arg);
2617 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2618 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2619 int compcode;
2620
2621 switch (code)
2622 {
2623 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2624 compcode = lcompcode & rcompcode;
2625 break;
2626
2627 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2628 compcode = lcompcode | rcompcode;
2629 break;
2630
2631 default:
2632 return NULL_TREE;
2633 }
2634
2635 if (!honor_nans)
2636 {
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode &= ~COMPCODE_UNORD;
2640 if (compcode == COMPCODE_LTGT)
2641 compcode = COMPCODE_NE;
2642 else if (compcode == COMPCODE_ORD)
2643 compcode = COMPCODE_TRUE;
2644 }
2645 else if (flag_trapping_math)
2646 {
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2650 && (lcompcode != COMPCODE_EQ)
2651 && (lcompcode != COMPCODE_ORD);
2652 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2653 && (rcompcode != COMPCODE_EQ)
2654 && (rcompcode != COMPCODE_ORD);
2655 bool trap = (compcode & COMPCODE_UNORD) == 0
2656 && (compcode != COMPCODE_EQ)
2657 && (compcode != COMPCODE_ORD);
2658
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2666 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2667 rtrap = false;
2668
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2671 if (rtrap && !ltrap
2672 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2673 return NULL_TREE;
2674
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap || rtrap) != trap)
2677 return NULL_TREE;
2678 }
2679
2680 if (compcode == COMPCODE_TRUE)
2681 return constant_boolean_node (true, truth_type);
2682 else if (compcode == COMPCODE_FALSE)
2683 return constant_boolean_node (false, truth_type);
2684 else
2685 {
2686 enum tree_code tcode;
2687
2688 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2689 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2690 }
2691 }
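
/* Editorial sketch (not part of GCC): the bitwise combination above at
   work. In the compcode encoding LT, EQ and GT each occupy their own
   bit, so ANDing or ORing two comparisons of the same operands is
   plain integer arithmetic. The enum below is an illustration of that
   bit layout, not a re-declaration of the real enum.  */
#if 0
#include <assert.h>

enum cc_sketch { CC_LT = 1, CC_EQ = 2, CC_LE = 3, CC_GT = 4, CC_GE = 6 };

static void
compcode_sketch (void)
{
  assert ((CC_LT | CC_EQ) == CC_LE);	/* a < b || a == b  =>  a <= b.  */
  assert ((CC_LE & CC_GE) == CC_EQ);	/* a <= b && a >= b  =>  a == b.  */
  assert ((CC_LT & CC_GT) == 0);	/* a < b && a > b   =>  false.   */
}
#endif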
2692 \f
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2696
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2703
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 have no side effects, the VAR_DECLs, PARM_DECLs etc. must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2714
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2718
2719 int
2720 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2721 {
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2724 || TREE_TYPE (arg0) == error_mark_node
2725 || TREE_TYPE (arg1) == error_mark_node)
2726 return 0;
2727
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2731 return 0;
2732
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2736 return tree_int_cst_equal (arg0, arg1);
2737
2738 /* If both types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2746
2747 /* We cannot consider pointers to different address spaces equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2751 return 0;
2752
2753 /* If both types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2758
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2761
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2766 && COMPARISON_CLASS_P (arg0)
2767 && COMPARISON_CLASS_P (arg1))
2768 {
2769 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2770
2771 if (TREE_CODE (arg0) == swap_code)
2772 return operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags);
2776 }
2777
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2781 return 0;
2782
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2788 return 0;
2789
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 return 1;
2804
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2809 {
2810 case INTEGER_CST:
2811 return tree_int_cst_equal (arg0, arg1);
2812
2813 case FIXED_CST:
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2816
2817 case REAL_CST:
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2819 TREE_REAL_CST (arg1)))
2820 return 1;
2821
2822
2823 if (!HONOR_SIGNED_ZEROS (arg0))
2824 {
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0) && real_zerop (arg1))
2828 return 1;
2829 }
2830 return 0;
2831
2832 case VECTOR_CST:
2833 {
2834 unsigned i;
2835
2836 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2837 return 0;
2838
2839 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2840 {
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2842 VECTOR_CST_ELT (arg1, i), flags))
2843 return 0;
2844 }
2845 return 1;
2846 }
2847
2848 case COMPLEX_CST:
2849 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 flags)
2851 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2852 flags));
2853
2854 case STRING_CST:
2855 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2856 && ! memcmp (TREE_STRING_POINTER (arg0),
2857 TREE_STRING_POINTER (arg1),
2858 TREE_STRING_LENGTH (arg0)));
2859
2860 case ADDR_EXPR:
2861 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2862 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2863 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2864 default:
2865 break;
2866 }
2867
2868 if (flags & OEP_ONLY_CONST)
2869 return 0;
2870
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873 non-null value. In the latter case, if either is null, then both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2877
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2881
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2883 {
2884 case tcc_unary:
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0))
2887 {
2888 CASE_CONVERT:
2889 case FIX_TRUNC_EXPR:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2892 return 0;
2893 break;
2894 default:
2895 break;
2896 }
2897
2898 return OP_SAME (0);
2899
2900
2901 case tcc_comparison:
2902 case tcc_binary:
2903 if (OP_SAME (0) && OP_SAME (1))
2904 return 1;
2905
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0))
2908 && operand_equal_p (TREE_OPERAND (arg0, 0),
2909 TREE_OPERAND (arg1, 1), flags)
2910 && operand_equal_p (TREE_OPERAND (arg0, 1),
2911 TREE_OPERAND (arg1, 0), flags));
2912
2913 case tcc_reference:
2914 /* If either of the pointer (or reference) expressions we are
2915 dereferencing contain a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2918 && (TREE_SIDE_EFFECTS (arg0)
2919 || TREE_SIDE_EFFECTS (arg1)))
2920 return 0;
2921
2922 switch (TREE_CODE (arg0))
2923 {
2924 case INDIRECT_REF:
2925 if (!(flags & OEP_ADDRESS_OF)
2926 && (TYPE_ALIGN (TREE_TYPE (arg0))
2927 != TYPE_ALIGN (TREE_TYPE (arg1))))
2928 return 0;
2929 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2930 return OP_SAME (0);
2931
2932 case REALPART_EXPR:
2933 case IMAGPART_EXPR:
2934 return OP_SAME (0);
2935
2936 case TARGET_MEM_REF:
2937 case MEM_REF:
2938 /* Require equal access sizes, and similar pointer types.
2939 We can have incomplete types for array references of
2940 variable-sized arrays from the Fortran frontend
2941 though. Also verify the types are compatible. */
2942 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2943 || (TYPE_SIZE (TREE_TYPE (arg0))
2944 && TYPE_SIZE (TREE_TYPE (arg1))
2945 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2946 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2947 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2948 && ((flags & OEP_ADDRESS_OF)
2949 || (alias_ptr_types_compatible_p
2950 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2951 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2952 && (MR_DEPENDENCE_CLIQUE (arg0)
2953 == MR_DEPENDENCE_CLIQUE (arg1))
2954 && (MR_DEPENDENCE_BASE (arg0)
2955 == MR_DEPENDENCE_BASE (arg1))
2956 && (TYPE_ALIGN (TREE_TYPE (arg0))
2957 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2958 return 0;
2959 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2960 return (OP_SAME (0) && OP_SAME (1)
2961 /* TARGET_MEM_REFs require equal extra operands. */
2962 && (TREE_CODE (arg0) != TARGET_MEM_REF
2963 || (OP_SAME_WITH_NULL (2)
2964 && OP_SAME_WITH_NULL (3)
2965 && OP_SAME_WITH_NULL (4))));
2966
2967 case ARRAY_REF:
2968 case ARRAY_RANGE_REF:
2969 /* Operands 2 and 3 may be null.
2970 Compare the array index by value first if it is constant, as we
2971 may have different types but the same value here. */
2972 if (!OP_SAME (0))
2973 return 0;
2974 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2975 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2976 TREE_OPERAND (arg1, 1))
2977 || OP_SAME (1))
2978 && OP_SAME_WITH_NULL (2)
2979 && OP_SAME_WITH_NULL (3));
2980
2981 case COMPONENT_REF:
2982 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2983 may be NULL when we're called to compare MEM_EXPRs. */
2984 if (!OP_SAME_WITH_NULL (0)
2985 || !OP_SAME (1))
2986 return 0;
2987 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2988 return OP_SAME_WITH_NULL (2);
2989
2990 case BIT_FIELD_REF:
2991 if (!OP_SAME (0))
2992 return 0;
2993 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2994 return OP_SAME (1) && OP_SAME (2);
2995
2996 default:
2997 return 0;
2998 }
2999
3000 case tcc_expression:
3001 switch (TREE_CODE (arg0))
3002 {
3003 case ADDR_EXPR:
3004 return operand_equal_p (TREE_OPERAND (arg0, 0),
3005 TREE_OPERAND (arg1, 0),
3006 flags | OEP_ADDRESS_OF);
3007
3008 case TRUTH_NOT_EXPR:
3009 return OP_SAME (0);
3010
3011 case TRUTH_ANDIF_EXPR:
3012 case TRUTH_ORIF_EXPR:
3013 return OP_SAME (0) && OP_SAME (1);
3014
3015 case FMA_EXPR:
3016 case WIDEN_MULT_PLUS_EXPR:
3017 case WIDEN_MULT_MINUS_EXPR:
3018 if (!OP_SAME (2))
3019 return 0;
3020 /* The multiplication operands are commutative. */
3021 /* FALLTHRU */
3022
3023 case TRUTH_AND_EXPR:
3024 case TRUTH_OR_EXPR:
3025 case TRUTH_XOR_EXPR:
3026 if (OP_SAME (0) && OP_SAME (1))
3027 return 1;
3028
3029 /* Otherwise take into account this is a commutative operation. */
3030 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3031 TREE_OPERAND (arg1, 1), flags)
3032 && operand_equal_p (TREE_OPERAND (arg0, 1),
3033 TREE_OPERAND (arg1, 0), flags));
3034
3035 case COND_EXPR:
3036 case VEC_COND_EXPR:
3037 case DOT_PROD_EXPR:
3038 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3039
3040 default:
3041 return 0;
3042 }
3043
3044 case tcc_vl_exp:
3045 switch (TREE_CODE (arg0))
3046 {
3047 case CALL_EXPR:
3048 /* If the CALL_EXPRs call different functions, then they
3049 clearly cannot be equal. */
3050 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3051 flags))
3052 return 0;
3053
3054 {
3055 unsigned int cef = call_expr_flags (arg0);
3056 if (flags & OEP_PURE_SAME)
3057 cef &= ECF_CONST | ECF_PURE;
3058 else
3059 cef &= ECF_CONST;
3060 if (!cef)
3061 return 0;
3062 }
3063
3064 /* Now see if all the arguments are the same. */
3065 {
3066 const_call_expr_arg_iterator iter0, iter1;
3067 const_tree a0, a1;
3068 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3069 a1 = first_const_call_expr_arg (arg1, &iter1);
3070 a0 && a1;
3071 a0 = next_const_call_expr_arg (&iter0),
3072 a1 = next_const_call_expr_arg (&iter1))
3073 if (! operand_equal_p (a0, a1, flags))
3074 return 0;
3075
3076 /* If we get here and both argument lists are exhausted
3077 then the CALL_EXPRs are equal. */
3078 return ! (a0 || a1);
3079 }
3080 default:
3081 return 0;
3082 }
3083
3084 case tcc_declaration:
3085 /* Consider __builtin_sqrt equal to sqrt. */
3086 return (TREE_CODE (arg0) == FUNCTION_DECL
3087 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3088 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3089 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3090
3091 default:
3092 return 0;
3093 }
3094
3095 #undef OP_SAME
3096 #undef OP_SAME_WITH_NULL
3097 }
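
/* Editorial sketch (not part of GCC): the signed-zero subtlety that the
   OEP_ONLY_CONST documentation above refers to. C's == calls 0.0 and
   -0.0 equal, yet the two are distinguishable (e.g. by sign bit), so
   the folder must not treat the two REAL_CSTs as interchangeable when
   signed zeros are honored.  */
#if 0
#include <math.h>

static int
signed_zero_sketch (void)
{
  double pz = 0.0, nz = -0.0;
  return pz == nz			/* True: == cannot tell them apart.  */
	 && signbit (nz) != signbit (pz);	/* Yet they differ.  */
}
#endif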
3098 \f
3099 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3100 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3101
3102 When in doubt, return 0. */
3103
3104 static int
3105 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3106 {
3107 int unsignedp1, unsignedpo;
3108 tree primarg0, primarg1, primother;
3109 unsigned int correct_width;
3110
3111 if (operand_equal_p (arg0, arg1, 0))
3112 return 1;
3113
3114 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3115 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3116 return 0;
3117
3118 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3119 and see if the inner values are the same. This removes any
3120 signedness comparison, which doesn't matter here. */
3121 primarg0 = arg0, primarg1 = arg1;
3122 STRIP_NOPS (primarg0);
3123 STRIP_NOPS (primarg1);
3124 if (operand_equal_p (primarg0, primarg1, 0))
3125 return 1;
3126
3127 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3128 actual comparison operand, ARG0.
3129
3130 First throw away any conversions to wider types
3131 already present in the operands. */
3132
3133 primarg1 = get_narrower (arg1, &unsignedp1);
3134 primother = get_narrower (other, &unsignedpo);
3135
3136 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3137 if (unsignedp1 == unsignedpo
3138 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3139 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3140 {
3141 tree type = TREE_TYPE (arg0);
3142
3143 /* Make sure the shorter operand is extended the right way
3144 to match the longer operand. */
3145 primarg1 = fold_convert (signed_or_unsigned_type_for
3146 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3147
3148 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3149 return 1;
3150 }
3151
3152 return 0;
3153 }
3154 \f
3155 /* See if ARG is an expression that is either a comparison or is performing
3156 arithmetic on comparisons. The comparisons must only be comparing
3157 two different values, which will be stored in *CVAL1 and *CVAL2; if
3158 they are nonzero it means that some operands have already been found.
3159 No variables may be used anywhere else in the expression except in the
3160 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3161 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3162
3163 If this is true, return 1. Otherwise, return zero. */
3164
3165 static int
3166 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3167 {
3168 enum tree_code code = TREE_CODE (arg);
3169 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3170
3171 /* We can handle some of the tcc_expression cases here. */
3172 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3173 tclass = tcc_unary;
3174 else if (tclass == tcc_expression
3175 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3176 || code == COMPOUND_EXPR))
3177 tclass = tcc_binary;
3178
3179 else if (tclass == tcc_expression && code == SAVE_EXPR
3180 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3181 {
3182 /* If we've already found a CVAL1 or CVAL2, this expression is
3183 too complex to handle. */
3184 if (*cval1 || *cval2)
3185 return 0;
3186
3187 tclass = tcc_unary;
3188 *save_p = 1;
3189 }
3190
3191 switch (tclass)
3192 {
3193 case tcc_unary:
3194 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3195
3196 case tcc_binary:
3197 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3198 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3199 cval1, cval2, save_p));
3200
3201 case tcc_constant:
3202 return 1;
3203
3204 case tcc_expression:
3205 if (code == COND_EXPR)
3206 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3207 cval1, cval2, save_p)
3208 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3209 cval1, cval2, save_p)
3210 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3211 cval1, cval2, save_p));
3212 return 0;
3213
3214 case tcc_comparison:
3215 /* First see if we can handle the first operand, then the second. For
3216 the second operand, we know *CVAL1 can't be zero. It must be that
3217 one side of the comparison is each of the values; test for the
3218 case where this isn't true by failing if the two operands
3219 are the same. */
3220
3221 if (operand_equal_p (TREE_OPERAND (arg, 0),
3222 TREE_OPERAND (arg, 1), 0))
3223 return 0;
3224
3225 if (*cval1 == 0)
3226 *cval1 = TREE_OPERAND (arg, 0);
3227 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3228 ;
3229 else if (*cval2 == 0)
3230 *cval2 = TREE_OPERAND (arg, 0);
3231 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3232 ;
3233 else
3234 return 0;
3235
3236 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3237 ;
3238 else if (*cval2 == 0)
3239 *cval2 = TREE_OPERAND (arg, 1);
3240 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3241 ;
3242 else
3243 return 0;
3244
3245 return 1;
3246
3247 default:
3248 return 0;
3249 }
3250 }
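
/* Editorial sketch (not part of GCC): expressions the predicate above
   accepts and rejects. (a < b) | (a == b) compares only the two
   values a and b, so it succeeds with *CVAL1 = a and *CVAL2 = b;
   whereas (a < b) | (a < c) mentions three values and is rejected.  */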
3251 \f
3252 /* ARG is a tree that is known to contain just arithmetic operations and
3253 comparisons. Evaluate the operations in the tree substituting NEW0 for
3254 any occurrence of OLD0 as an operand of a comparison and likewise for
3255 NEW1 and OLD1. */
3256
3257 static tree
3258 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3259 tree old1, tree new1)
3260 {
3261 tree type = TREE_TYPE (arg);
3262 enum tree_code code = TREE_CODE (arg);
3263 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3264
3265 /* We can handle some of the tcc_expression cases here. */
3266 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3267 tclass = tcc_unary;
3268 else if (tclass == tcc_expression
3269 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3270 tclass = tcc_binary;
3271
3272 switch (tclass)
3273 {
3274 case tcc_unary:
3275 return fold_build1_loc (loc, code, type,
3276 eval_subst (loc, TREE_OPERAND (arg, 0),
3277 old0, new0, old1, new1));
3278
3279 case tcc_binary:
3280 return fold_build2_loc (loc, code, type,
3281 eval_subst (loc, TREE_OPERAND (arg, 0),
3282 old0, new0, old1, new1),
3283 eval_subst (loc, TREE_OPERAND (arg, 1),
3284 old0, new0, old1, new1));
3285
3286 case tcc_expression:
3287 switch (code)
3288 {
3289 case SAVE_EXPR:
3290 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3291 old1, new1);
3292
3293 case COMPOUND_EXPR:
3294 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3295 old1, new1);
3296
3297 case COND_EXPR:
3298 return fold_build3_loc (loc, code, type,
3299 eval_subst (loc, TREE_OPERAND (arg, 0),
3300 old0, new0, old1, new1),
3301 eval_subst (loc, TREE_OPERAND (arg, 1),
3302 old0, new0, old1, new1),
3303 eval_subst (loc, TREE_OPERAND (arg, 2),
3304 old0, new0, old1, new1));
3305 default:
3306 break;
3307 }
3308 /* Fall through - ??? */
3309
3310 case tcc_comparison:
3311 {
3312 tree arg0 = TREE_OPERAND (arg, 0);
3313 tree arg1 = TREE_OPERAND (arg, 1);
3314
3315 /* We need to check both for exact equality and tree equality. The
3316 former will be true if the operand has a side-effect. In that
3317 case, we know the operand occurred exactly once. */
3318
3319 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3320 arg0 = new0;
3321 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3322 arg0 = new1;
3323
3324 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3325 arg1 = new0;
3326 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3327 arg1 = new1;
3328
3329 return fold_build2_loc (loc, code, type, arg0, arg1);
3330 }
3331
3332 default:
3333 return arg;
3334 }
3335 }
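
/* Editorial sketch (not part of GCC): eval_subst in action. With
   ARG = (x < y) && (x == y), OLD0 = x, NEW0 = 0, OLD1 = y and NEW1 = 1,
   the substitution yields (0 < 1) && (0 == 1), which the fold_build2
   calls along the way reduce to false.  */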
3336 \f
3337 /* Return a tree for the case when the result of an expression is RESULT
3338 converted to TYPE and OMITTED was previously an operand of the expression
3339 but is now not needed (e.g., we folded OMITTED * 0).
3340
3341 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3342 the conversion of RESULT to TYPE. */
3343
3344 tree
3345 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3346 {
3347 tree t = fold_convert_loc (loc, type, result);
3348
3349 /* If the resulting operand is an empty statement, just return the omitted
3350 statement cast to void. */
3351 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3352 return build1_loc (loc, NOP_EXPR, void_type_node,
3353 fold_ignored_result (omitted));
3354
3355 if (TREE_SIDE_EFFECTS (omitted))
3356 return build2_loc (loc, COMPOUND_EXPR, type,
3357 fold_ignored_result (omitted), t);
3358
3359 return non_lvalue_loc (loc, t);
3360 }
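
/* Editorial sketch (not part of GCC): why an omitted operand with side
   effects must still be evaluated. Folding f () * 0 to a bare 0 would
   drop the call, so the result is built as the equivalent of a comma
   expression; f is an illustrative name.  */
#if 0
extern int f (void);

static int
omit_sketch (void)
{
  return (f (), 0);	/* f () * 0: evaluate f for its effect, yield 0.  */
}
#endif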
3361
3362 /* Return a tree for the case when the result of an expression is RESULT
3363 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3364 of the expression but are now not needed.
3365
3366 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3367 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3368 evaluated before OMITTED2. Otherwise, if neither has side effects,
3369 just do the conversion of RESULT to TYPE. */
3370
3371 tree
3372 omit_two_operands_loc (location_t loc, tree type, tree result,
3373 tree omitted1, tree omitted2)
3374 {
3375 tree t = fold_convert_loc (loc, type, result);
3376
3377 if (TREE_SIDE_EFFECTS (omitted2))
3378 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3379 if (TREE_SIDE_EFFECTS (omitted1))
3380 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3381
3382 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3383 }
3384
3385 \f
3386 /* Return a simplified tree node for the truth-negation of ARG. This
3387 never alters ARG itself. We assume that ARG is an operation that
3388 returns a truth value (0 or 1).
3389
3390 FIXME: one would think we would fold the result, but it causes
3391 problems with the dominator optimizer. */
3392
3393 static tree
3394 fold_truth_not_expr (location_t loc, tree arg)
3395 {
3396 tree type = TREE_TYPE (arg);
3397 enum tree_code code = TREE_CODE (arg);
3398 location_t loc1, loc2;
3399
3400 /* If this is a comparison, we can simply invert it, except for
3401 floating-point non-equality comparisons, in which case we just
3402 enclose a TRUTH_NOT_EXPR around what we have. */
3403
3404 if (TREE_CODE_CLASS (code) == tcc_comparison)
3405 {
3406 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3407 if (FLOAT_TYPE_P (op_type)
3408 && flag_trapping_math
3409 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3410 && code != NE_EXPR && code != EQ_EXPR)
3411 return NULL_TREE;
3412
3413 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3414 if (code == ERROR_MARK)
3415 return NULL_TREE;
3416
3417 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3418 TREE_OPERAND (arg, 1));
3419 }
3420
3421 switch (code)
3422 {
3423 case INTEGER_CST:
3424 return constant_boolean_node (integer_zerop (arg), type);
3425
3426 case TRUTH_AND_EXPR:
3427 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3428 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3429 return build2_loc (loc, TRUTH_OR_EXPR, type,
3430 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3431 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3432
3433 case TRUTH_OR_EXPR:
3434 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3435 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3436 return build2_loc (loc, TRUTH_AND_EXPR, type,
3437 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3438 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3439
3440 case TRUTH_XOR_EXPR:
3441 /* Here we can invert either operand. We invert the first operand
3442 unless the second operand is a TRUTH_NOT_EXPR, in which case the
3443 result is the XOR of the first operand with the operand of the
3444 negation of the second operand. */
3445
3446 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3447 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3448 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3449 else
3450 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3451 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3452 TREE_OPERAND (arg, 1));
3453
3454 case TRUTH_ANDIF_EXPR:
3455 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3456 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3457 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3458 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3459 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3460
3461 case TRUTH_ORIF_EXPR:
3462 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3463 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3464 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3465 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3466 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3467
3468 case TRUTH_NOT_EXPR:
3469 return TREE_OPERAND (arg, 0);
3470
3471 case COND_EXPR:
3472 {
3473 tree arg1 = TREE_OPERAND (arg, 1);
3474 tree arg2 = TREE_OPERAND (arg, 2);
3475
3476 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3477 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3478
3479 /* A COND_EXPR may have a throw as one operand, which
3480 then has void type. Just leave void operands
3481 as they are. */
3482 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3483 VOID_TYPE_P (TREE_TYPE (arg1))
3484 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3485 VOID_TYPE_P (TREE_TYPE (arg2))
3486 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3487 }
3488
3489 case COMPOUND_EXPR:
3490 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3491 return build2_loc (loc, COMPOUND_EXPR, type,
3492 TREE_OPERAND (arg, 0),
3493 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3494
3495 case NON_LVALUE_EXPR:
3496 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3497 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3498
3499 CASE_CONVERT:
3500 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3501 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3502
3503 /* ... fall through ... */
3504
3505 case FLOAT_EXPR:
3506 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3507 return build1_loc (loc, TREE_CODE (arg), type,
3508 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3509
3510 case BIT_AND_EXPR:
3511 if (!integer_onep (TREE_OPERAND (arg, 1)))
3512 return NULL_TREE;
3513 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3514
3515 case SAVE_EXPR:
3516 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3517
3518 case CLEANUP_POINT_EXPR:
3519 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3520 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3521 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3522
3523 default:
3524 return NULL_TREE;
3525 }
3526 }
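
/* Editorial sketch (not part of GCC): the De Morgan rewrites applied
   above, stated on plain C expressions:
     !(a && b)     ->  !a || !b
     !(a || b)     ->  !a && !b
     !(c ? a : b)  ->  c ? !a : !b
   The short-circuit pairs swap with each other (TRUTH_ANDIF_EXPR
   becomes TRUTH_ORIF_EXPR and vice versa), which preserves the
   guarantee that the second operand is evaluated conditionally.  */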
3527
3528 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3529 assume that ARG is an operation that returns a truth value (0 or 1
3530 for scalars, 0 or -1 for vectors). Return the folded expression if
3531 folding is successful. Otherwise, return NULL_TREE. */
3532
3533 static tree
3534 fold_invert_truthvalue (location_t loc, tree arg)
3535 {
3536 tree type = TREE_TYPE (arg);
3537 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3538 ? BIT_NOT_EXPR
3539 : TRUTH_NOT_EXPR,
3540 type, arg);
3541 }
3542
3543 /* Return a simplified tree node for the truth-negation of ARG. This
3544 never alters ARG itself. We assume that ARG is an operation that
3545 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3546
3547 tree
3548 invert_truthvalue_loc (location_t loc, tree arg)
3549 {
3550 if (TREE_CODE (arg) == ERROR_MARK)
3551 return arg;
3552
3553 tree type = TREE_TYPE (arg);
3554 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3555 ? BIT_NOT_EXPR
3556 : TRUTH_NOT_EXPR,
3557 type, arg);
3558 }
3559
3560 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3561 operands are another bit-wise operation with a common input. If so,
3562 distribute the bit operations to save an operation and possibly two if
3563 constants are involved. For example, convert
3564 (A | B) & (A | C) into A | (B & C)
3565 Further simplification will occur if B and C are constants.
3566
3567 If this optimization cannot be done, 0 will be returned. */
3568
3569 static tree
3570 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3571 tree arg0, tree arg1)
3572 {
3573 tree common;
3574 tree left, right;
3575
3576 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3577 || TREE_CODE (arg0) == code
3578 || (TREE_CODE (arg0) != BIT_AND_EXPR
3579 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3580 return 0;
3581
3582 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3583 {
3584 common = TREE_OPERAND (arg0, 0);
3585 left = TREE_OPERAND (arg0, 1);
3586 right = TREE_OPERAND (arg1, 1);
3587 }
3588 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3589 {
3590 common = TREE_OPERAND (arg0, 0);
3591 left = TREE_OPERAND (arg0, 1);
3592 right = TREE_OPERAND (arg1, 0);
3593 }
3594 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3595 {
3596 common = TREE_OPERAND (arg0, 1);
3597 left = TREE_OPERAND (arg0, 0);
3598 right = TREE_OPERAND (arg1, 1);
3599 }
3600 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3601 {
3602 common = TREE_OPERAND (arg0, 1);
3603 left = TREE_OPERAND (arg0, 0);
3604 right = TREE_OPERAND (arg1, 0);
3605 }
3606 else
3607 return 0;
3608
3609 common = fold_convert_loc (loc, type, common);
3610 left = fold_convert_loc (loc, type, left);
3611 right = fold_convert_loc (loc, type, right);
3612 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3613 fold_build2_loc (loc, code, type, left, right));
3614 }
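
/* Editorial sketch (not part of GCC): the distribution above on
   concrete masks. When B and C are constants, (x | B) & (x | C)
   becomes x | (B & C), saving one runtime operation because B & C
   folds to a single constant.  */
#if 0
static unsigned int
distribute_sketch (unsigned int x)
{
  /* (x | 0xff00) & (x | 0x0ff0)  ==  x | (0xff00 & 0x0ff0)
				  ==  x | 0x0f00.  */
  return x | 0x0f00;
}
#endif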
3615
3616 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3617 with code CODE. This optimization is unsafe. */
3618 static tree
3619 distribute_real_division (location_t loc, enum tree_code code, tree type,
3620 tree arg0, tree arg1)
3621 {
3622 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3623 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3624
3625 /* (A / C) +- (B / C) -> (A +- B) / C. */
3626 if (mul0 == mul1
3627 && operand_equal_p (TREE_OPERAND (arg0, 1),
3628 TREE_OPERAND (arg1, 1), 0))
3629 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3630 fold_build2_loc (loc, code, type,
3631 TREE_OPERAND (arg0, 0),
3632 TREE_OPERAND (arg1, 0)),
3633 TREE_OPERAND (arg0, 1));
3634
3635 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3636 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3637 TREE_OPERAND (arg1, 0), 0)
3638 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3639 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3640 {
3641 REAL_VALUE_TYPE r0, r1;
3642 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3643 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3644 if (!mul0)
3645 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3646 if (!mul1)
3647 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3648 real_arithmetic (&r0, code, &r0, &r1);
3649 return fold_build2_loc (loc, MULT_EXPR, type,
3650 TREE_OPERAND (arg0, 0),
3651 build_real (type, r0));
3652 }
3653
3654 return NULL_TREE;
3655 }
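
/* Editorial sketch (not part of GCC): the first rewrite above on plain
   doubles. a / c + b / c -> (a + b) / c trades two divisions for one,
   but the intermediate rounding can differ, which is why the comment
   above flags the transformation as unsafe.  */
#if 0
static double
div_distribute_sketch (double a, double b, double c)
{
  return (a + b) / c;	/* Replaces a / c + b / c.  */
}
#endif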
3656 \f
3657 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3658 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3659
3660 static tree
3661 make_bit_field_ref (location_t loc, tree inner, tree type,
3662 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3663 {
3664 tree result, bftype;
3665
3666 if (bitpos == 0)
3667 {
3668 tree size = TYPE_SIZE (TREE_TYPE (inner));
3669 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3670 || POINTER_TYPE_P (TREE_TYPE (inner)))
3671 && tree_fits_shwi_p (size)
3672 && tree_to_shwi (size) == bitsize)
3673 return fold_convert_loc (loc, type, inner);
3674 }
3675
3676 bftype = type;
3677 if (TYPE_PRECISION (bftype) != bitsize
3678 || TYPE_UNSIGNED (bftype) == !unsignedp)
3679 bftype = build_nonstandard_integer_type (bitsize, 0);
3680
3681 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3682 size_int (bitsize), bitsize_int (bitpos));
3683
3684 if (bftype != type)
3685 result = fold_convert_loc (loc, type, result);
3686
3687 return result;
3688 }
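
/* For instance, with INNER a 32-bit word, BITSIZE == 8 and BITPOS == 16,
   the reference built here denotes eight bits starting at bit 16, much as
   ((w >> 16) & 0xff) would at the source level (a sketch; the actual bit
   numbering depends on the target's endianness).  */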
3689
3690 /* Optimize a bit-field compare.
3691
3692 There are two cases: First is a compare against a constant and the
3693 second is a comparison of two items where the fields are at the same
3694 bit position relative to the start of a chunk (byte, halfword, word)
3695 large enough to contain it. In these cases we can avoid the shift
3696 implicit in bitfield extractions.
3697
3698 For constants, we emit a compare of the shifted constant with the
3699 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3700    compared.  For two fields at the same position, we do the ANDs with a
3701    similar mask and compare the results of the ANDs.
3702
3703 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3704 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3705 are the left and right operands of the comparison, respectively.
3706
3707 If the optimization described above can be done, we return the resulting
3708 tree. Otherwise we return zero. */
3709
3710 static tree
3711 optimize_bit_field_compare (location_t loc, enum tree_code code,
3712 tree compare_type, tree lhs, tree rhs)
3713 {
3714 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3715 tree type = TREE_TYPE (lhs);
3716 tree unsigned_type;
3717 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3718 machine_mode lmode, rmode, nmode;
3719 int lunsignedp, runsignedp;
3720 int lvolatilep = 0, rvolatilep = 0;
3721 tree linner, rinner = NULL_TREE;
3722 tree mask;
3723 tree offset;
3724
3725 /* Get all the information about the extractions being done. If the bit size
3726      is the same as the size of the underlying object, we aren't doing an
3727 extraction at all and so can do nothing. We also don't want to
3728 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3729 then will no longer be able to replace it. */
3730 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3731 &lunsignedp, &lvolatilep, false);
3732 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3733 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3734 return 0;
3735
3736 if (!const_p)
3737 {
3738 /* If this is not a constant, we can only do something if bit positions,
3739 sizes, and signedness are the same. */
3740 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3741 &runsignedp, &rvolatilep, false);
3742
3743 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3744 || lunsignedp != runsignedp || offset != 0
3745 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3746 return 0;
3747 }
3748
3749 /* See if we can find a mode to refer to this field. We should be able to,
3750 but fail if we can't. */
3751 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3752 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3753 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3754 TYPE_ALIGN (TREE_TYPE (rinner))),
3755 word_mode, false);
3756 if (nmode == VOIDmode)
3757 return 0;
3758
3759   /* Set an unsigned type of the precision of this mode for the
3760      shifts below.  */
3761 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3762
3763 /* Compute the bit position and size for the new reference and our offset
3764 within it. If the new reference is the same size as the original, we
3765 won't optimize anything, so return zero. */
3766 nbitsize = GET_MODE_BITSIZE (nmode);
3767 nbitpos = lbitpos & ~ (nbitsize - 1);
3768 lbitpos -= nbitpos;
3769 if (nbitsize == lbitsize)
3770 return 0;
3771
3772 if (BYTES_BIG_ENDIAN)
3773 lbitpos = nbitsize - lbitsize - lbitpos;
3774
3775 /* Make the mask to be used against the extracted field. */
3776 mask = build_int_cst_type (unsigned_type, -1);
3777 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3778 mask = const_binop (RSHIFT_EXPR, mask,
3779 size_int (nbitsize - lbitsize - lbitpos));
3780
3781 if (! const_p)
3782 /* If not comparing with constant, just rework the comparison
3783 and return. */
3784 return fold_build2_loc (loc, code, compare_type,
3785 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3786 make_bit_field_ref (loc, linner,
3787 unsigned_type,
3788 nbitsize, nbitpos,
3789 1),
3790 mask),
3791 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3792 make_bit_field_ref (loc, rinner,
3793 unsigned_type,
3794 nbitsize, nbitpos,
3795 1),
3796 mask));
3797
3798 /* Otherwise, we are handling the constant case. See if the constant is too
3799      big for the field.  Warn and return a tree for 0 (false) if so.  We do
3800 this not only for its own sake, but to avoid having to test for this
3801 error case below. If we didn't, we might generate wrong code.
3802
3803 For unsigned fields, the constant shifted right by the field length should
3804 be all zero. For signed fields, the high-order bits should agree with
3805 the sign bit. */
3806
3807 if (lunsignedp)
3808 {
3809 if (wi::lrshift (rhs, lbitsize) != 0)
3810 {
3811 warning (0, "comparison is always %d due to width of bit-field",
3812 code == NE_EXPR);
3813 return constant_boolean_node (code == NE_EXPR, compare_type);
3814 }
3815 }
3816 else
3817 {
3818 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3819 if (tem != 0 && tem != -1)
3820 {
3821 warning (0, "comparison is always %d due to width of bit-field",
3822 code == NE_EXPR);
3823 return constant_boolean_node (code == NE_EXPR, compare_type);
3824 }
3825 }
3826
3827 /* Single-bit compares should always be against zero. */
3828 if (lbitsize == 1 && ! integer_zerop (rhs))
3829 {
3830 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3831 rhs = build_int_cst (type, 0);
3832 }
3833
3834 /* Make a new bitfield reference, shift the constant over the
3835 appropriate number of bits and mask it with the computed mask
3836      (in case this was a signed field); LHS is rebuilt as the new reference.  */
3837 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3838
3839 rhs = const_binop (BIT_AND_EXPR,
3840 const_binop (LSHIFT_EXPR,
3841 fold_convert_loc (loc, unsigned_type, rhs),
3842 size_int (lbitpos)),
3843 mask);
3844
3845 lhs = build2_loc (loc, code, compare_type,
3846 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3847 return lhs;
3848 }
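
/* A sketch of the constant case on C source (illustrative only):

     struct s { unsigned a : 3; unsigned b : 9; } x;
     ... x.b == 5 ...

   rather than extracting x.b with shifts, the comparison is rewritten
   roughly as (WORD & MASK) == (5 << 3), where WORD is a word-sized load
   covering the field and MASK selects bits 3..11; the precise positions
   depend on the target's endianness.  */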
3849 \f
3850 /* Subroutine for fold_truth_andor_1: decode a field reference.
3851
3852 If EXP is a comparison reference, we return the innermost reference.
3853
3854 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3855 set to the starting bit number.
3856
3857 If the innermost field can be completely contained in a mode-sized
3858 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3859
3860    *PVOLATILEP is set to 1 if any expression encountered is volatile;
3861 otherwise it is not changed.
3862
3863 *PUNSIGNEDP is set to the signedness of the field.
3864
3865 *PMASK is set to the mask used. This is either contained in a
3866 BIT_AND_EXPR or derived from the width of the field.
3867
3868 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3869
3870 Return 0 if this is not a component reference or is one that we can't
3871 do anything with. */
3872
3873 static tree
3874 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3875 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3876 int *punsignedp, int *pvolatilep,
3877 tree *pmask, tree *pand_mask)
3878 {
3879 tree outer_type = 0;
3880 tree and_mask = 0;
3881 tree mask, inner, offset;
3882 tree unsigned_type;
3883 unsigned int precision;
3884
3885 /* All the optimizations using this function assume integer fields.
3886 There are problems with FP fields since the type_for_size call
3887 below can fail for, e.g., XFmode. */
3888 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3889 return 0;
3890
3891 /* We are interested in the bare arrangement of bits, so strip everything
3892 that doesn't affect the machine mode. However, record the type of the
3893 outermost expression if it may matter below. */
3894 if (CONVERT_EXPR_P (exp)
3895 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3896 outer_type = TREE_TYPE (exp);
3897 STRIP_NOPS (exp);
3898
3899 if (TREE_CODE (exp) == BIT_AND_EXPR)
3900 {
3901 and_mask = TREE_OPERAND (exp, 1);
3902 exp = TREE_OPERAND (exp, 0);
3903 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3904 if (TREE_CODE (and_mask) != INTEGER_CST)
3905 return 0;
3906 }
3907
3908 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3909 punsignedp, pvolatilep, false);
3910 if ((inner == exp && and_mask == 0)
3911 || *pbitsize < 0 || offset != 0
3912 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3913 return 0;
3914
3915 /* If the number of bits in the reference is the same as the bitsize of
3916 the outer type, then the outer type gives the signedness. Otherwise
3917 (in case of a small bitfield) the signedness is unchanged. */
3918 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3919 *punsignedp = TYPE_UNSIGNED (outer_type);
3920
3921 /* Compute the mask to access the bitfield. */
3922 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3923 precision = TYPE_PRECISION (unsigned_type);
3924
3925 mask = build_int_cst_type (unsigned_type, -1);
3926
3927 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3928 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3929
3930 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3931 if (and_mask != 0)
3932 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3933 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3934
3935 *pmask = mask;
3936 *pand_mask = and_mask;
3937 return inner;
3938 }
3939
3940 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3941    bit positions and the type of MASK is signed.  */
3942
3943 static int
3944 all_ones_mask_p (const_tree mask, unsigned int size)
3945 {
3946 tree type = TREE_TYPE (mask);
3947 unsigned int precision = TYPE_PRECISION (type);
3948
3949 /* If this function returns true when the type of the mask is
3950 UNSIGNED, then there will be errors. In particular see
3951 gcc.c-torture/execute/990326-1.c. There does not appear to be
3952      any documentation paper trail as to why this is so.  But the
3953      pre-wide-int code worked with that restriction and it has been preserved
3954 here. */
3955 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3956 return false;
3957
3958 return wi::mask (size, false, precision) == mask;
3959 }
3960
3961 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3962 represents the sign bit of EXP's type. If EXP represents a sign
3963 or zero extension, also test VAL against the unextended type.
3964 The return value is the (sub)expression whose sign bit is VAL,
3965 or NULL_TREE otherwise. */
3966
3967 tree
3968 sign_bit_p (tree exp, const_tree val)
3969 {
3970 int width;
3971 tree t;
3972
3973 /* Tree EXP must have an integral type. */
3974 t = TREE_TYPE (exp);
3975 if (! INTEGRAL_TYPE_P (t))
3976 return NULL_TREE;
3977
3978 /* Tree VAL must be an integer constant. */
3979 if (TREE_CODE (val) != INTEGER_CST
3980 || TREE_OVERFLOW (val))
3981 return NULL_TREE;
3982
3983 width = TYPE_PRECISION (t);
3984 if (wi::only_sign_bit_p (val, width))
3985 return exp;
3986
3987 /* Handle extension from a narrower type. */
3988 if (TREE_CODE (exp) == NOP_EXPR
3989 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3990 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3991
3992 return NULL_TREE;
3993 }
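
/* For example (a sketch): with EXP a 32-bit int, VAL == 0x80000000 (only
   the sign bit set) makes this return EXP.  If EXP is (int) c for a
   signed char c, VAL == 0x80 matches the sign bit of the narrower type
   and the inner expression c is returned.  */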
3994
3995 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3996 to be evaluated unconditionally. */
3997
3998 static int
3999 simple_operand_p (const_tree exp)
4000 {
4001 /* Strip any conversions that don't change the machine mode. */
4002 STRIP_NOPS (exp);
4003
4004 return (CONSTANT_CLASS_P (exp)
4005 || TREE_CODE (exp) == SSA_NAME
4006 || (DECL_P (exp)
4007 && ! TREE_ADDRESSABLE (exp)
4008 && ! TREE_THIS_VOLATILE (exp)
4009 && ! DECL_NONLOCAL (exp)
4010 /* Don't regard global variables as simple. They may be
4011 allocated in ways unknown to the compiler (shared memory,
4012 #pragma weak, etc). */
4013 && ! TREE_PUBLIC (exp)
4014 && ! DECL_EXTERNAL (exp)
4015 /* Weakrefs are not safe to be read, since they can be NULL.
4016 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4017 have DECL_WEAK flag set. */
4018 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4019 /* Loading a static variable is unduly expensive, but global
4020 registers aren't expensive. */
4021 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4022 }
4023
4024 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4025 to be evaluated unconditionally.
4026    In addition to simple_operand_p, we consider comparisons, conversions,
4027    and logic-not operations simple, provided their operands are simple too.  */
4028
4029 static bool
4030 simple_operand_p_2 (tree exp)
4031 {
4032 enum tree_code code;
4033
4034 if (TREE_SIDE_EFFECTS (exp)
4035 || tree_could_trap_p (exp))
4036 return false;
4037
4038 while (CONVERT_EXPR_P (exp))
4039 exp = TREE_OPERAND (exp, 0);
4040
4041 code = TREE_CODE (exp);
4042
4043 if (TREE_CODE_CLASS (code) == tcc_comparison)
4044 return (simple_operand_p (TREE_OPERAND (exp, 0))
4045 && simple_operand_p (TREE_OPERAND (exp, 1)));
4046
4047 if (code == TRUTH_NOT_EXPR)
4048 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4049
4050 return simple_operand_p (exp);
4051 }
4052
4053 \f
4054 /* The following functions are subroutines to fold_range_test and allow it to
4055 try to change a logical combination of comparisons into a range test.
4056
4057 For example, both
4058 X == 2 || X == 3 || X == 4 || X == 5
4059 and
4060 X >= 2 && X <= 5
4061 are converted to
4062 (unsigned) (X - 2) <= 3
4063
4064 We describe each set of comparisons as being either inside or outside
4065 a range, using a variable named like IN_P, and then describe the
4066 range with a lower and upper bound. If one of the bounds is omitted,
4067 it represents either the highest or lowest value of the type.
4068
4069 In the comments below, we represent a range by two numbers in brackets
4070 preceded by a "+" to designate being inside that range, or a "-" to
4071 designate being outside that range, so the condition can be inverted by
4072 flipping the prefix. An omitted bound is represented by a "-". For
4073 example, "- [-, 10]" means being outside the range starting at the lowest
4074 possible value and ending at 10, in other words, being greater than 10.
4075 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4076 always false.
4077
4078 We set up things so that the missing bounds are handled in a consistent
4079 manner so neither a missing bound nor "true" and "false" need to be
4080 handled using a special case. */
4081
4082 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4083 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4084 and UPPER1_P are nonzero if the respective argument is an upper bound
4085 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4086 must be specified for a comparison. ARG1 will be converted to ARG0's
4087 type if both are specified. */
4088
4089 static tree
4090 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4091 tree arg1, int upper1_p)
4092 {
4093 tree tem;
4094 int result;
4095 int sgn0, sgn1;
4096
4097 /* If neither arg represents infinity, do the normal operation.
4098 Else, if not a comparison, return infinity. Else handle the special
4099 comparison rules. Note that most of the cases below won't occur, but
4100 are handled for consistency. */
4101
4102 if (arg0 != 0 && arg1 != 0)
4103 {
4104 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4105 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4106 STRIP_NOPS (tem);
4107 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4108 }
4109
4110 if (TREE_CODE_CLASS (code) != tcc_comparison)
4111 return 0;
4112
4113 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4114      for neither.  In real mathematics we could not assume open-ended ranges
4115      are comparable.  But this is computer arithmetic, where numbers are finite,
4116      so we may model a missing bound as a value Z whose magnitude is greater
4117      than that of any representable number.  This permits
4118      us to treat unbounded ranges as equal.  */
4119 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4120 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4121 switch (code)
4122 {
4123 case EQ_EXPR:
4124 result = sgn0 == sgn1;
4125 break;
4126 case NE_EXPR:
4127 result = sgn0 != sgn1;
4128 break;
4129 case LT_EXPR:
4130 result = sgn0 < sgn1;
4131 break;
4132 case LE_EXPR:
4133 result = sgn0 <= sgn1;
4134 break;
4135 case GT_EXPR:
4136 result = sgn0 > sgn1;
4137 break;
4138 case GE_EXPR:
4139 result = sgn0 >= sgn1;
4140 break;
4141 default:
4142 gcc_unreachable ();
4143 }
4144
4145 return constant_boolean_node (result, type);
4146 }
4147 \f
4148 /* Helper routine for make_range. Perform one step for it, return
4149 new expression if the loop should continue or NULL_TREE if it should
4150 stop. */
4151
4152 tree
4153 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4154 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4155 bool *strict_overflow_p)
4156 {
4157 tree arg0_type = TREE_TYPE (arg0);
4158 tree n_low, n_high, low = *p_low, high = *p_high;
4159 int in_p = *p_in_p, n_in_p;
4160
4161 switch (code)
4162 {
4163 case TRUTH_NOT_EXPR:
4164 /* We can only do something if the range is testing for zero. */
4165 if (low == NULL_TREE || high == NULL_TREE
4166 || ! integer_zerop (low) || ! integer_zerop (high))
4167 return NULL_TREE;
4168 *p_in_p = ! in_p;
4169 return arg0;
4170
4171 case EQ_EXPR: case NE_EXPR:
4172 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4173 /* We can only do something if the range is testing for zero
4174 and if the second operand is an integer constant. Note that
4175	 saying something is "in" the range we make is done by
4176	 complementing IN_P, since IN_P is set for the initial case of
4177	 being not equal to zero; "out" means leaving it alone.  */
4178 if (low == NULL_TREE || high == NULL_TREE
4179 || ! integer_zerop (low) || ! integer_zerop (high)
4180 || TREE_CODE (arg1) != INTEGER_CST)
4181 return NULL_TREE;
4182
4183 switch (code)
4184 {
4185 case NE_EXPR: /* - [c, c] */
4186 low = high = arg1;
4187 break;
4188 case EQ_EXPR: /* + [c, c] */
4189 in_p = ! in_p, low = high = arg1;
4190 break;
4191 case GT_EXPR: /* - [-, c] */
4192 low = 0, high = arg1;
4193 break;
4194 case GE_EXPR: /* + [c, -] */
4195 in_p = ! in_p, low = arg1, high = 0;
4196 break;
4197 case LT_EXPR: /* - [c, -] */
4198 low = arg1, high = 0;
4199 break;
4200 case LE_EXPR: /* + [-, c] */
4201 in_p = ! in_p, low = 0, high = arg1;
4202 break;
4203 default:
4204 gcc_unreachable ();
4205 }
4206
4207 /* If this is an unsigned comparison, we also know that EXP is
4208 greater than or equal to zero. We base the range tests we make
4209 on that fact, so we record it here so we can parse existing
4210 range tests. We test arg0_type since often the return type
4211 of, e.g. EQ_EXPR, is boolean. */
4212 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4213 {
4214 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4215 in_p, low, high, 1,
4216 build_int_cst (arg0_type, 0),
4217 NULL_TREE))
4218 return NULL_TREE;
4219
4220 in_p = n_in_p, low = n_low, high = n_high;
4221
4222 /* If the high bound is missing, but we have a nonzero low
4223 bound, reverse the range so it goes from zero to the low bound
4224 minus 1. */
4225 if (high == 0 && low && ! integer_zerop (low))
4226 {
4227 in_p = ! in_p;
4228 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4229 build_int_cst (TREE_TYPE (low), 1), 0);
4230 low = build_int_cst (arg0_type, 0);
4231 }
4232 }
4233
4234 *p_low = low;
4235 *p_high = high;
4236 *p_in_p = in_p;
4237 return arg0;
4238
4239 case NEGATE_EXPR:
4240 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4241 low and high are non-NULL, then normalize will DTRT. */
4242 if (!TYPE_UNSIGNED (arg0_type)
4243 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4244 {
4245 if (low == NULL_TREE)
4246 low = TYPE_MIN_VALUE (arg0_type);
4247 if (high == NULL_TREE)
4248 high = TYPE_MAX_VALUE (arg0_type);
4249 }
4250
4251 /* (-x) IN [a,b] -> x in [-b, -a] */
4252 n_low = range_binop (MINUS_EXPR, exp_type,
4253 build_int_cst (exp_type, 0),
4254 0, high, 1);
4255 n_high = range_binop (MINUS_EXPR, exp_type,
4256 build_int_cst (exp_type, 0),
4257 0, low, 0);
4258 if (n_high != 0 && TREE_OVERFLOW (n_high))
4259 return NULL_TREE;
4260 goto normalize;
4261
4262 case BIT_NOT_EXPR:
4263 /* ~ X -> -X - 1 */
4264 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4265 build_int_cst (exp_type, 1));
4266
4267 case PLUS_EXPR:
4268 case MINUS_EXPR:
4269 if (TREE_CODE (arg1) != INTEGER_CST)
4270 return NULL_TREE;
4271
4272 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4273 move a constant to the other side. */
4274 if (!TYPE_UNSIGNED (arg0_type)
4275 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4276 return NULL_TREE;
4277
4278 /* If EXP is signed, any overflow in the computation is undefined,
4279 so we don't worry about it so long as our computations on
4280 the bounds don't overflow. For unsigned, overflow is defined
4281 and this is exactly the right thing. */
4282 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4283 arg0_type, low, 0, arg1, 0);
4284 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, high, 1, arg1, 0);
4286 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4287 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4288 return NULL_TREE;
4289
4290 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4291 *strict_overflow_p = true;
4292
4293 normalize:
4294 /* Check for an unsigned range which has wrapped around the maximum
4295 value thus making n_high < n_low, and normalize it. */
4296 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4297 {
4298 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4299 build_int_cst (TREE_TYPE (n_high), 1), 0);
4300 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4301 build_int_cst (TREE_TYPE (n_low), 1), 0);
4302
4303 /* If the range is of the form +/- [ x+1, x ], we won't
4304 be able to normalize it. But then, it represents the
4305 whole range or the empty set, so make it
4306 +/- [ -, - ]. */
4307 if (tree_int_cst_equal (n_low, low)
4308 && tree_int_cst_equal (n_high, high))
4309 low = high = 0;
4310 else
4311 in_p = ! in_p;
4312 }
4313 else
4314 low = n_low, high = n_high;
4315
4316 *p_low = low;
4317 *p_high = high;
4318 *p_in_p = in_p;
4319 return arg0;
4320
4321 CASE_CONVERT:
4322 case NON_LVALUE_EXPR:
4323 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4324 return NULL_TREE;
4325
4326 if (! INTEGRAL_TYPE_P (arg0_type)
4327 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4328 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4329 return NULL_TREE;
4330
4331 n_low = low, n_high = high;
4332
4333 if (n_low != 0)
4334 n_low = fold_convert_loc (loc, arg0_type, n_low);
4335
4336 if (n_high != 0)
4337 n_high = fold_convert_loc (loc, arg0_type, n_high);
4338
4339      /* If we're converting arg0 from an unsigned type to exp's
4340	 signed type, we will be doing the comparison as unsigned.
4341 The tests above have already verified that LOW and HIGH
4342 are both positive.
4343
4344 So we have to ensure that we will handle large unsigned
4345 values the same way that the current signed bounds treat
4346 negative values. */
4347
4348 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4349 {
4350 tree high_positive;
4351 tree equiv_type;
4352 /* For fixed-point modes, we need to pass the saturating flag
4353 as the 2nd parameter. */
4354 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4355 equiv_type
4356 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4357 TYPE_SATURATING (arg0_type));
4358 else
4359 equiv_type
4360 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4361
4362 /* A range without an upper bound is, naturally, unbounded.
4363 Since convert would have cropped a very large value, use
4364 the max value for the destination type. */
4365 high_positive
4366 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4367 : TYPE_MAX_VALUE (arg0_type);
4368
4369 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4370 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4371 fold_convert_loc (loc, arg0_type,
4372 high_positive),
4373 build_int_cst (arg0_type, 1));
4374
4375 /* If the low bound is specified, "and" the range with the
4376 range for which the original unsigned value will be
4377 positive. */
4378 if (low != 0)
4379 {
4380 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4381 1, fold_convert_loc (loc, arg0_type,
4382 integer_zero_node),
4383 high_positive))
4384 return NULL_TREE;
4385
4386 in_p = (n_in_p == in_p);
4387 }
4388 else
4389 {
4390 /* Otherwise, "or" the range with the range of the input
4391 that will be interpreted as negative. */
4392 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4393 1, fold_convert_loc (loc, arg0_type,
4394 integer_zero_node),
4395 high_positive))
4396 return NULL_TREE;
4397
4398 in_p = (in_p != n_in_p);
4399 }
4400 }
4401
4402 *p_low = n_low;
4403 *p_high = n_high;
4404 *p_in_p = in_p;
4405 return arg0;
4406
4407 default:
4408 return NULL_TREE;
4409 }
4410 }
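
/* A worked example (a sketch): for a signed int x, folding the test
   x - 5 <= 20 takes two steps here.  The LE_EXPR step produces the
   range + [-, 20] for x - 5; the MINUS_EXPR step then moves the
   constant into the bounds, giving + [-, 25] for x alone, i.e.
   x <= 25, and sets *STRICT_OVERFLOW_P since this relies on signed
   overflow being undefined.  */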
4411
4412 /* Given EXP, a logical expression, set the range it is testing into
4413 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4414 actually being tested. *PLOW and *PHIGH will be made of the same
4415 type as the returned expression. If EXP is not a comparison, we
4416 will most likely not be returning a useful value and range. Set
4417 *STRICT_OVERFLOW_P to true if the return value is only valid
4418 because signed overflow is undefined; otherwise, do not change
4419 *STRICT_OVERFLOW_P. */
4420
4421 tree
4422 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4423 bool *strict_overflow_p)
4424 {
4425 enum tree_code code;
4426 tree arg0, arg1 = NULL_TREE;
4427 tree exp_type, nexp;
4428 int in_p;
4429 tree low, high;
4430 location_t loc = EXPR_LOCATION (exp);
4431
4432 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4433 and see if we can refine the range. Some of the cases below may not
4434      happen, but it doesn't seem worth worrying about this.  We keep
4435      looping as long as make_range_step can refine the expression and
4436      stop as soon as it returns NULL_TREE.  */
4437
4438 in_p = 0;
4439 low = high = build_int_cst (TREE_TYPE (exp), 0);
4440
4441 while (1)
4442 {
4443 code = TREE_CODE (exp);
4444 exp_type = TREE_TYPE (exp);
4445 arg0 = NULL_TREE;
4446
4447 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4448 {
4449 if (TREE_OPERAND_LENGTH (exp) > 0)
4450 arg0 = TREE_OPERAND (exp, 0);
4451 if (TREE_CODE_CLASS (code) == tcc_binary
4452 || TREE_CODE_CLASS (code) == tcc_comparison
4453 || (TREE_CODE_CLASS (code) == tcc_expression
4454 && TREE_OPERAND_LENGTH (exp) > 1))
4455 arg1 = TREE_OPERAND (exp, 1);
4456 }
4457 if (arg0 == NULL_TREE)
4458 break;
4459
4460 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4461 &high, &in_p, strict_overflow_p);
4462 if (nexp == NULL_TREE)
4463 break;
4464 exp = nexp;
4465 }
4466
4467 /* If EXP is a constant, we can evaluate whether this is true or false. */
4468 if (TREE_CODE (exp) == INTEGER_CST)
4469 {
4470 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4471 exp, 0, low, 0))
4472 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4473 exp, 1, high, 1)));
4474 low = high = 0;
4475 exp = 0;
4476 }
4477
4478 *pin_p = in_p, *plow = low, *phigh = high;
4479 return exp;
4480 }
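
/* For instance (a sketch): for EXP = (x != 7) this sets - [7, 7] and
   returns x; for EXP = (x > 7) with x signed it sets - [-, 7], that is,
   everything above 7.  */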
4481 \f
4482 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4483 type, TYPE, return an expression to test if EXP is in (or out of, depending
4484 on IN_P) the range. Return 0 if the test couldn't be created. */
4485
4486 tree
4487 build_range_check (location_t loc, tree type, tree exp, int in_p,
4488 tree low, tree high)
4489 {
4490 tree etype = TREE_TYPE (exp), value;
4491
4492 #ifdef HAVE_canonicalize_funcptr_for_compare
4493 /* Disable this optimization for function pointer expressions
4494 on targets that require function pointer canonicalization. */
4495 if (HAVE_canonicalize_funcptr_for_compare
4496 && TREE_CODE (etype) == POINTER_TYPE
4497 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4498 return NULL_TREE;
4499 #endif
4500
4501 if (! in_p)
4502 {
4503 value = build_range_check (loc, type, exp, 1, low, high);
4504 if (value != 0)
4505 return invert_truthvalue_loc (loc, value);
4506
4507 return 0;
4508 }
4509
4510 if (low == 0 && high == 0)
4511 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4512
4513 if (low == 0)
4514 return fold_build2_loc (loc, LE_EXPR, type, exp,
4515 fold_convert_loc (loc, etype, high));
4516
4517 if (high == 0)
4518 return fold_build2_loc (loc, GE_EXPR, type, exp,
4519 fold_convert_loc (loc, etype, low));
4520
4521 if (operand_equal_p (low, high, 0))
4522 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4523 fold_convert_loc (loc, etype, low));
4524
4525 if (integer_zerop (low))
4526 {
4527 if (! TYPE_UNSIGNED (etype))
4528 {
4529 etype = unsigned_type_for (etype);
4530 high = fold_convert_loc (loc, etype, high);
4531 exp = fold_convert_loc (loc, etype, exp);
4532 }
4533 return build_range_check (loc, type, exp, 1, 0, high);
4534 }
4535
4536 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4537 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4538 {
4539 int prec = TYPE_PRECISION (etype);
4540
4541 if (wi::mask (prec - 1, false, prec) == high)
4542 {
4543 if (TYPE_UNSIGNED (etype))
4544 {
4545 tree signed_etype = signed_type_for (etype);
4546 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4547 etype
4548 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4549 else
4550 etype = signed_etype;
4551 exp = fold_convert_loc (loc, etype, exp);
4552 }
4553 return fold_build2_loc (loc, GT_EXPR, type, exp,
4554 build_int_cst (etype, 0));
4555 }
4556 }
4557
4558 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4559      This requires wrap-around arithmetic for the type of the expression.
4560      First make sure that arithmetic in this type is valid, then make sure
4561 that it wraps around. */
4562 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4563 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4564 TYPE_UNSIGNED (etype));
4565
4566 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4567 {
4568 tree utype, minv, maxv;
4569
4570 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4571 for the type in question, as we rely on this here. */
4572 utype = unsigned_type_for (etype);
4573 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4574 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4575 build_int_cst (TREE_TYPE (maxv), 1), 1);
4576 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4577
4578 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4579 minv, 1, maxv, 1)))
4580 etype = utype;
4581 else
4582 return 0;
4583 }
4584
4585 high = fold_convert_loc (loc, etype, high);
4586 low = fold_convert_loc (loc, etype, low);
4587 exp = fold_convert_loc (loc, etype, exp);
4588
4589 value = const_binop (MINUS_EXPR, high, low);
4590
4592 if (POINTER_TYPE_P (etype))
4593 {
4594 if (value != 0 && !TREE_OVERFLOW (value))
4595 {
4596 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4597 return build_range_check (loc, type,
4598 fold_build_pointer_plus_loc (loc, exp, low),
4599 1, build_int_cst (etype, 0), value);
4600 }
4601 return 0;
4602 }
4603
4604 if (value != 0 && !TREE_OVERFLOW (value))
4605 return build_range_check (loc, type,
4606 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4607 1, build_int_cst (etype, 0), value);
4608
4609 return 0;
4610 }
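
/* Two examples (sketches): asked for + [2, 5] over an int x, this
   builds (unsigned) (x - 2) <= 3; asked for + [1, 127] over an
   unsigned char x, it builds (signed char) x > 0.  */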
4611 \f
4612 /* Return the predecessor of VAL in its type, handling the infinite case. */
4613
4614 static tree
4615 range_predecessor (tree val)
4616 {
4617 tree type = TREE_TYPE (val);
4618
4619 if (INTEGRAL_TYPE_P (type)
4620 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4621 return 0;
4622 else
4623 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4624 build_int_cst (TREE_TYPE (val), 1), 0);
4625 }
4626
4627 /* Return the successor of VAL in its type, handling the infinite case. */
4628
4629 static tree
4630 range_successor (tree val)
4631 {
4632 tree type = TREE_TYPE (val);
4633
4634 if (INTEGRAL_TYPE_P (type)
4635 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4636 return 0;
4637 else
4638 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4639 build_int_cst (TREE_TYPE (val), 1), 0);
4640 }
4641
4642 /* Given two ranges, see if we can merge them into one. Return 1 if we
4643 can, 0 if we can't. Set the output range into the specified parameters. */
4644
4645 bool
4646 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4647 tree high0, int in1_p, tree low1, tree high1)
4648 {
4649 int no_overlap;
4650 int subset;
4651 int temp;
4652 tree tem;
4653 int in_p;
4654 tree low, high;
4655 int lowequal = ((low0 == 0 && low1 == 0)
4656 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4657 low0, 0, low1, 0)));
4658 int highequal = ((high0 == 0 && high1 == 0)
4659 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4660 high0, 1, high1, 1)));
4661
4662 /* Make range 0 be the range that starts first, or ends last if they
4663      start at the same value.  Swap them if that is not the case.  */
4664 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4665 low0, 0, low1, 0))
4666 || (lowequal
4667 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4668 high1, 1, high0, 1))))
4669 {
4670 temp = in0_p, in0_p = in1_p, in1_p = temp;
4671 tem = low0, low0 = low1, low1 = tem;
4672 tem = high0, high0 = high1, high1 = tem;
4673 }
4674
4675 /* Now flag two cases, whether the ranges are disjoint or whether the
4676 second range is totally subsumed in the first. Note that the tests
4677 below are simplified by the ones above. */
4678 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4679 high0, 1, low1, 0));
4680 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4681 high1, 1, high0, 1));
4682
4683 /* We now have four cases, depending on whether we are including or
4684 excluding the two ranges. */
4685 if (in0_p && in1_p)
4686 {
4687 /* If they don't overlap, the result is false. If the second range
4688 is a subset it is the result. Otherwise, the range is from the start
4689 of the second to the end of the first. */
4690 if (no_overlap)
4691 in_p = 0, low = high = 0;
4692 else if (subset)
4693 in_p = 1, low = low1, high = high1;
4694 else
4695 in_p = 1, low = low1, high = high0;
4696 }
4697
4698 else if (in0_p && ! in1_p)
4699 {
4700 /* If they don't overlap, the result is the first range. If they are
4701 equal, the result is false. If the second range is a subset of the
4702 first, and the ranges begin at the same place, we go from just after
4703 the end of the second range to the end of the first. If the second
4704 range is not a subset of the first, or if it is a subset and both
4705 ranges end at the same place, the range starts at the start of the
4706 first range and ends just before the second range.
4707 Otherwise, we can't describe this as a single range. */
4708 if (no_overlap)
4709 in_p = 1, low = low0, high = high0;
4710 else if (lowequal && highequal)
4711 in_p = 0, low = high = 0;
4712 else if (subset && lowequal)
4713 {
4714 low = range_successor (high1);
4715 high = high0;
4716 in_p = 1;
4717 if (low == 0)
4718 {
4719 /* We are in the weird situation where high0 > high1 but
4720 high1 has no successor. Punt. */
4721 return 0;
4722 }
4723 }
4724 else if (! subset || highequal)
4725 {
4726 low = low0;
4727 high = range_predecessor (low1);
4728 in_p = 1;
4729 if (high == 0)
4730 {
4731 /* low0 < low1 but low1 has no predecessor. Punt. */
4732 return 0;
4733 }
4734 }
4735 else
4736 return 0;
4737 }
4738
4739 else if (! in0_p && in1_p)
4740 {
4741 /* If they don't overlap, the result is the second range. If the second
4742 is a subset of the first, the result is false. Otherwise,
4743 the range starts just after the first range and ends at the
4744 end of the second. */
4745 if (no_overlap)
4746 in_p = 1, low = low1, high = high1;
4747 else if (subset || highequal)
4748 in_p = 0, low = high = 0;
4749 else
4750 {
4751 low = range_successor (high0);
4752 high = high1;
4753 in_p = 1;
4754 if (low == 0)
4755 {
4756 /* high1 > high0 but high0 has no successor. Punt. */
4757 return 0;
4758 }
4759 }
4760 }
4761
4762 else
4763 {
4764 /* The case where we are excluding both ranges. Here the complex case
4765 is if they don't overlap. In that case, the only time we have a
4766 range is if they are adjacent. If the second is a subset of the
4767 first, the result is the first. Otherwise, the range to exclude
4768 starts at the beginning of the first range and ends at the end of the
4769 second. */
4770 if (no_overlap)
4771 {
4772 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4773 range_successor (high0),
4774 1, low1, 0)))
4775 in_p = 0, low = low0, high = high1;
4776 else
4777 {
4778 /* Canonicalize - [min, x] into - [-, x]. */
4779 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4780 switch (TREE_CODE (TREE_TYPE (low0)))
4781 {
4782 case ENUMERAL_TYPE:
4783 if (TYPE_PRECISION (TREE_TYPE (low0))
4784 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4785 break;
4786 /* FALLTHROUGH */
4787 case INTEGER_TYPE:
4788 if (tree_int_cst_equal (low0,
4789 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4790 low0 = 0;
4791 break;
4792 case POINTER_TYPE:
4793 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4794 && integer_zerop (low0))
4795 low0 = 0;
4796 break;
4797 default:
4798 break;
4799 }
4800
4801 /* Canonicalize - [x, max] into - [x, -]. */
4802 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4803 switch (TREE_CODE (TREE_TYPE (high1)))
4804 {
4805 case ENUMERAL_TYPE:
4806 if (TYPE_PRECISION (TREE_TYPE (high1))
4807 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4808 break;
4809 /* FALLTHROUGH */
4810 case INTEGER_TYPE:
4811 if (tree_int_cst_equal (high1,
4812 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4813 high1 = 0;
4814 break;
4815 case POINTER_TYPE:
4816 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4817 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4818 high1, 1,
4819 build_int_cst (TREE_TYPE (high1), 1),
4820 1)))
4821 high1 = 0;
4822 break;
4823 default:
4824 break;
4825 }
4826
4827 /* The ranges might be also adjacent between the maximum and
4828 minimum values of the given type. For
4829 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4830 return + [x + 1, y - 1]. */
4831 if (low0 == 0 && high1 == 0)
4832 {
4833 low = range_successor (high0);
4834 high = range_predecessor (low1);
4835 if (low == 0 || high == 0)
4836 return 0;
4837
4838 in_p = 1;
4839 }
4840 else
4841 return 0;
4842 }
4843 }
4844 else if (subset)
4845 in_p = 0, low = low0, high = high0;
4846 else
4847 in_p = 0, low = low0, high = high1;
4848 }
4849
4850 *pin_p = in_p, *plow = low, *phigh = high;
4851 return 1;
4852 }
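
/* Two small examples (sketches): intersecting + [2, 5] with + [4, 9]
   yields + [4, 5]; combining the exclusions - [-, 1] and - [6, -]
   (i.e. x > 1 together with x < 6) yields + [2, 5].  */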
4853 \f
4854
4855 /* Subroutine of fold, looking inside expressions of the form
4856 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4857 of the COND_EXPR. This function is being used also to optimize
4858 A op B ? C : A, by reversing the comparison first.
4859
4860 Return a folded expression whose code is not a COND_EXPR
4861 anymore, or NULL_TREE if no folding opportunity is found. */
4862
4863 static tree
4864 fold_cond_expr_with_comparison (location_t loc, tree type,
4865 tree arg0, tree arg1, tree arg2)
4866 {
4867 enum tree_code comp_code = TREE_CODE (arg0);
4868 tree arg00 = TREE_OPERAND (arg0, 0);
4869 tree arg01 = TREE_OPERAND (arg0, 1);
4870 tree arg1_type = TREE_TYPE (arg1);
4871 tree tem;
4872
4873 STRIP_NOPS (arg1);
4874 STRIP_NOPS (arg2);
4875
4876 /* If we have A op 0 ? A : -A, consider applying the following
4877 transformations:
4878
4879 A == 0? A : -A same as -A
4880 A != 0? A : -A same as A
4881 A >= 0? A : -A same as abs (A)
4882 A > 0? A : -A same as abs (A)
4883 A <= 0? A : -A same as -abs (A)
4884 A < 0? A : -A same as -abs (A)
4885
4886 None of these transformations work for modes with signed
4887 zeros. If A is +/-0, the first two transformations will
4888 change the sign of the result (from +0 to -0, or vice
4889 versa). The last four will fix the sign of the result,
4890 even though the original expressions could be positive or
4891 negative, depending on the sign of A.
4892
4893 Note that all these transformations are correct if A is
4894 NaN, since the two alternatives (A and -A) are also NaNs. */
4895 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4896 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4897 ? real_zerop (arg01)
4898 : integer_zerop (arg01))
4899 && ((TREE_CODE (arg2) == NEGATE_EXPR
4900 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4901 /* In the case that A is of the form X-Y, '-A' (arg2) may
4902 have already been folded to Y-X, check for that. */
4903 || (TREE_CODE (arg1) == MINUS_EXPR
4904 && TREE_CODE (arg2) == MINUS_EXPR
4905 && operand_equal_p (TREE_OPERAND (arg1, 0),
4906 TREE_OPERAND (arg2, 1), 0)
4907 && operand_equal_p (TREE_OPERAND (arg1, 1),
4908 TREE_OPERAND (arg2, 0), 0))))
4909 switch (comp_code)
4910 {
4911 case EQ_EXPR:
4912 case UNEQ_EXPR:
4913 tem = fold_convert_loc (loc, arg1_type, arg1);
4914 return pedantic_non_lvalue_loc (loc,
4915 fold_convert_loc (loc, type,
4916 negate_expr (tem)));
4917 case NE_EXPR:
4918 case LTGT_EXPR:
4919 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4920 case UNGE_EXPR:
4921 case UNGT_EXPR:
4922 if (flag_trapping_math)
4923 break;
4924 /* Fall through. */
4925 case GE_EXPR:
4926 case GT_EXPR:
4927 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4928 arg1 = fold_convert_loc (loc, signed_type_for
4929 (TREE_TYPE (arg1)), arg1);
4930 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4931 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4932 case UNLE_EXPR:
4933 case UNLT_EXPR:
4934 if (flag_trapping_math)
4935 break;
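	/* Fall through.  */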
4936 case LE_EXPR:
4937 case LT_EXPR:
4938 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4939 arg1 = fold_convert_loc (loc, signed_type_for
4940 (TREE_TYPE (arg1)), arg1);
4941 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4942 return negate_expr (fold_convert_loc (loc, type, tem));
4943 default:
4944 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4945 break;
4946 }
4947
4948 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4949 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4950 both transformations are correct when A is NaN: A != 0
4951 is then true, and A == 0 is false. */
4952
4953 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4954 && integer_zerop (arg01) && integer_zerop (arg2))
4955 {
4956 if (comp_code == NE_EXPR)
4957 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4958 else if (comp_code == EQ_EXPR)
4959 return build_zero_cst (type);
4960 }
4961
4962 /* Try some transformations of A op B ? A : B.
4963
4964 A == B? A : B same as B
4965 A != B? A : B same as A
4966 A >= B? A : B same as max (A, B)
4967 A > B? A : B same as max (B, A)
4968 A <= B? A : B same as min (A, B)
4969 A < B? A : B same as min (B, A)
4970
4971 As above, these transformations don't work in the presence
4972 of signed zeros. For example, if A and B are zeros of
4973 opposite sign, the first two transformations will change
4974 the sign of the result. In the last four, the original
4975 expressions give different results for (A=+0, B=-0) and
4976 (A=-0, B=+0), but the transformed expressions do not.
4977
4978 The first two transformations are correct if either A or B
4979 is a NaN. In the first transformation, the condition will
4980 be false, and B will indeed be chosen. In the case of the
4981 second transformation, the condition A != B will be true,
4982 and A will be chosen.
4983
4984 The conversions to max() and min() are not correct if B is
4985 a number and A is not. The conditions in the original
4986 expressions will be false, so all four give B. The min()
4987 and max() versions would give a NaN instead. */
4988 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4989 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4990 /* Avoid these transformations if the COND_EXPR may be used
4991 as an lvalue in the C++ front-end. PR c++/19199. */
4992 && (in_gimple_form
4993 || VECTOR_TYPE_P (type)
4994 || (! lang_GNU_CXX ()
4995 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4996 || ! maybe_lvalue_p (arg1)
4997 || ! maybe_lvalue_p (arg2)))
4998 {
4999 tree comp_op0 = arg00;
5000 tree comp_op1 = arg01;
5001 tree comp_type = TREE_TYPE (comp_op0);
5002
5003 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5004 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5005 {
5006 comp_type = type;
5007 comp_op0 = arg1;
5008 comp_op1 = arg2;
5009 }
5010
5011 switch (comp_code)
5012 {
5013 case EQ_EXPR:
5014 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5015 case NE_EXPR:
5016 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5017 case LE_EXPR:
5018 case LT_EXPR:
5019 case UNLE_EXPR:
5020 case UNLT_EXPR:
5021 /* In C++ a ?: expression can be an lvalue, so put the
5022 operand which will be used if they are equal first
5023 so that we can convert this back to the
5024 corresponding COND_EXPR. */
5025 if (!HONOR_NANS (arg1))
5026 {
5027 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5028 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5029 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5030 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5031 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5032 comp_op1, comp_op0);
5033 return pedantic_non_lvalue_loc (loc,
5034 fold_convert_loc (loc, type, tem));
5035 }
5036 break;
5037 case GE_EXPR:
5038 case GT_EXPR:
5039 case UNGE_EXPR:
5040 case UNGT_EXPR:
5041 if (!HONOR_NANS (arg1))
5042 {
5043 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5044 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5045 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5046 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5047 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5048 comp_op1, comp_op0);
5049 return pedantic_non_lvalue_loc (loc,
5050 fold_convert_loc (loc, type, tem));
5051 }
5052 break;
5053 case UNEQ_EXPR:
5054 if (!HONOR_NANS (arg1))
5055 return pedantic_non_lvalue_loc (loc,
5056 fold_convert_loc (loc, type, arg2));
5057 break;
5058 case LTGT_EXPR:
5059 if (!HONOR_NANS (arg1))
5060 return pedantic_non_lvalue_loc (loc,
5061 fold_convert_loc (loc, type, arg1));
5062 break;
5063 default:
5064 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5065 break;
5066 }
5067 }
5068
5069 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5070 we might still be able to simplify this. For example,
5071 if C1 is one less or one more than C2, this might have started
5072 out as a MIN or MAX and been transformed by this function.
5073 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5074
5075 if (INTEGRAL_TYPE_P (type)
5076 && TREE_CODE (arg01) == INTEGER_CST
5077 && TREE_CODE (arg2) == INTEGER_CST)
5078 switch (comp_code)
5079 {
5080 case EQ_EXPR:
5081 if (TREE_CODE (arg1) == INTEGER_CST)
5082 break;
5083 /* We can replace A with C1 in this case. */
5084 arg1 = fold_convert_loc (loc, type, arg01);
5085 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5086
5087 case LT_EXPR:
5088 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5089 MIN_EXPR, to preserve the signedness of the comparison. */
5090 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5091 OEP_ONLY_CONST)
5092 && operand_equal_p (arg01,
5093 const_binop (PLUS_EXPR, arg2,
5094 build_int_cst (type, 1)),
5095 OEP_ONLY_CONST))
5096 {
5097 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5098 fold_convert_loc (loc, TREE_TYPE (arg00),
5099 arg2));
5100 return pedantic_non_lvalue_loc (loc,
5101 fold_convert_loc (loc, type, tem));
5102 }
5103 break;
5104
5105 case LE_EXPR:
5106 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5107 as above. */
5108 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5109 OEP_ONLY_CONST)
5110 && operand_equal_p (arg01,
5111 const_binop (MINUS_EXPR, arg2,
5112 build_int_cst (type, 1)),
5113 OEP_ONLY_CONST))
5114 {
5115 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5116 fold_convert_loc (loc, TREE_TYPE (arg00),
5117 arg2));
5118 return pedantic_non_lvalue_loc (loc,
5119 fold_convert_loc (loc, type, tem));
5120 }
5121 break;
5122
5123 case GT_EXPR:
5124 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5125 MAX_EXPR, to preserve the signedness of the comparison. */
5126 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5127 OEP_ONLY_CONST)
5128 && operand_equal_p (arg01,
5129 const_binop (MINUS_EXPR, arg2,
5130 build_int_cst (type, 1)),
5131 OEP_ONLY_CONST))
5132 {
5133 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5134 fold_convert_loc (loc, TREE_TYPE (arg00),
5135 arg2));
5136 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5137 }
5138 break;
5139
5140 case GE_EXPR:
5141 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5142 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5143 OEP_ONLY_CONST)
5144 && operand_equal_p (arg01,
5145 const_binop (PLUS_EXPR, arg2,
5146 build_int_cst (type, 1)),
5147 OEP_ONLY_CONST))
5148 {
5149 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5150 fold_convert_loc (loc, TREE_TYPE (arg00),
5151 arg2));
5152 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5153 }
5154 break;
5155 case NE_EXPR:
5156 break;
5157 default:
5158 gcc_unreachable ();
5159 }
5160
5161 return NULL_TREE;
5162 }
5163
5164
5165 \f
5166 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5167 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5168 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5169 false) >= 2)
5170 #endif
5171
5172 /* EXP is some logical combination of boolean tests. See if we can
5173 merge it into some range test. Return the new tree if so. */
5174
5175 static tree
5176 fold_range_test (location_t loc, enum tree_code code, tree type,
5177 tree op0, tree op1)
5178 {
5179 int or_op = (code == TRUTH_ORIF_EXPR
5180 || code == TRUTH_OR_EXPR);
5181 int in0_p, in1_p, in_p;
5182 tree low0, low1, low, high0, high1, high;
5183 bool strict_overflow_p = false;
5184 tree tem, lhs, rhs;
5185 const char * const warnmsg = G_("assuming signed overflow does not occur "
5186 "when simplifying range test");
5187
5188 if (!INTEGRAL_TYPE_P (type))
5189 return 0;
5190
5191 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5192 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5193
5194 /* If this is an OR operation, invert both sides; we will invert
5195 again at the end. */
5196 if (or_op)
5197 in0_p = ! in0_p, in1_p = ! in1_p;
5198
5199 /* If both expressions are the same, if we can merge the ranges, and we
5200 can build the range test, return it or it inverted. If one of the
5201 ranges is always true or always false, consider it to be the same
5202 expression as the other. */
5203 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5204 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5205 in1_p, low1, high1)
5206 && 0 != (tem = (build_range_check (loc, type,
5207 lhs != 0 ? lhs
5208 : rhs != 0 ? rhs : integer_zero_node,
5209 in_p, low, high))))
5210 {
5211 if (strict_overflow_p)
5212 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5213 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5214 }
5215
5216   /* On machines where branches are expensive, if this is a
5217 short-circuited branch and the underlying object on both sides
5218 is the same, make a non-short-circuit operation. */
5219 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5220 && lhs != 0 && rhs != 0
5221 && (code == TRUTH_ANDIF_EXPR
5222 || code == TRUTH_ORIF_EXPR)
5223 && operand_equal_p (lhs, rhs, 0))
5224 {
5225 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5226 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5227	 which case we can't do this.  */
5228 if (simple_operand_p (lhs))
5229 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5230 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5231 type, op0, op1);
5232
5233 else if (!lang_hooks.decls.global_bindings_p ()
5234 && !CONTAINS_PLACEHOLDER_P (lhs))
5235 {
5236 tree common = save_expr (lhs);
5237
5238 if (0 != (lhs = build_range_check (loc, type, common,
5239 or_op ? ! in0_p : in0_p,
5240 low0, high0))
5241 && (0 != (rhs = build_range_check (loc, type, common,
5242 or_op ? ! in1_p : in1_p,
5243 low1, high1))))
5244 {
5245 if (strict_overflow_p)
5246 fold_overflow_warning (warnmsg,
5247 WARN_STRICT_OVERFLOW_COMPARISON);
5248 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5249 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5250 type, lhs, rhs);
5251 }
5252 }
5253 }
5254
5255 return 0;
5256 }
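
/* The classic instance (a sketch): ch >= '0' && ch <= '9' is merged
   into the single test (unsigned) (ch - '0') <= 9, trading a second
   branch for a subtraction.  */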
5257 \f
5258 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a
5259    P-bit value.  Arrange things so the extra bits will be set to zero if and
5260    only if C is sign-extended to its full width.  If MASK is nonzero,
5261 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5262
5263 static tree
5264 unextend (tree c, int p, int unsignedp, tree mask)
5265 {
5266 tree type = TREE_TYPE (c);
5267 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5268 tree temp;
5269
5270 if (p == modesize || unsignedp)
5271 return c;
5272
5273 /* We work by getting just the sign bit into the low-order bit, then
5274 into the high-order bit, then sign-extend. We then XOR that value
5275 with C. */
5276 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5277
5278 /* We must use a signed type in order to get an arithmetic right shift.
5279 However, we must also avoid introducing accidental overflows, so that
5280 a subsequent call to integer_zerop will work. Hence we must
5281 do the type conversion here. At this point, the constant is either
5282 zero or one, and the conversion to a signed type can never overflow.
5283 We could get an overflow if this conversion is done anywhere else. */
5284 if (TYPE_UNSIGNED (type))
5285 temp = fold_convert (signed_type_for (type), temp);
5286
5287 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5288 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5289 if (mask != 0)
5290 temp = const_binop (BIT_AND_EXPR, temp,
5291 fold_convert (TREE_TYPE (c), mask));
5292 /* If necessary, convert the type back to match the type of C. */
5293 if (TYPE_UNSIGNED (type))
5294 temp = fold_convert (type, temp);
5295
5296 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5297 }
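
/* A worked instance (a sketch): with P == 4 and a 32-bit signed type,
   TEMP ends up as 0xfffffff0 whenever bit 3 of C is set.  Thus
   unextend (0xfffffffa) gives 0x0000000a (C was sign-extended, so the
   extra bits come out zero), while unextend (0x0000000a) gives
   0xfffffffa, leaving them set.  */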
5298 \f
5299 /* For an expression that has the form
5300 (A && B) || ~B
5301 or
5302 (A || B) && ~B,
5303 we can drop one of the inner expressions and simplify to
5304 A || ~B
5305 or
5306 A && ~B
5307    LOC is the location of the resulting expression.  OP is the inner
5308    logical operation (the left-hand side in the examples above), while CMPOP
5309    is the right-hand side.  RHS_ONLY is used to prevent us from accidentally
5310 removing a condition that guards another, as in
5311 (A != NULL && A->...) || A == NULL
5312 which we must not transform. If RHS_ONLY is true, only eliminate the
5313 right-most operand of the inner logical operation. */
5314
5315 static tree
5316 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5317 bool rhs_only)
5318 {
5319 tree type = TREE_TYPE (cmpop);
5320 enum tree_code code = TREE_CODE (cmpop);
5321 enum tree_code truthop_code = TREE_CODE (op);
5322 tree lhs = TREE_OPERAND (op, 0);
5323 tree rhs = TREE_OPERAND (op, 1);
5324 tree orig_lhs = lhs, orig_rhs = rhs;
5325 enum tree_code rhs_code = TREE_CODE (rhs);
5326 enum tree_code lhs_code = TREE_CODE (lhs);
5327 enum tree_code inv_code;
5328
5329 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5330 return NULL_TREE;
5331
5332 if (TREE_CODE_CLASS (code) != tcc_comparison)
5333 return NULL_TREE;
5334
5335 if (rhs_code == truthop_code)
5336 {
5337 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5338 if (newrhs != NULL_TREE)
5339 {
5340 rhs = newrhs;
5341 rhs_code = TREE_CODE (rhs);
5342 }
5343 }
5344 if (lhs_code == truthop_code && !rhs_only)
5345 {
5346 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5347 if (newlhs != NULL_TREE)
5348 {
5349 lhs = newlhs;
5350 lhs_code = TREE_CODE (lhs);
5351 }
5352 }
5353
5354 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5355 if (inv_code == rhs_code
5356 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5357 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5358 return lhs;
5359 if (!rhs_only && inv_code == lhs_code
5360 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5361 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5362 return rhs;
5363 if (rhs != orig_rhs || lhs != orig_lhs)
5364 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5365 lhs, rhs);
5366 return NULL_TREE;
5367 }
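/* For example, with integer operands (so no NaNs are involved): in
   (a == 1 && b < 2) || b >= 2, the inner b < 2 is the inverse of
   CMPOP (b >= 2) and is dropped, so the whole expression simplifies
   to a == 1 || b >= 2.  */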
5368
5369 /* Find ways of folding logical expressions of LHS and RHS:
5370 Try to merge two comparisons to the same innermost item.
5371 Look for range tests like "ch >= '0' && ch <= '9'".
5372 Look for combinations of simple terms on machines with expensive branches
5373 and evaluate the RHS unconditionally.
5374
5375 For example, if we have p->a == 2 && p->b == 4 and we can make an
5376 object large enough to span both A and B, we can do this with a comparison
5377 against the object ANDed with the a mask.
5378
5379 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5380 operations to do this with one comparison.
5381
5382 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5383 function and the one above.
5384
5385 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5386 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5387
5388 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5389 two operands.
5390
5391 We return the simplified tree or 0 if no optimization is possible. */
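
/* For illustration (a hypothetical layout, not a guaranteed result):
   given two adjacent bit-fields a and b packed into one byte, a test
   such as p->a == 2 && p->b == 4 may be folded into a single load of
   the containing byte, an AND with a mask covering both fields, and
   one comparison against the merged constant.  */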
5392
5393 static tree
5394 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5395 tree lhs, tree rhs)
5396 {
5397 /* If this is the "or" of two comparisons, we can do something if
5398 the comparisons are NE_EXPR. If this is the "and", we can do something
5399 if the comparisons are EQ_EXPR. I.e.,
5400 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5401
5402 WANTED_CODE is this operation code. For single bit fields, we can
5403 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5404 comparison for one-bit fields. */
5405
5406 enum tree_code wanted_code;
5407 enum tree_code lcode, rcode;
5408 tree ll_arg, lr_arg, rl_arg, rr_arg;
5409 tree ll_inner, lr_inner, rl_inner, rr_inner;
5410 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5411 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5412 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5413 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5414 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5415 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5416 machine_mode lnmode, rnmode;
5417 tree ll_mask, lr_mask, rl_mask, rr_mask;
5418 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5419 tree l_const, r_const;
5420 tree lntype, rntype, result;
5421 HOST_WIDE_INT first_bit, end_bit;
5422 int volatilep;
5423
5424 /* Start by getting the comparison codes. Fail if anything is volatile.
5425 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5426 it were surrounded with a NE_EXPR. */
5427
5428 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5429 return 0;
5430
5431 lcode = TREE_CODE (lhs);
5432 rcode = TREE_CODE (rhs);
5433
5434 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5435 {
5436 lhs = build2 (NE_EXPR, truth_type, lhs,
5437 build_int_cst (TREE_TYPE (lhs), 0));
5438 lcode = NE_EXPR;
5439 }
5440
5441 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5442 {
5443 rhs = build2 (NE_EXPR, truth_type, rhs,
5444 build_int_cst (TREE_TYPE (rhs), 0));
5445 rcode = NE_EXPR;
5446 }
5447
5448 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5449 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5450 return 0;
5451
5452 ll_arg = TREE_OPERAND (lhs, 0);
5453 lr_arg = TREE_OPERAND (lhs, 1);
5454 rl_arg = TREE_OPERAND (rhs, 0);
5455 rr_arg = TREE_OPERAND (rhs, 1);
5456
5457 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5458 if (simple_operand_p (ll_arg)
5459 && simple_operand_p (lr_arg))
5460 {
5461 if (operand_equal_p (ll_arg, rl_arg, 0)
5462 && operand_equal_p (lr_arg, rr_arg, 0))
5463 {
5464 result = combine_comparisons (loc, code, lcode, rcode,
5465 truth_type, ll_arg, lr_arg);
5466 if (result)
5467 return result;
5468 }
5469 else if (operand_equal_p (ll_arg, rr_arg, 0)
5470 && operand_equal_p (lr_arg, rl_arg, 0))
5471 {
5472 result = combine_comparisons (loc, code, lcode,
5473 swap_tree_comparison (rcode),
5474 truth_type, ll_arg, lr_arg);
5475 if (result)
5476 return result;
5477 }
5478 }
5479
5480 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5481 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5482
5483 /* If the RHS can be evaluated unconditionally and its operands are
5484 simple, it wins to evaluate the RHS unconditionally on machines
5485 with expensive branches. In this case, this isn't a comparison
5486 that can be merged. */
5487
5488 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5489 false) >= 2
5490 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5491 && simple_operand_p (rl_arg)
5492 && simple_operand_p (rr_arg))
5493 {
5494 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5495 if (code == TRUTH_OR_EXPR
5496 && lcode == NE_EXPR && integer_zerop (lr_arg)
5497 && rcode == NE_EXPR && integer_zerop (rr_arg)
5498 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5499 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5500 return build2_loc (loc, NE_EXPR, truth_type,
5501 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5502 ll_arg, rl_arg),
5503 build_int_cst (TREE_TYPE (ll_arg), 0));
5504
5505 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5506 if (code == TRUTH_AND_EXPR
5507 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5508 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5509 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5510 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5511 return build2_loc (loc, EQ_EXPR, truth_type,
5512 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5513 ll_arg, rl_arg),
5514 build_int_cst (TREE_TYPE (ll_arg), 0));
5515 }
5516
5517 /* See if the comparisons can be merged. Then get all the parameters for
5518 each side. */
5519
5520 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5521 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5522 return 0;
5523
5524 volatilep = 0;
5525 ll_inner = decode_field_reference (loc, ll_arg,
5526 &ll_bitsize, &ll_bitpos, &ll_mode,
5527 &ll_unsignedp, &volatilep, &ll_mask,
5528 &ll_and_mask);
5529 lr_inner = decode_field_reference (loc, lr_arg,
5530 &lr_bitsize, &lr_bitpos, &lr_mode,
5531 &lr_unsignedp, &volatilep, &lr_mask,
5532 &lr_and_mask);
5533 rl_inner = decode_field_reference (loc, rl_arg,
5534 &rl_bitsize, &rl_bitpos, &rl_mode,
5535 &rl_unsignedp, &volatilep, &rl_mask,
5536 &rl_and_mask);
5537 rr_inner = decode_field_reference (loc, rr_arg,
5538 &rr_bitsize, &rr_bitpos, &rr_mode,
5539 &rr_unsignedp, &volatilep, &rr_mask,
5540 &rr_and_mask);
5541
5542 /* The inner operation on the lhs of each comparison must be the
5543 same if we are to be able to do anything.
5544 Then see if we have constants. If not, the same must be true for
5545 the rhs's. */
5546 if (volatilep || ll_inner == 0 || rl_inner == 0
5547 || ! operand_equal_p (ll_inner, rl_inner, 0))
5548 return 0;
5549
5550 if (TREE_CODE (lr_arg) == INTEGER_CST
5551 && TREE_CODE (rr_arg) == INTEGER_CST)
5552 l_const = lr_arg, r_const = rr_arg;
5553 else if (lr_inner == 0 || rr_inner == 0
5554 || ! operand_equal_p (lr_inner, rr_inner, 0))
5555 return 0;
5556 else
5557 l_const = r_const = 0;
5558
5559 /* If either comparison code is not correct for our logical operation,
5560 fail. However, we can convert a one-bit comparison against zero into
5561 the opposite comparison against that bit being set in the field. */
5562
5563 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5564 if (lcode != wanted_code)
5565 {
5566 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5567 {
5568 /* Make the left operand unsigned, since we are only interested
5569 in the value of one bit. Otherwise we are doing the wrong
5570 thing below. */
5571 ll_unsignedp = 1;
5572 l_const = ll_mask;
5573 }
5574 else
5575 return 0;
5576 }
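
  /* For example (an illustrative sketch): with wanted_code == EQ_EXPR,
     a test such as (x & 8) != 0 is recast by the code above as a
     comparison of the one-bit field against the set bit itself:
     l_const becomes the mask 8.  */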
5577
5578 /* This is analogous to the code for l_const above. */
5579 if (rcode != wanted_code)
5580 {
5581 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5582 {
5583 rl_unsignedp = 1;
5584 r_const = rl_mask;
5585 }
5586 else
5587 return 0;
5588 }
5589
5590 /* See if we can find a mode that contains both fields being compared on
5591 the left. If we can't, fail. Otherwise, update all constants and masks
5592 to be relative to a field of that size. */
5593 first_bit = MIN (ll_bitpos, rl_bitpos);
5594 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5595 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5596 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5597 volatilep);
5598 if (lnmode == VOIDmode)
5599 return 0;
5600
5601 lnbitsize = GET_MODE_BITSIZE (lnmode);
5602 lnbitpos = first_bit & ~ (lnbitsize - 1);
5603 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5604 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5605
5606 if (BYTES_BIG_ENDIAN)
5607 {
5608 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5609 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5610 }
5611
5612 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5613 size_int (xll_bitpos));
5614 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5615 size_int (xrl_bitpos));
5616
5617 if (l_const)
5618 {
5619 l_const = fold_convert_loc (loc, lntype, l_const);
5620 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5621 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5622 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5623 fold_build1_loc (loc, BIT_NOT_EXPR,
5624 lntype, ll_mask))))
5625 {
5626 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5627
5628 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5629 }
5630 }
5631 if (r_const)
5632 {
5633 r_const = fold_convert_loc (loc, lntype, r_const);
5634 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5635 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5636 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5637 fold_build1_loc (loc, BIT_NOT_EXPR,
5638 lntype, rl_mask))))
5639 {
5640 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5641
5642 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5643 }
5644 }
5645
5646 /* If the right sides are not constant, do the same for them. Also,
5647 disallow this optimization if a size or signedness mismatch occurs
5648 between the left and right sides. */
5649 if (l_const == 0)
5650 {
5651 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5652 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5653 /* Make sure the two fields on the right
5654 correspond to the left without being swapped. */
5655 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5656 return 0;
5657
5658 first_bit = MIN (lr_bitpos, rr_bitpos);
5659 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5660 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5661 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5662 volatilep);
5663 if (rnmode == VOIDmode)
5664 return 0;
5665
5666 rnbitsize = GET_MODE_BITSIZE (rnmode);
5667 rnbitpos = first_bit & ~ (rnbitsize - 1);
5668 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5669 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5670
5671 if (BYTES_BIG_ENDIAN)
5672 {
5673 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5674 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5675 }
5676
5677 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5678 rntype, lr_mask),
5679 size_int (xlr_bitpos));
5680 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5681 rntype, rr_mask),
5682 size_int (xrr_bitpos));
5683
5684 /* Make a mask that corresponds to both fields being compared.
5685 Do this for both items being compared. If the operands are the
5686 same size and the bits being compared are in the same position
5687 then we can do this by masking both and comparing the masked
5688 results. */
5689 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5690 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5691 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5692 {
5693 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5694 ll_unsignedp || rl_unsignedp);
5695 if (! all_ones_mask_p (ll_mask, lnbitsize))
5696 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5697
5698 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5699 lr_unsignedp || rr_unsignedp);
5700 if (! all_ones_mask_p (lr_mask, rnbitsize))
5701 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5702
5703 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5704 }
5705
5706 /* There is still another way we can do something: If both pairs of
5707 fields being compared are adjacent, we may be able to make a wider
5708 field containing them both.
5709
5710 Note that we still must mask the lhs/rhs expressions. Furthermore,
5711 the mask must be shifted to account for the shift done by
5712 make_bit_field_ref. */
5713 if ((ll_bitsize + ll_bitpos == rl_bitpos
5714 && lr_bitsize + lr_bitpos == rr_bitpos)
5715 || (ll_bitpos == rl_bitpos + rl_bitsize
5716 && lr_bitpos == rr_bitpos + rr_bitsize))
5717 {
5718 tree type;
5719
5720 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5721 ll_bitsize + rl_bitsize,
5722 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5723 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5724 lr_bitsize + rr_bitsize,
5725 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5726
5727 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5728 size_int (MIN (xll_bitpos, xrl_bitpos)));
5729 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5730 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5731
5732 /* Convert to the smaller type before masking out unwanted bits. */
5733 type = lntype;
5734 if (lntype != rntype)
5735 {
5736 if (lnbitsize > rnbitsize)
5737 {
5738 lhs = fold_convert_loc (loc, rntype, lhs);
5739 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5740 type = rntype;
5741 }
5742 else if (lnbitsize < rnbitsize)
5743 {
5744 rhs = fold_convert_loc (loc, lntype, rhs);
5745 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5746 type = lntype;
5747 }
5748 }
5749
5750 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5751 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5752
5753 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5754 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5755
5756 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5757 }
5758
5759 return 0;
5760 }
5761
5762 /* Handle the case of comparisons with constants. If there is something in
5763 common between the masks, those bits of the constants must be the same.
5764 If not, the condition is always false. Test for this to avoid generating
5765 incorrect code below. */
5766 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5767 if (! integer_zerop (result)
5768 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5769 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5770 {
5771 if (wanted_code == NE_EXPR)
5772 {
5773 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5774 return constant_boolean_node (true, truth_type);
5775 }
5776 else
5777 {
5778 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5779 return constant_boolean_node (false, truth_type);
5780 }
5781 }
5782
5783 /* Construct the expression we will return. First get the component
5784 reference we will make. Unless the mask is all ones the width of
5785 that field, perform the mask operation. Then compare with the
5786 merged constant. */
5787 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5788 ll_unsignedp || rl_unsignedp);
5789
5790 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5791 if (! all_ones_mask_p (ll_mask, lnbitsize))
5792 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5793
5794 return build2_loc (loc, wanted_code, truth_type, result,
5795 const_binop (BIT_IOR_EXPR, l_const, r_const));
5796 }
5797 \f
5798 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5799 constant. */
5800
5801 static tree
5802 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5803 tree op0, tree op1)
5804 {
5805 tree arg0 = op0;
5806 enum tree_code op_code;
5807 tree comp_const;
5808 tree minmax_const;
5809 int consts_equal, consts_lt;
5810 tree inner;
5811
5812 STRIP_SIGN_NOPS (arg0);
5813
5814 op_code = TREE_CODE (arg0);
5815 minmax_const = TREE_OPERAND (arg0, 1);
5816 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5817 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5818 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5819 inner = TREE_OPERAND (arg0, 0);
5820
5821 /* If something does not permit us to optimize, return the original tree. */
5822 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5823 || TREE_CODE (comp_const) != INTEGER_CST
5824 || TREE_OVERFLOW (comp_const)
5825 || TREE_CODE (minmax_const) != INTEGER_CST
5826 || TREE_OVERFLOW (minmax_const))
5827 return NULL_TREE;
5828
5829 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5830 and GT_EXPR, doing the rest with recursive calls using logical
5831 simplifications. */
5832 switch (code)
5833 {
5834 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5835 {
5836 tree tem
5837 = optimize_minmax_comparison (loc,
5838 invert_tree_comparison (code, false),
5839 type, op0, op1);
5840 if (tem)
5841 return invert_truthvalue_loc (loc, tem);
5842 return NULL_TREE;
5843 }
5844
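    /* x >= c is decomposed as x == c || x > c, folding each half with
       the directly-handled EQ_EXPR and GT_EXPR cases below.  */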
5845 case GE_EXPR:
5846 return
5847 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5848 optimize_minmax_comparison
5849 (loc, EQ_EXPR, type, arg0, comp_const),
5850 optimize_minmax_comparison
5851 (loc, GT_EXPR, type, arg0, comp_const));
5852
5853 case EQ_EXPR:
5854 if (op_code == MAX_EXPR && consts_equal)
5855 /* MAX (X, 0) == 0 -> X <= 0 */
5856 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5857
5858 else if (op_code == MAX_EXPR && consts_lt)
5859 /* MAX (X, 0) == 5 -> X == 5 */
5860 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5861
5862 else if (op_code == MAX_EXPR)
5863 /* MAX (X, 0) == -1 -> false */
5864 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5865
5866 else if (consts_equal)
5867 /* MIN (X, 0) == 0 -> X >= 0 */
5868 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5869
5870 else if (consts_lt)
5871 /* MIN (X, 0) == 5 -> false */
5872 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5873
5874 else
5875 /* MIN (X, 0) == -1 -> X == -1 */
5876 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5877
5878 case GT_EXPR:
5879 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5880 /* MAX (X, 0) > 0 -> X > 0
5881 MAX (X, 0) > 5 -> X > 5 */
5882 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5883
5884 else if (op_code == MAX_EXPR)
5885 /* MAX (X, 0) > -1 -> true */
5886 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5887
5888 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5889 /* MIN (X, 0) > 0 -> false
5890 MIN (X, 0) > 5 -> false */
5891 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5892
5893 else
5894 /* MIN (X, 0) > -1 -> X > -1 */
5895 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5896
5897 default:
5898 return NULL_TREE;
5899 }
5900 }
5901 \f
5902 /* T is an integer expression that is being multiplied, divided, or taken a
5903 modulus (CODE says which and what kind of divide or modulus) by a
5904 constant C. See if we can eliminate that operation by folding it with
5905 other operations already in T. WIDE_TYPE, if non-null, is a type that
5906 should be used for the computation if wider than our type.
5907
5908 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5909 (X * 2) + (Y * 4). We must, however, be assured that either the original
5910 expression would not overflow or that overflow is undefined for the type
5911 in the language in question.
5912
5913 If we return a non-null expression, it is an equivalent form of the
5914 original computation, but need not be in the original type.
5915
5916 We set *STRICT_OVERFLOW_P to true if the return value depends on
5917 signed overflow being undefined. Otherwise we do not change
5918 *STRICT_OVERFLOW_P. */
5919
5920 static tree
5921 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5922 bool *strict_overflow_p)
5923 {
5924 /* To avoid exponential search depth, refuse to allow recursion past
5925 three levels. Beyond that (1) it's highly unlikely that we'll find
5926 something interesting and (2) we've probably processed it before
5927 when we built the inner expression. */
5928
5929 static int depth;
5930 tree ret;
5931
5932 if (depth > 3)
5933 return NULL;
5934
5935 depth++;
5936 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5937 depth--;
5938
5939 return ret;
5940 }
5941
5942 static tree
5943 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5944 bool *strict_overflow_p)
5945 {
5946 tree type = TREE_TYPE (t);
5947 enum tree_code tcode = TREE_CODE (t);
5948 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5949 > GET_MODE_SIZE (TYPE_MODE (type)))
5950 ? wide_type : type);
5951 tree t1, t2;
5952 int same_p = tcode == code;
5953 tree op0 = NULL_TREE, op1 = NULL_TREE;
5954 bool sub_strict_overflow_p;
5955
5956 /* Don't deal with constants of zero here; they confuse the code below. */
5957 if (integer_zerop (c))
5958 return NULL_TREE;
5959
5960 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5961 op0 = TREE_OPERAND (t, 0);
5962
5963 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5964 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5965
5966 /* Note that we need not handle conditional operations here since fold
5967 already handles those cases. So just do arithmetic here. */
5968 switch (tcode)
5969 {
5970 case INTEGER_CST:
5971 /* For a constant, we can always simplify if we are a multiply
5972 or (for divide and modulus) if it is a multiple of our constant. */
5973 if (code == MULT_EXPR
5974 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5975 return const_binop (code, fold_convert (ctype, t),
5976 fold_convert (ctype, c));
5977 break;
5978
5979 CASE_CONVERT: case NON_LVALUE_EXPR:
5980 /* If op0 is an expression ... */
5981 if ((COMPARISON_CLASS_P (op0)
5982 || UNARY_CLASS_P (op0)
5983 || BINARY_CLASS_P (op0)
5984 || VL_EXP_CLASS_P (op0)
5985 || EXPRESSION_CLASS_P (op0))
5986 /* ... and has wrapping overflow, and its type is smaller
5987 than ctype, then we cannot pass through as widening. */
5988 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5989 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5990 && (TYPE_PRECISION (ctype)
5991 > TYPE_PRECISION (TREE_TYPE (op0))))
5992 /* ... or this is a truncation (t is narrower than op0),
5993 then we cannot pass through this narrowing. */
5994 || (TYPE_PRECISION (type)
5995 < TYPE_PRECISION (TREE_TYPE (op0)))
5996 /* ... or signedness changes for division or modulus,
5997 then we cannot pass through this conversion. */
5998 || (code != MULT_EXPR
5999 && (TYPE_UNSIGNED (ctype)
6000 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6001 /* ... or has undefined overflow while the converted-to
6002 type has not, we cannot do the operation in the inner type
6003 as that would introduce undefined overflow. */
6004 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6005 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6006 && !TYPE_OVERFLOW_UNDEFINED (type))))
6007 break;
6008
6009 /* Pass the constant down and see if we can make a simplification. If
6010 we can, replace this expression with the inner simplification for
6011 possible later conversion to our or some other type. */
6012 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6013 && TREE_CODE (t2) == INTEGER_CST
6014 && !TREE_OVERFLOW (t2)
6015 && (0 != (t1 = extract_muldiv (op0, t2, code,
6016 code == MULT_EXPR
6017 ? ctype : NULL_TREE,
6018 strict_overflow_p))))
6019 return t1;
6020 break;
6021
6022 case ABS_EXPR:
6023 /* If widening the type changes it from signed to unsigned, then we
6024 must avoid building ABS_EXPR itself as unsigned. */
6025 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6026 {
6027 tree cstype = (*signed_type_for) (ctype);
6028 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6029 != 0)
6030 {
6031 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6032 return fold_convert (ctype, t1);
6033 }
6034 break;
6035 }
6036 /* If the constant is negative, we cannot simplify this. */
6037 if (tree_int_cst_sgn (c) == -1)
6038 break;
6039 /* FALLTHROUGH */
6040 case NEGATE_EXPR:
6041 /* For division and modulus, type can't be unsigned, as e.g.
6042 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6043 For signed types, even with wrapping overflow, this is fine. */
6044 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6045 break;
6046 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6047 != 0)
6048 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6049 break;
6050
6051 case MIN_EXPR: case MAX_EXPR:
6052 /* If widening the type changes the signedness, then we can't perform
6053 this optimization as that changes the result. */
6054 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6055 break;
6056
6057 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6058 sub_strict_overflow_p = false;
6059 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6060 &sub_strict_overflow_p)) != 0
6061 && (t2 = extract_muldiv (op1, c, code, wide_type,
6062 &sub_strict_overflow_p)) != 0)
6063 {
6064 if (tree_int_cst_sgn (c) < 0)
6065 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6066 if (sub_strict_overflow_p)
6067 *strict_overflow_p = true;
6068 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6069 fold_convert (ctype, t2));
6070 }
6071 break;
6072
6073 case LSHIFT_EXPR: case RSHIFT_EXPR:
6074 /* If the second operand is constant, this is a multiplication
6075 or floor division by a power of two, so we can treat it that
6076 way unless the multiplier or divisor overflows. Signed
6077 left-shift overflow is implementation-defined rather than
6078 undefined in C90, so do not convert signed left shift into
6079 multiplication. */
6080 if (TREE_CODE (op1) == INTEGER_CST
6081 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6082 /* const_binop may not detect overflow correctly,
6083 so check for it explicitly here. */
6084 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6085 && 0 != (t1 = fold_convert (ctype,
6086 const_binop (LSHIFT_EXPR,
6087 size_one_node,
6088 op1)))
6089 && !TREE_OVERFLOW (t1))
6090 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6091 ? MULT_EXPR : FLOOR_DIV_EXPR,
6092 ctype,
6093 fold_convert (ctype, op0),
6094 t1),
6095 c, code, wide_type, strict_overflow_p);
6096 break;
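
      /* An illustrative trace (hypothetical unsigned x): multiplying
	 (x << 3) by C == 2 rewrites the shift as x * 8, and the
	 recursive call then folds the whole expression to x * 16.  */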
6097
6098 case PLUS_EXPR: case MINUS_EXPR:
6099 /* See if we can eliminate the operation on both sides. If we can, we
6100 can return a new PLUS or MINUS. If we can't, the only remaining
6101 cases where we can do anything are if the second operand is a
6102 constant. */
6103 sub_strict_overflow_p = false;
6104 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6105 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6106 if (t1 != 0 && t2 != 0
6107 && (code == MULT_EXPR
6108 /* If not multiplication, we can only do this if both operands
6109 are divisible by c. */
6110 || (multiple_of_p (ctype, op0, c)
6111 && multiple_of_p (ctype, op1, c))))
6112 {
6113 if (sub_strict_overflow_p)
6114 *strict_overflow_p = true;
6115 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6116 fold_convert (ctype, t2));
6117 }
6118
6119 /* If this was a subtraction, negate OP1 and set it to be an addition.
6120 This simplifies the logic below. */
6121 if (tcode == MINUS_EXPR)
6122 {
6123 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6124 /* If OP1 was not easily negatable, the constant may be OP0. */
6125 if (TREE_CODE (op0) == INTEGER_CST)
6126 {
6127 tree tem = op0;
6128 op0 = op1;
6129 op1 = tem;
6130 tem = t1;
6131 t1 = t2;
6132 t2 = tem;
6133 }
6134 }
6135
6136 if (TREE_CODE (op1) != INTEGER_CST)
6137 break;
6138
6139 /* If either OP1 or C are negative, this optimization is not safe for
6140 some of the division and remainder types while for others we need
6141 to change the code. */
6142 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6143 {
6144 if (code == CEIL_DIV_EXPR)
6145 code = FLOOR_DIV_EXPR;
6146 else if (code == FLOOR_DIV_EXPR)
6147 code = CEIL_DIV_EXPR;
6148 else if (code != MULT_EXPR
6149 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6150 break;
6151 }
6152
6153 /* If it's a multiply or a division/modulus operation of a multiple
6154 of our constant, do the operation and verify it doesn't overflow. */
6155 if (code == MULT_EXPR
6156 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6157 {
6158 op1 = const_binop (code, fold_convert (ctype, op1),
6159 fold_convert (ctype, c));
6160 /* We allow the constant to overflow with wrapping semantics. */
6161 if (op1 == 0
6162 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6163 break;
6164 }
6165 else
6166 break;
6167
6168 /* If we have an unsigned type, we cannot widen the operation since it
6169 will change the result if the original computation overflowed. */
6170 if (TYPE_UNSIGNED (ctype) && ctype != type)
6171 break;
6172
6173 /* If we were able to eliminate our operation from the first side,
6174 apply our operation to the second side and reform the PLUS. */
6175 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6176 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6177
6178 /* The last case is if we are a multiply. In that case, we can
6179 apply the distributive law to commute the multiply and addition
6180 if the multiplication of the constants doesn't overflow
6181 and overflow is defined. With undefined overflow
6182 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6183 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6184 return fold_build2 (tcode, ctype,
6185 fold_build2 (code, ctype,
6186 fold_convert (ctype, op0),
6187 fold_convert (ctype, c)),
6188 op1);
6189
6190 break;
6191
6192 case MULT_EXPR:
6193 /* We have a special case here if we are doing something like
6194 (C * 8) % 4 since we know that's zero. */
6195 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6196 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6197 /* If the multiplication can overflow we cannot optimize this. */
6198 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6199 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6200 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6201 {
6202 *strict_overflow_p = true;
6203 return omit_one_operand (type, integer_zero_node, op0);
6204 }
6205
6206 /* ... fall through ... */
6207
6208 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6209 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6210 /* If we can extract our operation from the LHS, do so and return a
6211 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6212 do something only if the second operand is a constant. */
6213 if (same_p
6214 && (t1 = extract_muldiv (op0, c, code, wide_type,
6215 strict_overflow_p)) != 0)
6216 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6217 fold_convert (ctype, op1));
6218 else if (tcode == MULT_EXPR && code == MULT_EXPR
6219 && (t1 = extract_muldiv (op1, c, code, wide_type,
6220 strict_overflow_p)) != 0)
6221 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6222 fold_convert (ctype, t1));
6223 else if (TREE_CODE (op1) != INTEGER_CST)
6224 return 0;
6225
6226 /* If these are the same operation types, we can associate them
6227 assuming no overflow. */
6228 if (tcode == code)
6229 {
6230 bool overflow_p = false;
6231 bool overflow_mul_p;
6232 signop sign = TYPE_SIGN (ctype);
6233 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6234 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6235 if (overflow_mul_p
6236 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6237 overflow_p = true;
6238 if (!overflow_p)
6239 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6240 wide_int_to_tree (ctype, mul));
6241 }
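
      /* Example: (x * 4) * 6 is folded here to x * 24, since the two
	 constants can be multiplied without the product overflowing.  */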
6242
6243 /* If these operations "cancel" each other, we have the main
6244 optimizations of this pass, which occur when either constant is a
6245 multiple of the other, in which case we replace this with an
6246 operation of either CODE or TCODE.
6247
6248 If we have an unsigned type, we cannot do this since it will change
6249 the result if the original computation overflowed. */
6250 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6251 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6252 || (tcode == MULT_EXPR
6253 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6254 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6255 && code != MULT_EXPR)))
6256 {
6257 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6258 {
6259 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6260 *strict_overflow_p = true;
6261 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6262 fold_convert (ctype,
6263 const_binop (TRUNC_DIV_EXPR,
6264 op1, c)));
6265 }
6266 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6267 {
6268 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6269 *strict_overflow_p = true;
6270 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6271 fold_convert (ctype,
6272 const_binop (TRUNC_DIV_EXPR,
6273 c, op1)));
6274 }
6275 }
6276 break;
6277
6278 default:
6279 break;
6280 }
6281
6282 return 0;
6283 }
6284 \f
6285 /* Return a node which has the indicated constant VALUE (either 0 or
6286 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6287 and is of the indicated TYPE. */
6288
6289 tree
6290 constant_boolean_node (bool value, tree type)
6291 {
6292 if (type == integer_type_node)
6293 return value ? integer_one_node : integer_zero_node;
6294 else if (type == boolean_type_node)
6295 return value ? boolean_true_node : boolean_false_node;
6296 else if (TREE_CODE (type) == VECTOR_TYPE)
6297 return build_vector_from_val (type,
6298 build_int_cst (TREE_TYPE (type),
6299 value ? -1 : 0));
6300 else
6301 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6302 }
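
/* For example, constant_boolean_node (true, boolean_type_node) yields
   boolean_true_node, while for a four-element integer vector type it
   yields the all-ones constant { -1, -1, -1, -1 }.  */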
6303
6304
6305 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6306 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6307 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6308 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6309 COND is the first argument to CODE; otherwise (as in the example
6310 given here), it is the second argument. TYPE is the type of the
6311 original expression. Return NULL_TREE if no simplification is
6312 possible. */
6313
6314 static tree
6315 fold_binary_op_with_conditional_arg (location_t loc,
6316 enum tree_code code,
6317 tree type, tree op0, tree op1,
6318 tree cond, tree arg, int cond_first_p)
6319 {
6320 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6321 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6322 tree test, true_value, false_value;
6323 tree lhs = NULL_TREE;
6324 tree rhs = NULL_TREE;
6325 enum tree_code cond_code = COND_EXPR;
6326
6327 if (TREE_CODE (cond) == COND_EXPR
6328 || TREE_CODE (cond) == VEC_COND_EXPR)
6329 {
6330 test = TREE_OPERAND (cond, 0);
6331 true_value = TREE_OPERAND (cond, 1);
6332 false_value = TREE_OPERAND (cond, 2);
6333 /* If this arm throws an exception (its value has void type),
6334 then it does not make sense to try to perform a logical or
6335 arithmetic operation involving it. */
6336 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6337 lhs = true_value;
6338 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6339 rhs = false_value;
6340 }
6341 else
6342 {
6343 tree testtype = TREE_TYPE (cond);
6344 test = cond;
6345 true_value = constant_boolean_node (true, testtype);
6346 false_value = constant_boolean_node (false, testtype);
6347 }
6348
6349 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6350 cond_code = VEC_COND_EXPR;
6351
6352 /* This transformation is only worthwhile if we don't have to wrap ARG
6353 in a SAVE_EXPR and the operation can be simplified without recursing
6354 on at least one of the branches once it is pushed inside the COND_EXPR. */
6355 if (!TREE_CONSTANT (arg)
6356 && (TREE_SIDE_EFFECTS (arg)
6357 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6358 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6359 return NULL_TREE;
6360
6361 arg = fold_convert_loc (loc, arg_type, arg);
6362 if (lhs == 0)
6363 {
6364 true_value = fold_convert_loc (loc, cond_type, true_value);
6365 if (cond_first_p)
6366 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6367 else
6368 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6369 }
6370 if (rhs == 0)
6371 {
6372 false_value = fold_convert_loc (loc, cond_type, false_value);
6373 if (cond_first_p)
6374 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6375 else
6376 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6377 }
6378
6379 /* Check that we have simplified at least one of the branches. */
6380 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6381 return NULL_TREE;
6382
6383 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6384 }
6385
6386 \f
6387 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6388
6389 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6390 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6391 ADDEND is the same as X.
6392
6393 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6394 and finite. The problematic cases are when X is zero, and its mode
6395 has signed zeros. In the case of rounding towards -infinity,
6396 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6397 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6398
6399 bool
6400 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6401 {
6402 if (!real_zerop (addend))
6403 return false;
6404
6405 /* Don't allow the fold with -fsignaling-nans. */
6406 if (HONOR_SNANS (element_mode (type)))
6407 return false;
6408
6409 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6410 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6411 return true;
6412
6413 /* In a vector or complex, we would need to check the sign of all zeros. */
6414 if (TREE_CODE (addend) != REAL_CST)
6415 return false;
6416
6417 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6418 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6419 negate = !negate;
6420
6421 /* The mode has signed zeros, and we have to honor their sign.
6422 In this situation, there is only one case we can return true for.
6423 X - 0 is the same as X unless rounding towards -infinity is
6424 supported. */
6425 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6426 }
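
/* An illustrative sketch: with signed zeros honored and default
   rounding, X + 0.0 cannot be folded to X (the sum is +0.0 when X is
   -0.0), so a call with NEGATE == 0 returns false; the same call with
   NEGATE set returns true, because X - 0.0 preserves X in every
   rounding mode except rounding towards -infinity.  */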
6427
6428 /* Subroutine of fold() that checks comparisons of built-in math
6429 functions against real constants.
6430
6431 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6432 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6433 is the type of the result and ARG0 and ARG1 are the operands of the
6434 comparison. ARG1 must be a TREE_REAL_CST.
6435
6436 The function returns the constant folded tree if a simplification
6437 can be made, and NULL_TREE otherwise. */
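
/* For example (NaNs honored): sqrt(x) > 3.0 folds to x > 9.0, while
   sqrt(x) < 3.0 folds to x >= 0.0 && x < 9.0, the first conjunct
   guarding against negative x, for which sqrt yields NaN.  */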
6438
6439 static tree
6440 fold_mathfn_compare (location_t loc,
6441 enum built_in_function fcode, enum tree_code code,
6442 tree type, tree arg0, tree arg1)
6443 {
6444 REAL_VALUE_TYPE c;
6445
6446 if (BUILTIN_SQRT_P (fcode))
6447 {
6448 tree arg = CALL_EXPR_ARG (arg0, 0);
6449 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6450
6451 c = TREE_REAL_CST (arg1);
6452 if (REAL_VALUE_NEGATIVE (c))
6453 {
6454 /* sqrt(x) ==, < or <= y is always false, if y is negative. */
6455 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6456 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6457
6458 /* sqrt(x) > y is always true, if y is negative and we
6459 don't care about NaNs, i.e. negative values of x. */
6460 if (code == NE_EXPR || !HONOR_NANS (mode))
6461 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6462
6463 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6464 return fold_build2_loc (loc, GE_EXPR, type, arg,
6465 build_real (TREE_TYPE (arg), dconst0));
6466 }
6467 else if (code == GT_EXPR || code == GE_EXPR)
6468 {
6469 REAL_VALUE_TYPE c2;
6470
6471 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6472 real_convert (&c2, mode, &c2);
6473
6474 if (REAL_VALUE_ISINF (c2))
6475 {
6476 /* sqrt(x) > y is x == +Inf, when y is very large. */
6477 if (HONOR_INFINITIES (mode))
6478 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6479 build_real (TREE_TYPE (arg), c2));
6480
6481 /* sqrt(x) > y is always false, when y is very large
6482 and we don't care about infinities. */
6483 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6484 }
6485
6486 /* sqrt(x) > c is the same as x > c*c. */
6487 return fold_build2_loc (loc, code, type, arg,
6488 build_real (TREE_TYPE (arg), c2));
6489 }
6490 else if (code == LT_EXPR || code == LE_EXPR)
6491 {
6492 REAL_VALUE_TYPE c2;
6493
6494 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6495 real_convert (&c2, mode, &c2);
6496
6497 if (REAL_VALUE_ISINF (c2))
6498 {
6499 /* sqrt(x) < y is always true, when y is a very large
6500 value and we don't care about NaNs or Infinities. */
6501 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6502 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6503
6504 /* sqrt(x) < y is x != +Inf when y is very large and we
6505 don't care about NaNs. */
6506 if (! HONOR_NANS (mode))
6507 return fold_build2_loc (loc, NE_EXPR, type, arg,
6508 build_real (TREE_TYPE (arg), c2));
6509
6510 /* sqrt(x) < y is x >= 0 when y is very large and we
6511 don't care about Infinities. */
6512 if (! HONOR_INFINITIES (mode))
6513 return fold_build2_loc (loc, GE_EXPR, type, arg,
6514 build_real (TREE_TYPE (arg), dconst0));
6515
6516 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6517 arg = save_expr (arg);
6518 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6519 fold_build2_loc (loc, GE_EXPR, type, arg,
6520 build_real (TREE_TYPE (arg),
6521 dconst0)),
6522 fold_build2_loc (loc, NE_EXPR, type, arg,
6523 build_real (TREE_TYPE (arg),
6524 c2)));
6525 }
6526
6527 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6528 if (! HONOR_NANS (mode))
6529 return fold_build2_loc (loc, code, type, arg,
6530 build_real (TREE_TYPE (arg), c2));
6531
6532 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6533 arg = save_expr (arg);
6534 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6535 fold_build2_loc (loc, GE_EXPR, type, arg,
6536 build_real (TREE_TYPE (arg),
6537 dconst0)),
6538 fold_build2_loc (loc, code, type, arg,
6539 build_real (TREE_TYPE (arg),
6540 c2)));
6541 }
6542 }
6543
6544 return NULL_TREE;
6545 }
6546
6547 /* Subroutine of fold() that optimizes comparisons against Infinities,
6548 either +Inf or -Inf.
6549
6550 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6551 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6552 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6553
6554 The function returns the constant folded tree if a simplification
6555 can be made, and NULL_TREE otherwise. */
6556
6557 static tree
6558 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6559 tree arg0, tree arg1)
6560 {
6561 machine_mode mode;
6562 REAL_VALUE_TYPE max;
6563 tree temp;
6564 bool neg;
6565
6566 mode = TYPE_MODE (TREE_TYPE (arg0));
6567
6568 /* For negative infinity swap the sense of the comparison. */
6569 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6570 if (neg)
6571 code = swap_tree_comparison (code);
6572
6573 switch (code)
6574 {
6575 case GT_EXPR:
6576 /* x > +Inf is always false, if we ignore sNaNs. */
6577 if (HONOR_SNANS (mode))
6578 return NULL_TREE;
6579 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6580
6581 case LE_EXPR:
6582 /* x <= +Inf is always true, if we don't care about NaNs. */
6583 if (! HONOR_NANS (mode))
6584 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6585
6586 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6587 arg0 = save_expr (arg0);
6588 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6589
6590 case EQ_EXPR:
6591 case GE_EXPR:
6592 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6593 real_maxval (&max, neg, mode);
6594 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6595 arg0, build_real (TREE_TYPE (arg0), max));
6596
6597 case LT_EXPR:
6598 /* x < +Inf is always equal to x <= DBL_MAX. */
6599 real_maxval (&max, neg, mode);
6600 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6601 arg0, build_real (TREE_TYPE (arg0), max));
6602
6603 case NE_EXPR:
6604 /* x != +Inf is always equal to !(x > DBL_MAX). */
6605 real_maxval (&max, neg, mode);
6606 if (! HONOR_NANS (mode))
6607 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6608 arg0, build_real (TREE_TYPE (arg0), max));
6609
6610 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6611 arg0, build_real (TREE_TYPE (arg0), max));
6612 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6613
6614 default:
6615 break;
6616 }
6617
6618 return NULL_TREE;
6619 }
6620
6621 /* Subroutine of fold() that optimizes comparisons of a division by
6622 a nonzero integer constant against an integer constant, i.e.
6623 X/C1 op C2.
6624
6625 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6626 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6627 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6628
6629 The function returns the constant folded tree if a simplification
6630 can be made, and NULL_TREE otherwise. */
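
/* A worked example (illustrative): for unsigned x, x / 3 == 2 holds
   exactly when 6 <= x && x <= 8, so LO becomes 6 (2 * 3), HI becomes
   8 (LO plus 3 - 1), and the comparison folds to a range check.  */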
6631
6632 static tree
6633 fold_div_compare (location_t loc,
6634 enum tree_code code, tree type, tree arg0, tree arg1)
6635 {
6636 tree prod, tmp, hi, lo;
6637 tree arg00 = TREE_OPERAND (arg0, 0);
6638 tree arg01 = TREE_OPERAND (arg0, 1);
6639 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6640 bool neg_overflow = false;
6641 bool overflow;
6642
6643 /* We have to do this the hard way to detect unsigned overflow.
6644 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6645 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6646 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6647 neg_overflow = false;
6648
6649 if (sign == UNSIGNED)
6650 {
6651 tmp = int_const_binop (MINUS_EXPR, arg01,
6652 build_int_cst (TREE_TYPE (arg01), 1));
6653 lo = prod;
6654
6655 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6656 val = wi::add (prod, tmp, sign, &overflow);
6657 hi = force_fit_type (TREE_TYPE (arg00), val,
6658 -1, overflow | TREE_OVERFLOW (prod));
6659 }
6660 else if (tree_int_cst_sgn (arg01) >= 0)
6661 {
6662 tmp = int_const_binop (MINUS_EXPR, arg01,
6663 build_int_cst (TREE_TYPE (arg01), 1));
6664 switch (tree_int_cst_sgn (arg1))
6665 {
6666 case -1:
6667 neg_overflow = true;
6668 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6669 hi = prod;
6670 break;
6671
6672 case 0:
6673 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6674 hi = tmp;
6675 break;
6676
6677 case 1:
6678 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6679 lo = prod;
6680 break;
6681
6682 default:
6683 gcc_unreachable ();
6684 }
6685 }
6686 else
6687 {
6688 /* A negative divisor reverses the relational operators. */
6689 code = swap_tree_comparison (code);
6690
6691 tmp = int_const_binop (PLUS_EXPR, arg01,
6692 build_int_cst (TREE_TYPE (arg01), 1));
6693 switch (tree_int_cst_sgn (arg1))
6694 {
6695 case -1:
6696 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6697 lo = prod;
6698 break;
6699
6700 case 0:
6701 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6702 lo = tmp;
6703 break;
6704
6705 case 1:
6706 neg_overflow = true;
6707 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6708 hi = prod;
6709 break;
6710
6711 default:
6712 gcc_unreachable ();
6713 }
6714 }
6715
6716 switch (code)
6717 {
6718 case EQ_EXPR:
6719 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6720 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6721 if (TREE_OVERFLOW (hi))
6722 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6723 if (TREE_OVERFLOW (lo))
6724 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6725 return build_range_check (loc, type, arg00, 1, lo, hi);
6726
6727 case NE_EXPR:
6728 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6729 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6730 if (TREE_OVERFLOW (hi))
6731 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6732 if (TREE_OVERFLOW (lo))
6733 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6734 return build_range_check (loc, type, arg00, 0, lo, hi);
6735
6736 case LT_EXPR:
6737 if (TREE_OVERFLOW (lo))
6738 {
6739 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6740 return omit_one_operand_loc (loc, type, tmp, arg00);
6741 }
6742 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6743
6744 case LE_EXPR:
6745 if (TREE_OVERFLOW (hi))
6746 {
6747 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6748 return omit_one_operand_loc (loc, type, tmp, arg00);
6749 }
6750 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6751
6752 case GT_EXPR:
6753 if (TREE_OVERFLOW (hi))
6754 {
6755 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6756 return omit_one_operand_loc (loc, type, tmp, arg00);
6757 }
6758 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6759
6760 case GE_EXPR:
6761 if (TREE_OVERFLOW (lo))
6762 {
6763 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6764 return omit_one_operand_loc (loc, type, tmp, arg00);
6765 }
6766 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6767
6768 default:
6769 break;
6770 }
6771
6772 return NULL_TREE;
6773 }
6774
6775
6776 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6777 equality/inequality test, then return a simplified form of the test
6778 using a sign testing. Otherwise return NULL. TYPE is the desired
6779 result type. */
6780
6781 static tree
6782 fold_single_bit_test_into_sign_test (location_t loc,
6783 enum tree_code code, tree arg0, tree arg1,
6784 tree result_type)
6785 {
6786 /* If this is testing a single bit, we can optimize the test. */
6787 if ((code == NE_EXPR || code == EQ_EXPR)
6788 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6789 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6790 {
6791 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6792 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6793 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6794
6795 if (arg00 != NULL_TREE
6796 /* This is only a win if casting to a signed type is cheap,
6797 i.e. when arg00's type is not a partial mode. */
6798 && TYPE_PRECISION (TREE_TYPE (arg00))
6799 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6800 {
6801 tree stype = signed_type_for (TREE_TYPE (arg00));
6802 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6803 result_type,
6804 fold_convert_loc (loc, stype, arg00),
6805 build_int_cst (stype, 0));
6806 }
6807 }
6808
6809 return NULL_TREE;
6810 }
6811
6812 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6813 equality/inequality test, then return a simplified form of
6814 the test using shifts and logical operations. Otherwise return
6815 NULL. TYPE is the desired result type. */
6816
6817 tree
6818 fold_single_bit_test (location_t loc, enum tree_code code,
6819 tree arg0, tree arg1, tree result_type)
6820 {
6821 /* If this is testing a single bit, we can optimize the test. */
6822 if ((code == NE_EXPR || code == EQ_EXPR)
6823 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6824 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6825 {
6826 tree inner = TREE_OPERAND (arg0, 0);
6827 tree type = TREE_TYPE (arg0);
6828 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6829 machine_mode operand_mode = TYPE_MODE (type);
6830 int ops_unsigned;
6831 tree signed_type, unsigned_type, intermediate_type;
6832 tree tem, one;
6833
6834 /* First, see if we can fold the single bit test into a sign-bit
6835 test. */
6836 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6837 result_type);
6838 if (tem)
6839 return tem;
6840
6841 /* Otherwise we have (A & C) != 0 where C is a single bit,
6842 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6843 Similarly for (A & C) == 0. */
6844
6845 /* If INNER is a right shift of a constant and it plus BITNUM does
6846 not overflow, adjust BITNUM and INNER. */
6847 if (TREE_CODE (inner) == RSHIFT_EXPR
6848 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6849 && bitnum < TYPE_PRECISION (type)
6850 && wi::ltu_p (TREE_OPERAND (inner, 1),
6851 TYPE_PRECISION (type) - bitnum))
6852 {
6853 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6854 inner = TREE_OPERAND (inner, 0);
6855 }
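
      /* E.g. (illustrative) for ((x >> 2) & 4) != 0 we start with
	 bitnum == 2 and inner == x >> 2; the adjustment above yields
	 bitnum == 4 and inner == x, so the test becomes
	 ((x >> 4) & 1).  */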
6856
6857 /* If we are going to be able to omit the AND below, we must do our
6858 operations as unsigned. If we must use the AND, we have a choice.
6859 Normally unsigned is faster, but for some machines signed is. */
6860 #ifdef LOAD_EXTEND_OP
6861 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6862 && !flag_syntax_only) ? 0 : 1;
6863 #else
6864 ops_unsigned = 1;
6865 #endif
6866
6867 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6868 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6869 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6870 inner = fold_convert_loc (loc, intermediate_type, inner);
6871
6872 if (bitnum != 0)
6873 inner = build2 (RSHIFT_EXPR, intermediate_type,
6874 inner, size_int (bitnum));
6875
6876 one = build_int_cst (intermediate_type, 1);
6877
6878 if (code == EQ_EXPR)
6879 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6880
6881 /* Put the AND last so it can combine with more things. */
6882 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6883
6884 /* Make sure to return the proper type. */
6885 inner = fold_convert_loc (loc, result_type, inner);
6886
6887 return inner;
6888 }
6889 return NULL_TREE;
6890 }
6891
6892 /* Check whether we are allowed to reorder operands arg0 and arg1,
6893 such that the evaluation of arg1 occurs before arg0. */
6894
6895 static bool
6896 reorder_operands_p (const_tree arg0, const_tree arg1)
6897 {
6898 if (! flag_evaluation_order)
6899 return true;
6900 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6901 return true;
6902 return ! TREE_SIDE_EFFECTS (arg0)
6903 && ! TREE_SIDE_EFFECTS (arg1);
6904 }
6905
6906 /* Test whether it is preferable to swap two operands, ARG0 and
6907 ARG1, for example because ARG0 is an integer constant and ARG1
6908 isn't. If REORDER is true, only recommend swapping if we can
6909 evaluate the operands in reverse order. */
6910
6911 bool
6912 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6913 {
6914 if (CONSTANT_CLASS_P (arg1))
6915 return 0;
6916 if (CONSTANT_CLASS_P (arg0))
6917 return 1;
6918
6919 STRIP_NOPS (arg0);
6920 STRIP_NOPS (arg1);
6921
6922 if (TREE_CONSTANT (arg1))
6923 return 0;
6924 if (TREE_CONSTANT (arg0))
6925 return 1;
6926
6927 if (reorder && flag_evaluation_order
6928 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6929 return 0;
6930
6931	  /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6932 for commutative and comparison operators. Ensuring a canonical
6933 form allows the optimizers to find additional redundancies without
6934 having to explicitly check for both orderings. */
6935 if (TREE_CODE (arg0) == SSA_NAME
6936 && TREE_CODE (arg1) == SSA_NAME
6937 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6938 return 1;
6939
6940 /* Put SSA_NAMEs last. */
6941 if (TREE_CODE (arg1) == SSA_NAME)
6942 return 0;
6943 if (TREE_CODE (arg0) == SSA_NAME)
6944 return 1;
6945
6946 /* Put variables last. */
6947 if (DECL_P (arg1))
6948 return 0;
6949 if (DECL_P (arg0))
6950 return 1;
6951
6952 return 0;
6953 }
6954
6955 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6956 ARG0 is extended to a wider type. */
6957
6958 static tree
6959 fold_widened_comparison (location_t loc, enum tree_code code,
6960 tree type, tree arg0, tree arg1)
6961 {
6962 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6963 tree arg1_unw;
6964 tree shorter_type, outer_type;
6965 tree min, max;
6966 bool above, below;
6967
6968 if (arg0_unw == arg0)
6969 return NULL_TREE;
6970 shorter_type = TREE_TYPE (arg0_unw);
6971
6972 #ifdef HAVE_canonicalize_funcptr_for_compare
6973 /* Disable this optimization if we're casting a function pointer
6974 type on targets that require function pointer canonicalization. */
6975 if (HAVE_canonicalize_funcptr_for_compare
6976 && TREE_CODE (shorter_type) == POINTER_TYPE
6977 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6978 return NULL_TREE;
6979 #endif
6980
6981 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6982 return NULL_TREE;
6983
6984 arg1_unw = get_unwidened (arg1, NULL_TREE);
6985
6986 /* If possible, express the comparison in the shorter mode. */
6987 if ((code == EQ_EXPR || code == NE_EXPR
6988 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6989 && (TREE_TYPE (arg1_unw) == shorter_type
6990 || ((TYPE_PRECISION (shorter_type)
6991 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6992 && (TYPE_UNSIGNED (shorter_type)
6993 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6994 || (TREE_CODE (arg1_unw) == INTEGER_CST
6995 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6996 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6997 && int_fits_type_p (arg1_unw, shorter_type))))
6998 return fold_build2_loc (loc, code, type, arg0_unw,
6999 fold_convert_loc (loc, shorter_type, arg1_unw));
7000
7001 if (TREE_CODE (arg1_unw) != INTEGER_CST
7002 || TREE_CODE (shorter_type) != INTEGER_TYPE
7003	      || !int_fits_type_p (arg1_unw, TREE_TYPE (arg0)))
7004 return NULL_TREE;
7005
7006	  /* If we are comparing with an integer that does not fit into the range
7007 of the shorter type, the result is known. */
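  /* For instance, if ARG0 widens an unsigned char, comparing it for
     equality with the constant 300 is always false, since the widened
     value is at most 255.  */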
7008 outer_type = TREE_TYPE (arg1_unw);
7009 min = lower_bound_in_type (outer_type, shorter_type);
7010 max = upper_bound_in_type (outer_type, shorter_type);
7011
7012 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7013 max, arg1_unw));
7014 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7015 arg1_unw, min));
7016
7017 switch (code)
7018 {
7019 case EQ_EXPR:
7020 if (above || below)
7021 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7022 break;
7023
7024 case NE_EXPR:
7025 if (above || below)
7026 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7027 break;
7028
7029 case LT_EXPR:
7030 case LE_EXPR:
7031 if (above)
7032 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7033 else if (below)
7034 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
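	  /* Fall through: if neither ABOVE nor BELOW holds, the
	     GT_EXPR/GE_EXPR checks below do nothing either.  */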
7035
7036 case GT_EXPR:
7037 case GE_EXPR:
7038 if (above)
7039 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7040 else if (below)
7041 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7042
7043 default:
7044 break;
7045 }
7046
7047 return NULL_TREE;
7048 }
7049
7050 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7051 ARG0 just the signedness is changed. */
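/* For instance, with u of type unsigned int and matching precision,
   (int) u == 5 can be rewritten as u == 5U; only the signedness of
   the comparison operands changes.  */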
7052
7053 static tree
7054 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7055 tree arg0, tree arg1)
7056 {
7057 tree arg0_inner;
7058 tree inner_type, outer_type;
7059
7060 if (!CONVERT_EXPR_P (arg0))
7061 return NULL_TREE;
7062
7063 outer_type = TREE_TYPE (arg0);
7064 arg0_inner = TREE_OPERAND (arg0, 0);
7065 inner_type = TREE_TYPE (arg0_inner);
7066
7067 #ifdef HAVE_canonicalize_funcptr_for_compare
7068 /* Disable this optimization if we're casting a function pointer
7069 type on targets that require function pointer canonicalization. */
7070 if (HAVE_canonicalize_funcptr_for_compare
7071 && TREE_CODE (inner_type) == POINTER_TYPE
7072 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7073 return NULL_TREE;
7074 #endif
7075
7076 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7077 return NULL_TREE;
7078
7079 if (TREE_CODE (arg1) != INTEGER_CST
7080 && !(CONVERT_EXPR_P (arg1)
7081 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7082 return NULL_TREE;
7083
7084 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7085 && code != NE_EXPR
7086 && code != EQ_EXPR)
7087 return NULL_TREE;
7088
7089 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7090 return NULL_TREE;
7091
7092 if (TREE_CODE (arg1) == INTEGER_CST)
7093 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7094 TREE_OVERFLOW (arg1));
7095 else
7096 arg1 = fold_convert_loc (loc, inner_type, arg1);
7097
7098 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7099 }
7100
7101
7102 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7103 means A >= Y && A != MAX, but in this case we know that
7104 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
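/* For instance, with BOUND = (a < x) and INEQ = (a + 1 > y), the
   difference computed below is 1, so the result is (a >= y).  */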
7105
7106 static tree
7107 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7108 {
7109 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7110
7111 if (TREE_CODE (bound) == LT_EXPR)
7112 a = TREE_OPERAND (bound, 0);
7113 else if (TREE_CODE (bound) == GT_EXPR)
7114 a = TREE_OPERAND (bound, 1);
7115 else
7116 return NULL_TREE;
7117
7118 typea = TREE_TYPE (a);
7119 if (!INTEGRAL_TYPE_P (typea)
7120 && !POINTER_TYPE_P (typea))
7121 return NULL_TREE;
7122
7123 if (TREE_CODE (ineq) == LT_EXPR)
7124 {
7125 a1 = TREE_OPERAND (ineq, 1);
7126 y = TREE_OPERAND (ineq, 0);
7127 }
7128 else if (TREE_CODE (ineq) == GT_EXPR)
7129 {
7130 a1 = TREE_OPERAND (ineq, 0);
7131 y = TREE_OPERAND (ineq, 1);
7132 }
7133 else
7134 return NULL_TREE;
7135
7136 if (TREE_TYPE (a1) != typea)
7137 return NULL_TREE;
7138
7139 if (POINTER_TYPE_P (typea))
7140 {
7141	      /* Convert the pointers to signed integers before taking the difference.  */
7142 tree ta = fold_convert_loc (loc, ssizetype, a);
7143 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7144 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7145 }
7146 else
7147 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7148
7149 if (!diff || !integer_onep (diff))
7150 return NULL_TREE;
7151
7152 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7153 }
7154
7155 /* Fold a sum or difference of at least one multiplication.
7156 Returns the folded tree or NULL if no simplification could be made. */
7157
7158 static tree
7159 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7160 tree arg0, tree arg1)
7161 {
7162 tree arg00, arg01, arg10, arg11;
7163 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7164
7165 /* (A * C) +- (B * C) -> (A+-B) * C.
7166 (A * C) +- A -> A * (C+-1).
7167 We are most concerned about the case where C is a constant,
7168 but other combinations show up during loop reduction. Since
7169 it is not difficult, try all four possibilities. */
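  /* For instance, x*3 + x becomes (3+1) * x, and a*c - b*c
     becomes (a-b) * c.  */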
7170
7171 if (TREE_CODE (arg0) == MULT_EXPR)
7172 {
7173 arg00 = TREE_OPERAND (arg0, 0);
7174 arg01 = TREE_OPERAND (arg0, 1);
7175 }
7176 else if (TREE_CODE (arg0) == INTEGER_CST)
7177 {
7178 arg00 = build_one_cst (type);
7179 arg01 = arg0;
7180 }
7181 else
7182 {
7183 /* We cannot generate constant 1 for fract. */
7184 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7185 return NULL_TREE;
7186 arg00 = arg0;
7187 arg01 = build_one_cst (type);
7188 }
7189 if (TREE_CODE (arg1) == MULT_EXPR)
7190 {
7191 arg10 = TREE_OPERAND (arg1, 0);
7192 arg11 = TREE_OPERAND (arg1, 1);
7193 }
7194 else if (TREE_CODE (arg1) == INTEGER_CST)
7195 {
7196 arg10 = build_one_cst (type);
7197	      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7198 the purpose of this canonicalization. */
7199 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7200 && negate_expr_p (arg1)
7201 && code == PLUS_EXPR)
7202 {
7203 arg11 = negate_expr (arg1);
7204 code = MINUS_EXPR;
7205 }
7206 else
7207 arg11 = arg1;
7208 }
7209 else
7210 {
7211 /* We cannot generate constant 1 for fract. */
7212 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7213 return NULL_TREE;
7214 arg10 = arg1;
7215 arg11 = build_one_cst (type);
7216 }
7217 same = NULL_TREE;
7218
7219 if (operand_equal_p (arg01, arg11, 0))
7220 same = arg01, alt0 = arg00, alt1 = arg10;
7221 else if (operand_equal_p (arg00, arg10, 0))
7222 same = arg00, alt0 = arg01, alt1 = arg11;
7223 else if (operand_equal_p (arg00, arg11, 0))
7224 same = arg00, alt0 = arg01, alt1 = arg10;
7225 else if (operand_equal_p (arg01, arg10, 0))
7226 same = arg01, alt0 = arg00, alt1 = arg11;
7227
7228 /* No identical multiplicands; see if we can find a common
7229 power-of-two factor in non-power-of-two multiplies. This
7230 can help in multi-dimensional array access. */
7231 else if (tree_fits_shwi_p (arg01)
7232 && tree_fits_shwi_p (arg11))
7233 {
7234 HOST_WIDE_INT int01, int11, tmp;
7235 bool swap = false;
7236 tree maybe_same;
7237 int01 = tree_to_shwi (arg01);
7238 int11 = tree_to_shwi (arg11);
7239
7240 /* Move min of absolute values to int11. */
7241 if (absu_hwi (int01) < absu_hwi (int11))
7242 {
7243 tmp = int01, int01 = int11, int11 = tmp;
7244 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7245 maybe_same = arg01;
7246 swap = true;
7247 }
7248 else
7249 maybe_same = arg11;
7250
7251 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7252 /* The remainder should not be a constant, otherwise we
7253	     end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7254	     increase the number of multiplications necessary.  */
7255 && TREE_CODE (arg10) != INTEGER_CST)
7256 {
7257 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7258 build_int_cst (TREE_TYPE (arg00),
7259 int01 / int11));
7260 alt1 = arg10;
7261 same = maybe_same;
7262 if (swap)
7263 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7264 }
7265 }
7266
7267 if (same)
7268 return fold_build2_loc (loc, MULT_EXPR, type,
7269 fold_build2_loc (loc, code, type,
7270 fold_convert_loc (loc, type, alt0),
7271 fold_convert_loc (loc, type, alt1)),
7272 fold_convert_loc (loc, type, same));
7273
7274 return NULL_TREE;
7275 }
7276
7277 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7278 specified by EXPR into the buffer PTR of length LEN bytes.
7279 Return the number of bytes placed in the buffer, or zero
7280 upon failure. */
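/* For instance, encoding the 32-bit constant 0x11223344 on a
   little-endian target with 32-bit words stores the bytes
   0x44 0x33 0x22 0x11; a big-endian target stores them reversed.  */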
7281
7282 static int
7283 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7284 {
7285 tree type = TREE_TYPE (expr);
7286 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7287 int byte, offset, word, words;
7288 unsigned char value;
7289
7290 if ((off == -1 && total_bytes > len)
7291 || off >= total_bytes)
7292 return 0;
7293 if (off == -1)
7294 off = 0;
7295 words = total_bytes / UNITS_PER_WORD;
7296
7297 for (byte = 0; byte < total_bytes; byte++)
7298 {
7299 int bitpos = byte * BITS_PER_UNIT;
7300 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7301 number of bytes. */
7302 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7303
7304 if (total_bytes > UNITS_PER_WORD)
7305 {
7306 word = byte / UNITS_PER_WORD;
7307 if (WORDS_BIG_ENDIAN)
7308 word = (words - 1) - word;
7309 offset = word * UNITS_PER_WORD;
7310 if (BYTES_BIG_ENDIAN)
7311 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7312 else
7313 offset += byte % UNITS_PER_WORD;
7314 }
7315 else
7316 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7317 if (offset >= off
7318 && offset - off < len)
7319 ptr[offset - off] = value;
7320 }
7321 return MIN (len, total_bytes - off);
7322 }
7323
7324
7325 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7326 specified by EXPR into the buffer PTR of length LEN bytes.
7327 Return the number of bytes placed in the buffer, or zero
7328 upon failure. */
7329
7330 static int
7331 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7332 {
7333 tree type = TREE_TYPE (expr);
7334 machine_mode mode = TYPE_MODE (type);
7335 int total_bytes = GET_MODE_SIZE (mode);
7336 FIXED_VALUE_TYPE value;
7337 tree i_value, i_type;
7338
7339 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7340 return 0;
7341
7342 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7343
7344 if (NULL_TREE == i_type
7345	      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7346 return 0;
7347
7348 value = TREE_FIXED_CST (expr);
7349 i_value = double_int_to_tree (i_type, value.data);
7350
7351 return native_encode_int (i_value, ptr, len, off);
7352 }
7353
7354
7355 /* Subroutine of native_encode_expr. Encode the REAL_CST
7356 specified by EXPR into the buffer PTR of length LEN bytes.
7357 Return the number of bytes placed in the buffer, or zero
7358 upon failure. */
7359
7360 static int
7361 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7362 {
7363 tree type = TREE_TYPE (expr);
7364 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7365 int byte, offset, word, words, bitpos;
7366 unsigned char value;
7367
7368 /* There are always 32 bits in each long, no matter the size of
7369	     the host's long.  We handle floating point representations with
7370 up to 192 bits. */
7371 long tmp[6];
7372
7373 if ((off == -1 && total_bytes > len)
7374 || off >= total_bytes)
7375 return 0;
7376 if (off == -1)
7377 off = 0;
7378 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7379
7380 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7381
7382 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7383 bitpos += BITS_PER_UNIT)
7384 {
7385 byte = (bitpos / BITS_PER_UNIT) & 3;
7386 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7387
7388 if (UNITS_PER_WORD < 4)
7389 {
7390 word = byte / UNITS_PER_WORD;
7391 if (WORDS_BIG_ENDIAN)
7392 word = (words - 1) - word;
7393 offset = word * UNITS_PER_WORD;
7394 if (BYTES_BIG_ENDIAN)
7395 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7396 else
7397 offset += byte % UNITS_PER_WORD;
7398 }
7399 else
7400 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7401 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7402 if (offset >= off
7403 && offset - off < len)
7404 ptr[offset - off] = value;
7405 }
7406 return MIN (len, total_bytes - off);
7407 }
7408
7409 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7410 specified by EXPR into the buffer PTR of length LEN bytes.
7411 Return the number of bytes placed in the buffer, or zero
7412 upon failure. */
7413
7414 static int
7415 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7416 {
7417 int rsize, isize;
7418 tree part;
7419
7420 part = TREE_REALPART (expr);
7421 rsize = native_encode_expr (part, ptr, len, off);
7422 if (off == -1
7423 && rsize == 0)
7424 return 0;
7425 part = TREE_IMAGPART (expr);
7426 if (off != -1)
7427 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7428 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7429 if (off == -1
7430 && isize != rsize)
7431 return 0;
7432 return rsize + isize;
7433 }
7434
7435
7436 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7437 specified by EXPR into the buffer PTR of length LEN bytes.
7438 Return the number of bytes placed in the buffer, or zero
7439 upon failure. */
7440
7441 static int
7442 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7443 {
7444 unsigned i, count;
7445 int size, offset;
7446 tree itype, elem;
7447
7448 offset = 0;
7449 count = VECTOR_CST_NELTS (expr);
7450 itype = TREE_TYPE (TREE_TYPE (expr));
7451 size = GET_MODE_SIZE (TYPE_MODE (itype));
7452 for (i = 0; i < count; i++)
7453 {
7454 if (off >= size)
7455 {
7456 off -= size;
7457 continue;
7458 }
7459 elem = VECTOR_CST_ELT (expr, i);
7460 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7461 if ((off == -1 && res != size)
7462 || res == 0)
7463 return 0;
7464 offset += res;
7465 if (offset >= len)
7466 return offset;
7467 if (off != -1)
7468 off = 0;
7469 }
7470 return offset;
7471 }
7472
7473
7474 /* Subroutine of native_encode_expr. Encode the STRING_CST
7475 specified by EXPR into the buffer PTR of length LEN bytes.
7476 Return the number of bytes placed in the buffer, or zero
7477 upon failure. */
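/* For instance, encoding the STRING_CST "ab" of type char[4] yields
   the bytes 'a', 'b', 0, 0: bytes beyond TREE_STRING_LENGTH are
   zero-filled up to the size of the type.  */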
7478
7479 static int
7480 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7481 {
7482 tree type = TREE_TYPE (expr);
7483 HOST_WIDE_INT total_bytes;
7484
7485 if (TREE_CODE (type) != ARRAY_TYPE
7486 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7487 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7488 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7489 return 0;
7490 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7491 if ((off == -1 && total_bytes > len)
7492 || off >= total_bytes)
7493 return 0;
7494 if (off == -1)
7495 off = 0;
7496 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7497 {
7498 int written = 0;
7499 if (off < TREE_STRING_LENGTH (expr))
7500 {
7501 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7502 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7503 }
7504 memset (ptr + written, 0,
7505 MIN (total_bytes - written, len - written));
7506 }
7507 else
7508 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7509 return MIN (total_bytes - off, len);
7510 }
7511
7512
7513 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7514 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7515 buffer PTR of length LEN bytes. If OFF is not -1 then start
7516 the encoding at byte offset OFF and encode at most LEN bytes.
7517 Return the number of bytes placed in the buffer, or zero upon failure. */
7518
7519 int
7520 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7521 {
7522 switch (TREE_CODE (expr))
7523 {
7524 case INTEGER_CST:
7525 return native_encode_int (expr, ptr, len, off);
7526
7527 case REAL_CST:
7528 return native_encode_real (expr, ptr, len, off);
7529
7530 case FIXED_CST:
7531 return native_encode_fixed (expr, ptr, len, off);
7532
7533 case COMPLEX_CST:
7534 return native_encode_complex (expr, ptr, len, off);
7535
7536 case VECTOR_CST:
7537 return native_encode_vector (expr, ptr, len, off);
7538
7539 case STRING_CST:
7540 return native_encode_string (expr, ptr, len, off);
7541
7542 default:
7543 return 0;
7544 }
7545 }
7546
7547
7548 /* Subroutine of native_interpret_expr. Interpret the contents of
7549 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7550 If the buffer cannot be interpreted, return NULL_TREE. */
7551
7552 static tree
7553 native_interpret_int (tree type, const unsigned char *ptr, int len)
7554 {
7555 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7556
7557 if (total_bytes > len
7558 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7559 return NULL_TREE;
7560
7561 wide_int result = wi::from_buffer (ptr, total_bytes);
7562
7563 return wide_int_to_tree (type, result);
7564 }
7565
7566
7567 /* Subroutine of native_interpret_expr. Interpret the contents of
7568 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7569 If the buffer cannot be interpreted, return NULL_TREE. */
7570
7571 static tree
7572 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7573 {
7574 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7575 double_int result;
7576 FIXED_VALUE_TYPE fixed_value;
7577
7578 if (total_bytes > len
7579 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7580 return NULL_TREE;
7581
7582 result = double_int::from_buffer (ptr, total_bytes);
7583 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7584
7585 return build_fixed (type, fixed_value);
7586 }
7587
7588
7589 /* Subroutine of native_interpret_expr. Interpret the contents of
7590 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7591 If the buffer cannot be interpreted, return NULL_TREE. */
7592
7593 static tree
7594 native_interpret_real (tree type, const unsigned char *ptr, int len)
7595 {
7596 machine_mode mode = TYPE_MODE (type);
7597 int total_bytes = GET_MODE_SIZE (mode);
7598 int byte, offset, word, words, bitpos;
7599 unsigned char value;
7600 /* There are always 32 bits in each long, no matter the size of
7601	     the host's long.  We handle floating point representations with
7602 up to 192 bits. */
7603 REAL_VALUE_TYPE r;
7604 long tmp[6];
7605
7606 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7607 if (total_bytes > len || total_bytes > 24)
7608 return NULL_TREE;
7609 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7610
7611 memset (tmp, 0, sizeof (tmp));
7612 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7613 bitpos += BITS_PER_UNIT)
7614 {
7615 byte = (bitpos / BITS_PER_UNIT) & 3;
7616 if (UNITS_PER_WORD < 4)
7617 {
7618 word = byte / UNITS_PER_WORD;
7619 if (WORDS_BIG_ENDIAN)
7620 word = (words - 1) - word;
7621 offset = word * UNITS_PER_WORD;
7622 if (BYTES_BIG_ENDIAN)
7623 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7624 else
7625 offset += byte % UNITS_PER_WORD;
7626 }
7627 else
7628 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7629 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7630
7631 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7632 }
7633
7634 real_from_target (&r, tmp, mode);
7635 return build_real (type, r);
7636 }
7637
7638
7639 /* Subroutine of native_interpret_expr. Interpret the contents of
7640 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7641 If the buffer cannot be interpreted, return NULL_TREE. */
7642
7643 static tree
7644 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7645 {
7646 tree etype, rpart, ipart;
7647 int size;
7648
7649 etype = TREE_TYPE (type);
7650 size = GET_MODE_SIZE (TYPE_MODE (etype));
7651 if (size * 2 > len)
7652 return NULL_TREE;
7653 rpart = native_interpret_expr (etype, ptr, size);
7654 if (!rpart)
7655 return NULL_TREE;
7656 ipart = native_interpret_expr (etype, ptr+size, size);
7657 if (!ipart)
7658 return NULL_TREE;
7659 return build_complex (type, rpart, ipart);
7660 }
7661
7662
7663 /* Subroutine of native_interpret_expr. Interpret the contents of
7664 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7665 If the buffer cannot be interpreted, return NULL_TREE. */
7666
7667 static tree
7668 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7669 {
7670 tree etype, elem;
7671 int i, size, count;
7672 tree *elements;
7673
7674 etype = TREE_TYPE (type);
7675 size = GET_MODE_SIZE (TYPE_MODE (etype));
7676 count = TYPE_VECTOR_SUBPARTS (type);
7677 if (size * count > len)
7678 return NULL_TREE;
7679
7680 elements = XALLOCAVEC (tree, count);
7681 for (i = count - 1; i >= 0; i--)
7682 {
7683 elem = native_interpret_expr (etype, ptr+(i*size), size);
7684 if (!elem)
7685 return NULL_TREE;
7686 elements[i] = elem;
7687 }
7688 return build_vector (type, elements);
7689 }
7690
7691
7692 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7693 the buffer PTR of length LEN as a constant of type TYPE. For
7694 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7695 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7696 return NULL_TREE. */
7697
7698 tree
7699 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7700 {
7701 switch (TREE_CODE (type))
7702 {
7703 case INTEGER_TYPE:
7704 case ENUMERAL_TYPE:
7705 case BOOLEAN_TYPE:
7706 case POINTER_TYPE:
7707 case REFERENCE_TYPE:
7708 return native_interpret_int (type, ptr, len);
7709
7710 case REAL_TYPE:
7711 return native_interpret_real (type, ptr, len);
7712
7713 case FIXED_POINT_TYPE:
7714 return native_interpret_fixed (type, ptr, len);
7715
7716 case COMPLEX_TYPE:
7717 return native_interpret_complex (type, ptr, len);
7718
7719 case VECTOR_TYPE:
7720 return native_interpret_vector (type, ptr, len);
7721
7722 default:
7723 return NULL_TREE;
7724 }
7725 }
7726
7727 /* Returns true if we can interpret the contents of a native encoding
7728 as TYPE. */
7729
7730 static bool
7731 can_native_interpret_type_p (tree type)
7732 {
7733 switch (TREE_CODE (type))
7734 {
7735 case INTEGER_TYPE:
7736 case ENUMERAL_TYPE:
7737 case BOOLEAN_TYPE:
7738 case POINTER_TYPE:
7739 case REFERENCE_TYPE:
7740 case FIXED_POINT_TYPE:
7741 case REAL_TYPE:
7742 case COMPLEX_TYPE:
7743 case VECTOR_TYPE:
7744 return true;
7745 default:
7746 return false;
7747 }
7748 }
7749
7750 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7751 TYPE at compile-time. If we're unable to perform the conversion
7752 return NULL_TREE. */
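/* For instance, on a target where int and float are both 32 bits,
   VIEW_CONVERT_EXPR <int> (1.0f) folds to the INTEGER_CST
   1065353216 (0x3f800000), the bit pattern of 1.0f.  */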
7753
7754 static tree
7755 fold_view_convert_expr (tree type, tree expr)
7756 {
7757 /* We support up to 512-bit values (for V8DFmode). */
7758 unsigned char buffer[64];
7759 int len;
7760
7761 /* Check that the host and target are sane. */
7762 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7763 return NULL_TREE;
7764
7765 len = native_encode_expr (expr, buffer, sizeof (buffer));
7766 if (len == 0)
7767 return NULL_TREE;
7768
7769 return native_interpret_expr (type, buffer, len);
7770 }
7771
7772 /* Build an expression for the address of T. Folds away INDIRECT_REF
7773 to avoid confusing the gimplify process. */
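/* For instance, &*p folds back to p (with a cast to PTRTYPE if
   needed), and &MEM_REF [p, 0] folds to p.  */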
7774
7775 tree
7776 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7777 {
7778 /* The size of the object is not relevant when talking about its address. */
7779 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7780 t = TREE_OPERAND (t, 0);
7781
7782 if (TREE_CODE (t) == INDIRECT_REF)
7783 {
7784 t = TREE_OPERAND (t, 0);
7785
7786 if (TREE_TYPE (t) != ptrtype)
7787 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7788 }
7789 else if (TREE_CODE (t) == MEM_REF
7790 && integer_zerop (TREE_OPERAND (t, 1)))
7791 return TREE_OPERAND (t, 0);
7792 else if (TREE_CODE (t) == MEM_REF
7793 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7794 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7795 TREE_OPERAND (t, 0),
7796 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7797 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7798 {
7799 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7800
7801 if (TREE_TYPE (t) != ptrtype)
7802 t = fold_convert_loc (loc, ptrtype, t);
7803 }
7804 else
7805 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7806
7807 return t;
7808 }
7809
7810 /* Build an expression for the address of T. */
7811
7812 tree
7813 build_fold_addr_expr_loc (location_t loc, tree t)
7814 {
7815 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7816
7817 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7818 }
7819
7820 /* Fold a unary expression of code CODE and type TYPE with operand
7821 OP0. Return the folded expression if folding is successful.
7822 Otherwise, return NULL_TREE. */
7823
7824 tree
7825 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7826 {
7827 tree tem;
7828 tree arg0;
7829 enum tree_code_class kind = TREE_CODE_CLASS (code);
7830
7831 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7832 && TREE_CODE_LENGTH (code) == 1);
7833
7834 arg0 = op0;
7835 if (arg0)
7836 {
7837 if (CONVERT_EXPR_CODE_P (code)
7838 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7839 {
7840 /* Don't use STRIP_NOPS, because signedness of argument type
7841 matters. */
7842 STRIP_SIGN_NOPS (arg0);
7843 }
7844 else
7845 {
7846 /* Strip any conversions that don't change the mode. This
7847 is safe for every expression, except for a comparison
7848 expression because its signedness is derived from its
7849 operands.
7850
7851 Note that this is done as an internal manipulation within
7852 the constant folder, in order to find the simplest
7853 representation of the arguments so that their form can be
7854	     studied.  In any case, the appropriate type conversions
7855 should be put back in the tree that will get out of the
7856 constant folder. */
7857 STRIP_NOPS (arg0);
7858 }
7859
7860 if (CONSTANT_CLASS_P (arg0))
7861 {
7862 tree tem = const_unop (code, type, arg0);
7863 if (tem)
7864 {
7865 if (TREE_TYPE (tem) != type)
7866 tem = fold_convert_loc (loc, type, tem);
7867 return tem;
7868 }
7869 }
7870 }
7871
7872 tem = generic_simplify (loc, code, type, op0);
7873 if (tem)
7874 return tem;
7875
7876 if (TREE_CODE_CLASS (code) == tcc_unary)
7877 {
7878 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7879 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7880 fold_build1_loc (loc, code, type,
7881 fold_convert_loc (loc, TREE_TYPE (op0),
7882 TREE_OPERAND (arg0, 1))));
7883 else if (TREE_CODE (arg0) == COND_EXPR)
7884 {
7885 tree arg01 = TREE_OPERAND (arg0, 1);
7886 tree arg02 = TREE_OPERAND (arg0, 2);
7887 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7888 arg01 = fold_build1_loc (loc, code, type,
7889 fold_convert_loc (loc,
7890 TREE_TYPE (op0), arg01));
7891 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7892 arg02 = fold_build1_loc (loc, code, type,
7893 fold_convert_loc (loc,
7894 TREE_TYPE (op0), arg02));
7895 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7896 arg01, arg02);
7897
7898	  /* If this was a conversion, and all we did was to move it
7899	     inside the COND_EXPR, bring it back out.  But leave it if
7900 it is a conversion from integer to integer and the
7901 result precision is no wider than a word since such a
7902 conversion is cheap and may be optimized away by combine,
7903 while it couldn't if it were outside the COND_EXPR. Then return
7904 so we don't get into an infinite recursion loop taking the
7905 conversion out and then back in. */
7906
7907 if ((CONVERT_EXPR_CODE_P (code)
7908 || code == NON_LVALUE_EXPR)
7909 && TREE_CODE (tem) == COND_EXPR
7910 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7911 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7912 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7913 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7914 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7915 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7916 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7917 && (INTEGRAL_TYPE_P
7918 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7919 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7920 || flag_syntax_only))
7921 tem = build1_loc (loc, code, type,
7922 build3 (COND_EXPR,
7923 TREE_TYPE (TREE_OPERAND
7924 (TREE_OPERAND (tem, 1), 0)),
7925 TREE_OPERAND (tem, 0),
7926 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7927 TREE_OPERAND (TREE_OPERAND (tem, 2),
7928 0)));
7929 return tem;
7930 }
7931 }
7932
7933 switch (code)
7934 {
7935 case NON_LVALUE_EXPR:
7936 if (!maybe_lvalue_p (op0))
7937 return fold_convert_loc (loc, type, op0);
7938 return NULL_TREE;
7939
7940 CASE_CONVERT:
7941 case FLOAT_EXPR:
7942 case FIX_TRUNC_EXPR:
7943 if (COMPARISON_CLASS_P (op0))
7944 {
7945 /* If we have (type) (a CMP b) and type is an integral type, return
7946	     a new expression involving the new type.  Canonicalize
7947 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7948 non-integral type.
7949	     Do not fold the result, as that would not simplify further;
7950	     folding it again also results in recursion.  */
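	  /* E.g. (float) (a < b) becomes roughly
	     a < b ? 1.0f : 0.0f.  */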
7951 if (TREE_CODE (type) == BOOLEAN_TYPE)
7952 return build2_loc (loc, TREE_CODE (op0), type,
7953 TREE_OPERAND (op0, 0),
7954 TREE_OPERAND (op0, 1));
7955 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7956 && TREE_CODE (type) != VECTOR_TYPE)
7957 return build3_loc (loc, COND_EXPR, type, op0,
7958 constant_boolean_node (true, type),
7959 constant_boolean_node (false, type));
7960 }
7961
7962 /* Handle (T *)&A.B.C for A being of type T and B and C
7963 living at offset zero. This occurs frequently in
7964 C++ upcasting and then accessing the base. */
7965 if (TREE_CODE (op0) == ADDR_EXPR
7966 && POINTER_TYPE_P (type)
7967 && handled_component_p (TREE_OPERAND (op0, 0)))
7968 {
7969 HOST_WIDE_INT bitsize, bitpos;
7970 tree offset;
7971 machine_mode mode;
7972 int unsignedp, volatilep;
7973 tree base = TREE_OPERAND (op0, 0);
7974 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7975 &mode, &unsignedp, &volatilep, false);
7976 /* If the reference was to a (constant) zero offset, we can use
7977 the address of the base if it has the same base type
7978 as the result type and the pointer type is unqualified. */
7979 if (! offset && bitpos == 0
7980 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7981 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7982 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7983 return fold_convert_loc (loc, type,
7984 build_fold_addr_expr_loc (loc, base));
7985 }
7986
7987 if (TREE_CODE (op0) == MODIFY_EXPR
7988 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7989 /* Detect assigning a bitfield. */
7990 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7991 && DECL_BIT_FIELD
7992 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7993 {
7994 /* Don't leave an assignment inside a conversion
7995 unless assigning a bitfield. */
7996 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7997 /* First do the assignment, then return converted constant. */
7998 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7999 TREE_NO_WARNING (tem) = 1;
8000 TREE_USED (tem) = 1;
8001 return tem;
8002 }
8003
8004 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8005	 constant (if x has signed type, the sign bit cannot be set
8006 in c). This folds extension into the BIT_AND_EXPR.
8007 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8008 very likely don't have maximal range for their precision and this
8009 transformation effectively doesn't preserve non-maximal ranges. */
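      /* For instance, with x of unsigned type,
	 (unsigned long) (x & 0xff) becomes
	 (unsigned long) x & 0xfful.  */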
8010 if (TREE_CODE (type) == INTEGER_TYPE
8011 && TREE_CODE (op0) == BIT_AND_EXPR
8012 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8013 {
8014 tree and_expr = op0;
8015 tree and0 = TREE_OPERAND (and_expr, 0);
8016 tree and1 = TREE_OPERAND (and_expr, 1);
8017 int change = 0;
8018
8019 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8020 || (TYPE_PRECISION (type)
8021 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8022 change = 1;
8023 else if (TYPE_PRECISION (TREE_TYPE (and1))
8024 <= HOST_BITS_PER_WIDE_INT
8025 && tree_fits_uhwi_p (and1))
8026 {
8027 unsigned HOST_WIDE_INT cst;
8028
8029 cst = tree_to_uhwi (and1);
8030 cst &= HOST_WIDE_INT_M1U
8031 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8032 change = (cst == 0);
8033 #ifdef LOAD_EXTEND_OP
8034 if (change
8035 && !flag_syntax_only
8036 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8037 == ZERO_EXTEND))
8038 {
8039 tree uns = unsigned_type_for (TREE_TYPE (and0));
8040 and0 = fold_convert_loc (loc, uns, and0);
8041 and1 = fold_convert_loc (loc, uns, and1);
8042 }
8043 #endif
8044 }
8045 if (change)
8046 {
8047 tem = force_fit_type (type, wi::to_widest (and1), 0,
8048 TREE_OVERFLOW (and1));
8049 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8050 fold_convert_loc (loc, type, and0), tem);
8051 }
8052 }
8053
8054 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8055 when one of the new casts will fold away. Conservatively we assume
8056 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8057 if (POINTER_TYPE_P (type)
8058 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8059 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8060 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8061 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8062 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8063 {
8064 tree arg00 = TREE_OPERAND (arg0, 0);
8065 tree arg01 = TREE_OPERAND (arg0, 1);
8066
8067 return fold_build_pointer_plus_loc
8068 (loc, fold_convert_loc (loc, type, arg00), arg01);
8069 }
8070
8071 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8072 of the same precision, and X is an integer type not narrower than
8073 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8074 if (INTEGRAL_TYPE_P (type)
8075 && TREE_CODE (op0) == BIT_NOT_EXPR
8076 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8077 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8078 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8079 {
8080 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8081 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8082 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8083 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8084 fold_convert_loc (loc, type, tem));
8085 }
8086
8087 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8088 type of X and Y (integer types only). */
8089 if (INTEGRAL_TYPE_P (type)
8090 && TREE_CODE (op0) == MULT_EXPR
8091 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8092 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8093 {
8094 /* Be careful not to introduce new overflows. */
8095 tree mult_type;
8096 if (TYPE_OVERFLOW_WRAPS (type))
8097 mult_type = type;
8098 else
8099 mult_type = unsigned_type_for (type);
8100
8101 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8102 {
8103 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8104 fold_convert_loc (loc, mult_type,
8105 TREE_OPERAND (op0, 0)),
8106 fold_convert_loc (loc, mult_type,
8107 TREE_OPERAND (op0, 1)));
8108 return fold_convert_loc (loc, type, tem);
8109 }
8110 }
8111
8112 return NULL_TREE;
8113
8114 case VIEW_CONVERT_EXPR:
8115 if (TREE_CODE (op0) == MEM_REF)
8116 return fold_build2_loc (loc, MEM_REF, type,
8117 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8118
8119 return NULL_TREE;
8120
8121 case NEGATE_EXPR:
8122 tem = fold_negate_expr (loc, arg0);
8123 if (tem)
8124 return fold_convert_loc (loc, type, tem);
8125 return NULL_TREE;
8126
8127 case ABS_EXPR:
8128 /* Convert fabs((double)float) into (double)fabsf(float). */
8129 if (TREE_CODE (arg0) == NOP_EXPR
8130 && TREE_CODE (type) == REAL_TYPE)
8131 {
8132 tree targ0 = strip_float_extensions (arg0);
8133 if (targ0 != arg0)
8134 return fold_convert_loc (loc, type,
8135 fold_build1_loc (loc, ABS_EXPR,
8136 TREE_TYPE (targ0),
8137 targ0));
8138 }
8139 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8140 else if (TREE_CODE (arg0) == ABS_EXPR)
8141 return arg0;
8142
8143 /* Strip sign ops from argument. */
8144 if (TREE_CODE (type) == REAL_TYPE)
8145 {
8146 tem = fold_strip_sign_ops (arg0);
8147 if (tem)
8148 return fold_build1_loc (loc, ABS_EXPR, type,
8149 fold_convert_loc (loc, type, tem));
8150 }
8151 return NULL_TREE;
8152
8153 case CONJ_EXPR:
8154 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8155 return fold_convert_loc (loc, type, arg0);
8156 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8157 {
8158 tree itype = TREE_TYPE (type);
8159 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8160 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8161 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8162 negate_expr (ipart));
8163 }
8164 if (TREE_CODE (arg0) == CONJ_EXPR)
8165 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8166 return NULL_TREE;
8167
8168 case BIT_NOT_EXPR:
8169 /* Convert ~ (-A) to A - 1. */
8170 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8171 return fold_build2_loc (loc, MINUS_EXPR, type,
8172 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8173 build_int_cst (type, 1));
8174 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8175 else if (INTEGRAL_TYPE_P (type)
8176 && ((TREE_CODE (arg0) == MINUS_EXPR
8177 && integer_onep (TREE_OPERAND (arg0, 1)))
8178 || (TREE_CODE (arg0) == PLUS_EXPR
8179 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8180 {
8181 /* Perform the negation in ARG0's type and only then convert
8182	     to TYPE so as to avoid introducing undefined behavior.  */
8183 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8184 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8185 TREE_OPERAND (arg0, 0));
8186 return fold_convert_loc (loc, type, t);
8187 }
8188 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8189 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8190 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8191 fold_convert_loc (loc, type,
8192 TREE_OPERAND (arg0, 0)))))
8193 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8194 fold_convert_loc (loc, type,
8195 TREE_OPERAND (arg0, 1)));
8196 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8197 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8198 fold_convert_loc (loc, type,
8199 TREE_OPERAND (arg0, 1)))))
8200 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8201 fold_convert_loc (loc, type,
8202 TREE_OPERAND (arg0, 0)), tem);
8203
8204 return NULL_TREE;
8205
8206 case TRUTH_NOT_EXPR:
8207 /* Note that the operand of this must be an int
8208 and its values must be 0 or 1.
8209 ("true" is a fixed value perhaps depending on the language,
8210 but we don't handle values other than 1 correctly yet.) */
8211 tem = fold_truth_not_expr (loc, arg0);
8212 if (!tem)
8213 return NULL_TREE;
8214 return fold_convert_loc (loc, type, tem);
8215
8216 case REALPART_EXPR:
8217 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8218 return fold_convert_loc (loc, type, arg0);
8219 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8220 {
8221 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8222 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8223 fold_build1_loc (loc, REALPART_EXPR, itype,
8224 TREE_OPERAND (arg0, 0)),
8225 fold_build1_loc (loc, REALPART_EXPR, itype,
8226 TREE_OPERAND (arg0, 1)));
8227 return fold_convert_loc (loc, type, tem);
8228 }
8229 if (TREE_CODE (arg0) == CONJ_EXPR)
8230 {
8231 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8232 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8233 TREE_OPERAND (arg0, 0));
8234 return fold_convert_loc (loc, type, tem);
8235 }
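      /* REALPART_EXPR <cexpi (x)> folds to a call to cos (x).  */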
8236 if (TREE_CODE (arg0) == CALL_EXPR)
8237 {
8238 tree fn = get_callee_fndecl (arg0);
8239 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8240 switch (DECL_FUNCTION_CODE (fn))
8241 {
8242 CASE_FLT_FN (BUILT_IN_CEXPI):
8243 fn = mathfn_built_in (type, BUILT_IN_COS);
8244 if (fn)
8245 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8246 break;
8247
8248 default:
8249 break;
8250 }
8251 }
8252 return NULL_TREE;
8253
8254 case IMAGPART_EXPR:
8255 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8256 return build_zero_cst (type);
8257 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8258 {
8259 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8260 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8261 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8262 TREE_OPERAND (arg0, 0)),
8263 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8264 TREE_OPERAND (arg0, 1)));
8265 return fold_convert_loc (loc, type, tem);
8266 }
8267 if (TREE_CODE (arg0) == CONJ_EXPR)
8268 {
8269 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8270 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8271 return fold_convert_loc (loc, type, negate_expr (tem));
8272 }
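      /* IMAGPART_EXPR <cexpi (x)> folds to a call to sin (x).  */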
8273 if (TREE_CODE (arg0) == CALL_EXPR)
8274 {
8275 tree fn = get_callee_fndecl (arg0);
8276 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8277 switch (DECL_FUNCTION_CODE (fn))
8278 {
8279 CASE_FLT_FN (BUILT_IN_CEXPI):
8280 fn = mathfn_built_in (type, BUILT_IN_SIN);
8281 if (fn)
8282 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8283 break;
8284
8285 default:
8286 break;
8287 }
8288 }
8289 return NULL_TREE;
8290
8291 case INDIRECT_REF:
8292 /* Fold *&X to X if X is an lvalue. */
8293 if (TREE_CODE (op0) == ADDR_EXPR)
8294 {
8295 tree op00 = TREE_OPERAND (op0, 0);
8296 if ((TREE_CODE (op00) == VAR_DECL
8297 || TREE_CODE (op00) == PARM_DECL
8298 || TREE_CODE (op00) == RESULT_DECL)
8299 && !TREE_READONLY (op00))
8300 return op00;
8301 }
8302 return NULL_TREE;
8303
8304 default:
8305 return NULL_TREE;
8306 } /* switch (code) */
8307 }
8308
8309
8310 /* If the operation was a conversion do _not_ mark a resulting constant
8311 with TREE_OVERFLOW if the original constant was not. These conversions
8312 have implementation defined behavior and retaining the TREE_OVERFLOW
8313 flag here would confuse later passes such as VRP. */
8314 tree
8315 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8316 tree type, tree op0)
8317 {
8318 tree res = fold_unary_loc (loc, code, type, op0);
8319 if (res
8320 && TREE_CODE (res) == INTEGER_CST
8321 && TREE_CODE (op0) == INTEGER_CST
8322 && CONVERT_EXPR_CODE_P (code))
8323 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8324
8325 return res;
8326 }
8327
8328 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8329 operands OP0 and OP1. LOC is the location of the resulting expression.
8330	   ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
8331 Return the folded expression if folding is successful. Otherwise,
8332 return NULL_TREE. */
8333 static tree
8334 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8335 tree arg0, tree arg1, tree op0, tree op1)
8336 {
8337 tree tem;
8338
8339 /* We only do these simplifications if we are optimizing. */
8340 if (!optimize)
8341 return NULL_TREE;
8342
8343 /* Check for things like (A || B) && (A || C). We can convert this
8344 to A || (B && C). Note that either operator can be any of the four
8345 truth and/or operations and the transformation will still be
8346 valid. Also note that we only care about order for the
8347 ANDIF and ORIF operators. If B contains side effects, this
8348 might change the truth-value of A. */
8349 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8350 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8351 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8352 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8353 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8354 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8355 {
8356 tree a00 = TREE_OPERAND (arg0, 0);
8357 tree a01 = TREE_OPERAND (arg0, 1);
8358 tree a10 = TREE_OPERAND (arg1, 0);
8359 tree a11 = TREE_OPERAND (arg1, 1);
8360 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8361 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8362 && (code == TRUTH_AND_EXPR
8363 || code == TRUTH_OR_EXPR));
8364
8365 if (operand_equal_p (a00, a10, 0))
8366 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8367 fold_build2_loc (loc, code, type, a01, a11));
8368 else if (commutative && operand_equal_p (a00, a11, 0))
8369 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8370 fold_build2_loc (loc, code, type, a01, a10));
8371 else if (commutative && operand_equal_p (a01, a10, 0))
8372 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8373 fold_build2_loc (loc, code, type, a00, a11));
8374
8375	      /* This case is tricky because we must either have commutative
8376 operators or else A10 must not have side-effects. */
8377
8378 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8379 && operand_equal_p (a01, a11, 0))
8380 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8381 fold_build2_loc (loc, code, type, a00, a10),
8382 a01);
8383 }
8384
8385 /* See if we can build a range comparison. */
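  /* E.g. a >= 0 && a < 10 can become a single unsigned range check,
     roughly (unsigned) a <= 9.  */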
8386 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8387 return tem;
8388
8389 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8390 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8391 {
8392 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8393 if (tem)
8394 return fold_build2_loc (loc, code, type, tem, arg1);
8395 }
8396
8397 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8398 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8399 {
8400 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8401 if (tem)
8402 return fold_build2_loc (loc, code, type, arg0, tem);
8403 }
8404
8405 /* Check for the possibility of merging component references. If our
8406 lhs is another similar operation, try to merge its rhs with our
8407 rhs. Then try to merge our lhs and rhs. */
8408 if (TREE_CODE (arg0) == code
8409 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8410 TREE_OPERAND (arg0, 1), arg1)))
8411 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8412
8413 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8414 return tem;
8415
8416 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8417 && (code == TRUTH_AND_EXPR
8418 || code == TRUTH_ANDIF_EXPR
8419 || code == TRUTH_OR_EXPR
8420 || code == TRUTH_ORIF_EXPR))
8421 {
8422 enum tree_code ncode, icode;
8423
8424 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8425 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8426 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8427
8428 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8429 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8430	 We don't want to pack more than two leaves into a non-IF AND/OR
8431	 expression.
8432	 If the tree code of the left-hand operand isn't an AND/OR-IF code
8433	 and isn't equal to IF-CODE, we don't want to add the right-hand operand.
8434	 If the inner right-hand side of the left-hand operand has
8435	 side-effects, or isn't simple, then we can't add to it, as otherwise
8436	 we might destroy the if-sequence.  */
8437 if (TREE_CODE (arg0) == icode
8438 && simple_operand_p_2 (arg1)
8439 /* Needed for sequence points to handle trappings, and
8440 side-effects. */
8441 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8442 {
8443 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8444 arg1);
8445 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8446 tem);
8447 }
8448	      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8449	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8450 else if (TREE_CODE (arg1) == icode
8451 && simple_operand_p_2 (arg0)
8452 /* Needed for sequence points to handle trappings, and
8453 side-effects. */
8454 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8455 {
8456 tem = fold_build2_loc (loc, ncode, type,
8457 arg0, TREE_OPERAND (arg1, 0));
8458 return fold_build2_loc (loc, icode, type, tem,
8459 TREE_OPERAND (arg1, 1));
8460 }
8461 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8462 into (A OR B).
8463	 For sequence point consistency, we need to check for trapping
8464	 and side-effects.  */
8465 else if (code == icode && simple_operand_p_2 (arg0)
8466 && simple_operand_p_2 (arg1))
8467 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8468 }
8469
8470 return NULL_TREE;
8471 }
8472
8473 /* Fold a binary expression of code CODE and type TYPE with operands
8474 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8475 Return the folded expression if folding is successful. Otherwise,
8476 return NULL_TREE. */
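/* For instance, MIN (MAX (a, 5), 5) folds to 5, preserving any side
   effects of evaluating a.  */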
8477
8478 static tree
8479 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8480 {
8481 enum tree_code compl_code;
8482
8483 if (code == MIN_EXPR)
8484 compl_code = MAX_EXPR;
8485 else if (code == MAX_EXPR)
8486 compl_code = MIN_EXPR;
8487 else
8488 gcc_unreachable ();
8489
8490 /* MIN (MAX (a, b), b) == b. */
8491 if (TREE_CODE (op0) == compl_code
8492 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8493 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8494
8495 /* MIN (MAX (b, a), b) == b. */
8496 if (TREE_CODE (op0) == compl_code
8497 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8498 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8499 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8500
8501 /* MIN (a, MAX (a, b)) == a. */
8502 if (TREE_CODE (op1) == compl_code
8503 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8504 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8505 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8506
8507 /* MIN (a, MAX (b, a)) == a. */
8508 if (TREE_CODE (op1) == compl_code
8509 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8510 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8511 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8512
8513 return NULL_TREE;
8514 }
8515
8516 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8517 by changing CODE to reduce the magnitude of constants involved in
8518 ARG0 of the comparison.
8519 Returns a canonicalized comparison tree if a simplification was
8520 possible, otherwise returns NULL_TREE.
8521 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8522 valid if signed overflow is undefined. */
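/* For instance, a - 10 < b becomes a - 9 <= b (valid only when
   signed overflow is undefined), and 5 <= b becomes b > 4.  */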
8523
8524 static tree
8525 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8526 tree arg0, tree arg1,
8527 bool *strict_overflow_p)
8528 {
8529 enum tree_code code0 = TREE_CODE (arg0);
8530 tree t, cst0 = NULL_TREE;
8531 int sgn0;
8532 bool swap = false;
8533
8534 /* Match A +- CST code arg1 and CST code arg1. We can change the
8535 first form only if overflow is undefined. */
8536 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8537 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8538 /* In principle pointers also have undefined overflow behavior,
8539 but that causes problems elsewhere. */
8540 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8541 && (code0 == MINUS_EXPR
8542 || code0 == PLUS_EXPR)
8543 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8544 || code0 == INTEGER_CST))
8545 return NULL_TREE;
8546
8547 /* Identify the constant in arg0 and its sign. */
8548 if (code0 == INTEGER_CST)
8549 cst0 = arg0;
8550 else
8551 cst0 = TREE_OPERAND (arg0, 1);
8552 sgn0 = tree_int_cst_sgn (cst0);
8553
8554 /* Overflowed constants and zero will cause problems. */
8555 if (integer_zerop (cst0)
8556 || TREE_OVERFLOW (cst0))
8557 return NULL_TREE;
8558
8559 /* See if we can reduce the magnitude of the constant in
8560 arg0 by changing the comparison code. */
8561 if (code0 == INTEGER_CST)
8562 {
8563 /* CST <= arg1 -> CST-1 < arg1. */
8564 if (code == LE_EXPR && sgn0 == 1)
8565 code = LT_EXPR;
8566 /* -CST < arg1 -> -CST-1 <= arg1. */
8567 else if (code == LT_EXPR && sgn0 == -1)
8568 code = LE_EXPR;
8569 /* CST > arg1 -> CST-1 >= arg1. */
8570 else if (code == GT_EXPR && sgn0 == 1)
8571 code = GE_EXPR;
8572 /* -CST >= arg1 -> -CST-1 > arg1. */
8573 else if (code == GE_EXPR && sgn0 == -1)
8574 code = GT_EXPR;
8575 else
8576 return NULL_TREE;
8577 /* arg1 code' CST' might be more canonical. */
8578 swap = true;
8579 }
8580 else
8581 {
8582 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8583 if (code == LT_EXPR
8584 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8585 code = LE_EXPR;
8586 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8587 else if (code == GT_EXPR
8588 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8589 code = GE_EXPR;
8590 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8591 else if (code == LE_EXPR
8592 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8593 code = LT_EXPR;
8594 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8595 else if (code == GE_EXPR
8596 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8597 code = GT_EXPR;
8598 else
8599 return NULL_TREE;
8600 *strict_overflow_p = true;
8601 }
8602
8603 /* Now build the constant reduced in magnitude. But not if that
8604	 would produce one outside of its type's range.  */
8605 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8606 && ((sgn0 == 1
8607 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8608 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8609 || (sgn0 == -1
8610 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8611 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8612 /* We cannot swap the comparison here as that would cause us to
8613 endlessly recurse. */
8614 return NULL_TREE;
8615
8616 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8617 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8618 if (code0 != INTEGER_CST)
8619 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8620 t = fold_convert (TREE_TYPE (arg1), t);
8621
8622	  /* If swapping might yield a more canonical form, do so.  */
8623 if (swap)
8624 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8625 else
8626 return fold_build2_loc (loc, code, type, t, arg1);
8627 }
8628
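/* An illustrative sketch (not part of the original source): assuming
   signed int i with undefined overflow, the canonicalization above
   turns

       i - 3 <  j   into   i - 2 <= j
       5 <= j       into   j > 4

   reducing the magnitude of the constant and, for a sole constant,
   swapping it to the second operand position.  */
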
8629 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8630 overflow further. Try to decrease the magnitude of constants involved
8631 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8632 and put sole constants at the second argument position.
8633 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8634
8635 static tree
8636 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8637 tree arg0, tree arg1)
8638 {
8639 tree t;
8640 bool strict_overflow_p;
8641 const char * const warnmsg = G_("assuming signed overflow does not occur "
8642 "when reducing constant in comparison");
8643
8644 /* Try canonicalization by simplifying arg0. */
8645 strict_overflow_p = false;
8646 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8647 &strict_overflow_p);
8648 if (t)
8649 {
8650 if (strict_overflow_p)
8651 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8652 return t;
8653 }
8654
8655 /* Try canonicalization by simplifying arg1 using the swapped
8656 comparison. */
8657 code = swap_tree_comparison (code);
8658 strict_overflow_p = false;
8659 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8660 &strict_overflow_p);
8661 if (t && strict_overflow_p)
8662 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8663 return t;
8664 }
8665
8666 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8667 space. This is used to avoid issuing overflow warnings for
8668    expressions like &p->x which cannot wrap.  */
8669
8670 static bool
8671 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8672 {
8673 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8674 return true;
8675
8676 if (bitpos < 0)
8677 return true;
8678
8679 wide_int wi_offset;
8680 int precision = TYPE_PRECISION (TREE_TYPE (base));
8681 if (offset == NULL_TREE)
8682 wi_offset = wi::zero (precision);
8683 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8684 return true;
8685 else
8686 wi_offset = offset;
8687
8688 bool overflow;
8689 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8690 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8691 if (overflow)
8692 return true;
8693
8694 if (!wi::fits_uhwi_p (total))
8695 return true;
8696
8697 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8698 if (size <= 0)
8699 return true;
8700
8701 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8702 array. */
8703 if (TREE_CODE (base) == ADDR_EXPR)
8704 {
8705 HOST_WIDE_INT base_size;
8706
8707 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8708 if (base_size > 0 && size < base_size)
8709 size = base_size;
8710 }
8711
8712 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8713 }
8714
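/* For illustration: given "struct S { int a, b; } *p", the address
   &p->b has bitpos 32 and no variable offset, so OFFSET + BITPOS / 8
   is 4, which does not exceed the 8-byte size of struct S;
   pointer_may_wrap_p therefore returns false and no wraparound
   warning is issued for comparisons involving it.  */
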
8715 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8716    of sizetype kind.  This makes sure to properly sign-extend the
8717 constant. */
8718
8719 static HOST_WIDE_INT
8720 size_low_cst (const_tree t)
8721 {
8722 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8723 int prec = TYPE_PRECISION (TREE_TYPE (t));
8724 if (prec < HOST_BITS_PER_WIDE_INT)
8725 return sext_hwi (w, prec);
8726 return w;
8727 }
8728
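/* A worked example: on a host with 64-bit HOST_WIDE_INT, a 32-bit
   sizetype constant with all 32 bits set has element 0xffffffff;
   sign-extending from 32 bits yields -1, the value the constant
   represents when treated as signed.  */
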
8729 /* Subroutine of fold_binary. This routine performs all of the
8730 transformations that are common to the equality/inequality
8731 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8732    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
8733    fold_binary should call fold_binary instead.  Fold a comparison with
8734 tree code CODE and type TYPE with operands OP0 and OP1. Return
8735 the folded comparison or NULL_TREE. */
8736
8737 static tree
8738 fold_comparison (location_t loc, enum tree_code code, tree type,
8739 tree op0, tree op1)
8740 {
8741 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8742 tree arg0, arg1, tem;
8743
8744 arg0 = op0;
8745 arg1 = op1;
8746
8747 STRIP_SIGN_NOPS (arg0);
8748 STRIP_SIGN_NOPS (arg1);
8749
8750 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8751 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8752 && (equality_code
8753 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8754 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8755 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8756 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8757 && TREE_CODE (arg1) == INTEGER_CST
8758 && !TREE_OVERFLOW (arg1))
8759 {
8760 const enum tree_code
8761 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8762 tree const1 = TREE_OPERAND (arg0, 1);
8763 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8764 tree variable = TREE_OPERAND (arg0, 0);
8765 tree new_const = int_const_binop (reverse_op, const2, const1);
8766
8767 /* If the constant operation overflowed this can be
8768 simplified as a comparison against INT_MAX/INT_MIN. */
8769 if (TREE_OVERFLOW (new_const)
8770 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8771 {
8772 int const1_sgn = tree_int_cst_sgn (const1);
8773 enum tree_code code2 = code;
8774
8775 /* Get the sign of the constant on the lhs if the
8776 operation were VARIABLE + CONST1. */
8777 if (TREE_CODE (arg0) == MINUS_EXPR)
8778 const1_sgn = -const1_sgn;
8779
8780 /* The sign of the constant determines if we overflowed
8781 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8782 Canonicalize to the INT_MIN overflow by swapping the comparison
8783 if necessary. */
8784 if (const1_sgn == -1)
8785 code2 = swap_tree_comparison (code);
8786
8787 /* We now can look at the canonicalized case
8788 VARIABLE + 1 CODE2 INT_MIN
8789 and decide on the result. */
8790 switch (code2)
8791 {
8792 case EQ_EXPR:
8793 case LT_EXPR:
8794 case LE_EXPR:
8795 return
8796 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8797
8798 case NE_EXPR:
8799 case GE_EXPR:
8800 case GT_EXPR:
8801 return
8802 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8803
8804 default:
8805 gcc_unreachable ();
8806 }
8807 }
8808 else
8809 {
8810 if (!equality_code)
8811 fold_overflow_warning ("assuming signed overflow does not occur "
8812 "when changing X +- C1 cmp C2 to "
8813 "X cmp C2 -+ C1",
8814 WARN_STRICT_OVERFLOW_COMPARISON);
8815 return fold_build2_loc (loc, code, type, variable, new_const);
8816 }
8817 }
8818
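/* Illustrative examples, assuming signed int x:

       x + 10 <  20   becomes   x < 10
       x - 10 == 0    becomes   x == 10

   and "x - 1 < INT_MAX" makes the computation of INT_MAX + 1
   overflow, in which case the comparison is decided outright and
   folds to true.  The inequality forms rely on signed overflow
   being undefined, hence the warning above.  */
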
8819 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8820 if (TREE_CODE (arg0) == MINUS_EXPR
8821 && equality_code
8822 && integer_zerop (arg1))
8823 {
8824 /* ??? The transformation is valid for the other operators if overflow
8825 is undefined for the type, but performing it here badly interacts
8826 with the transformation in fold_cond_expr_with_comparison which
8827 	 attempts to synthesize ABS_EXPR.  */
8828 if (!equality_code)
8829 fold_overflow_warning ("assuming signed overflow does not occur "
8830 "when changing X - Y cmp 0 to X cmp Y",
8831 WARN_STRICT_OVERFLOW_COMPARISON);
8832 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8833 TREE_OPERAND (arg0, 1));
8834 }
8835
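/* For example, "x - y == 0" becomes "x == y" and "x - y != 0"
   becomes "x != y"; the ordered variants are deliberately not
   handled here for the reason given above.  */
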
8836 /* For comparisons of pointers we can decompose it to a compile time
8837 comparison of the base objects and the offsets into the object.
8838 This requires at least one operand being an ADDR_EXPR or a
8839 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8840 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8841 && (TREE_CODE (arg0) == ADDR_EXPR
8842 || TREE_CODE (arg1) == ADDR_EXPR
8843 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8844 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8845 {
8846 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8847 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8848 machine_mode mode;
8849 int volatilep, unsignedp;
8850 bool indirect_base0 = false, indirect_base1 = false;
8851
8852 /* Get base and offset for the access. Strip ADDR_EXPR for
8853 get_inner_reference, but put it back by stripping INDIRECT_REF
8854 off the base object if possible. indirect_baseN will be true
8855 if baseN is not an address but refers to the object itself. */
8856 base0 = arg0;
8857 if (TREE_CODE (arg0) == ADDR_EXPR)
8858 {
8859 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8860 &bitsize, &bitpos0, &offset0, &mode,
8861 &unsignedp, &volatilep, false);
8862 if (TREE_CODE (base0) == INDIRECT_REF)
8863 base0 = TREE_OPERAND (base0, 0);
8864 else
8865 indirect_base0 = true;
8866 }
8867 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8868 {
8869 base0 = TREE_OPERAND (arg0, 0);
8870 STRIP_SIGN_NOPS (base0);
8871 if (TREE_CODE (base0) == ADDR_EXPR)
8872 {
8873 base0 = TREE_OPERAND (base0, 0);
8874 indirect_base0 = true;
8875 }
8876 offset0 = TREE_OPERAND (arg0, 1);
8877 if (tree_fits_shwi_p (offset0))
8878 {
8879 HOST_WIDE_INT off = size_low_cst (offset0);
8880 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8881 * BITS_PER_UNIT)
8882 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8883 {
8884 bitpos0 = off * BITS_PER_UNIT;
8885 offset0 = NULL_TREE;
8886 }
8887 }
8888 }
8889
8890 base1 = arg1;
8891 if (TREE_CODE (arg1) == ADDR_EXPR)
8892 {
8893 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8894 &bitsize, &bitpos1, &offset1, &mode,
8895 &unsignedp, &volatilep, false);
8896 if (TREE_CODE (base1) == INDIRECT_REF)
8897 base1 = TREE_OPERAND (base1, 0);
8898 else
8899 indirect_base1 = true;
8900 }
8901 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8902 {
8903 base1 = TREE_OPERAND (arg1, 0);
8904 STRIP_SIGN_NOPS (base1);
8905 if (TREE_CODE (base1) == ADDR_EXPR)
8906 {
8907 base1 = TREE_OPERAND (base1, 0);
8908 indirect_base1 = true;
8909 }
8910 offset1 = TREE_OPERAND (arg1, 1);
8911 if (tree_fits_shwi_p (offset1))
8912 {
8913 HOST_WIDE_INT off = size_low_cst (offset1);
8914 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8915 * BITS_PER_UNIT)
8916 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8917 {
8918 bitpos1 = off * BITS_PER_UNIT;
8919 offset1 = NULL_TREE;
8920 }
8921 }
8922 }
8923
8924 /* A local variable can never be pointed to by
8925 the default SSA name of an incoming parameter. */
8926 if ((TREE_CODE (arg0) == ADDR_EXPR
8927 && indirect_base0
8928 && TREE_CODE (base0) == VAR_DECL
8929 && auto_var_in_fn_p (base0, current_function_decl)
8930 && !indirect_base1
8931 && TREE_CODE (base1) == SSA_NAME
8932 && SSA_NAME_IS_DEFAULT_DEF (base1)
8933 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8934 || (TREE_CODE (arg1) == ADDR_EXPR
8935 && indirect_base1
8936 && TREE_CODE (base1) == VAR_DECL
8937 && auto_var_in_fn_p (base1, current_function_decl)
8938 && !indirect_base0
8939 && TREE_CODE (base0) == SSA_NAME
8940 && SSA_NAME_IS_DEFAULT_DEF (base0)
8941 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8942 {
8943 if (code == NE_EXPR)
8944 return constant_boolean_node (1, type);
8945 else if (code == EQ_EXPR)
8946 return constant_boolean_node (0, type);
8947 }
8948 /* If we have equivalent bases we might be able to simplify. */
8949 else if (indirect_base0 == indirect_base1
8950 && operand_equal_p (base0, base1, 0))
8951 {
8952 /* We can fold this expression to a constant if the non-constant
8953 offset parts are equal. */
8954 if ((offset0 == offset1
8955 || (offset0 && offset1
8956 && operand_equal_p (offset0, offset1, 0)))
8957 && (code == EQ_EXPR
8958 || code == NE_EXPR
8959 || (indirect_base0 && DECL_P (base0))
8960 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8961
8962 {
8963 if (!equality_code
8964 && bitpos0 != bitpos1
8965 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8966 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8967 fold_overflow_warning (("assuming pointer wraparound does not "
8968 "occur when comparing P +- C1 with "
8969 "P +- C2"),
8970 WARN_STRICT_OVERFLOW_CONDITIONAL);
8971
8972 switch (code)
8973 {
8974 case EQ_EXPR:
8975 return constant_boolean_node (bitpos0 == bitpos1, type);
8976 case NE_EXPR:
8977 return constant_boolean_node (bitpos0 != bitpos1, type);
8978 case LT_EXPR:
8979 return constant_boolean_node (bitpos0 < bitpos1, type);
8980 case LE_EXPR:
8981 return constant_boolean_node (bitpos0 <= bitpos1, type);
8982 case GE_EXPR:
8983 return constant_boolean_node (bitpos0 >= bitpos1, type);
8984 case GT_EXPR:
8985 return constant_boolean_node (bitpos0 > bitpos1, type);
8986 default:;
8987 }
8988 }
8989 /* We can simplify the comparison to a comparison of the variable
8990 offset parts if the constant offset parts are equal.
8991 Be careful to use signed sizetype here because otherwise we
8992 mess with array offsets in the wrong way. This is possible
8993 	 because pointer arithmetic is restricted to remain within an
8994 	 object, and overflow on pointer differences is undefined as of
8995 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8996 else if (bitpos0 == bitpos1
8997 && (equality_code
8998 || (indirect_base0 && DECL_P (base0))
8999 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9000 {
9001 	  /* By converting to signed sizetype we cover middle-end pointer
9002 	     arithmetic, which operates on unsigned types of sizetype
9003 	     precision, and ARRAY_REF offsets, which are properly sign- or
9004 	     zero-extended from their type in case it is narrower than
9005 	     sizetype.  */
9006 if (offset0 == NULL_TREE)
9007 offset0 = build_int_cst (ssizetype, 0);
9008 else
9009 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9010 if (offset1 == NULL_TREE)
9011 offset1 = build_int_cst (ssizetype, 0);
9012 else
9013 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9014
9015 if (!equality_code
9016 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9017 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9018 fold_overflow_warning (("assuming pointer wraparound does not "
9019 "occur when comparing P +- C1 with "
9020 "P +- C2"),
9021 WARN_STRICT_OVERFLOW_COMPARISON);
9022
9023 return fold_build2_loc (loc, code, type, offset0, offset1);
9024 }
9025 }
9026       /* For non-equal bases we can simplify if they are addresses of
9027 	 declarations with different addresses.  */
9028 else if (indirect_base0 && indirect_base1
9029 /* We know that !operand_equal_p (base0, base1, 0)
9030 because the if condition was false. But make
9031 sure two decls are not the same. */
9032 && base0 != base1
9033 && TREE_CODE (arg0) == ADDR_EXPR
9034 && TREE_CODE (arg1) == ADDR_EXPR
9035 && DECL_P (base0)
9036 && DECL_P (base1)
9037 /* Watch for aliases. */
9038 && (!decl_in_symtab_p (base0)
9039 || !decl_in_symtab_p (base1)
9040 || !symtab_node::get_create (base0)->equal_address_to
9041 (symtab_node::get_create (base1))))
9042 {
9043 if (code == EQ_EXPR)
9044 return omit_two_operands_loc (loc, type, boolean_false_node,
9045 arg0, arg1);
9046 else if (code == NE_EXPR)
9047 return omit_two_operands_loc (loc, type, boolean_true_node,
9048 arg0, arg1);
9049 }
9050 /* For equal offsets we can simplify to a comparison of the
9051 base addresses. */
9052 else if (bitpos0 == bitpos1
9053 && (indirect_base0
9054 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9055 && (indirect_base1
9056 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9057 && ((offset0 == offset1)
9058 || (offset0 && offset1
9059 && operand_equal_p (offset0, offset1, 0))))
9060 {
9061 if (indirect_base0)
9062 base0 = build_fold_addr_expr_loc (loc, base0);
9063 if (indirect_base1)
9064 base1 = build_fold_addr_expr_loc (loc, base1);
9065 return fold_build2_loc (loc, code, type, base0, base1);
9066 }
9067 }
9068
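/* Illustrative examples: given "int a[4];", the comparison
   "&a[1] < &a[2]" decomposes to the common base "a" with bit
   positions 32 and 64 and folds to true, while "&a == &b" for
   distinct declarations that cannot share an address folds to
   false.  */
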
9069 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9070 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9071 the resulting offset is smaller in absolute value than the
9072 original one and has the same sign. */
9073 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9074 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9075 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9076 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9077 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9078 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9079 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9080 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9081 {
9082 tree const1 = TREE_OPERAND (arg0, 1);
9083 tree const2 = TREE_OPERAND (arg1, 1);
9084 tree variable1 = TREE_OPERAND (arg0, 0);
9085 tree variable2 = TREE_OPERAND (arg1, 0);
9086 tree cst;
9087 const char * const warnmsg = G_("assuming signed overflow does not "
9088 "occur when combining constants around "
9089 "a comparison");
9090
9091 /* Put the constant on the side where it doesn't overflow and is
9092 	 of lower absolute value and of the same sign as before.  */
9093 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9094 ? MINUS_EXPR : PLUS_EXPR,
9095 const2, const1);
9096 if (!TREE_OVERFLOW (cst)
9097 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9098 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9099 {
9100 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9101 return fold_build2_loc (loc, code, type,
9102 variable1,
9103 fold_build2_loc (loc, TREE_CODE (arg1),
9104 TREE_TYPE (arg1),
9105 variable2, cst));
9106 }
9107
9108 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9109 ? MINUS_EXPR : PLUS_EXPR,
9110 const1, const2);
9111 if (!TREE_OVERFLOW (cst)
9112 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9113 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9114 {
9115 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9116 return fold_build2_loc (loc, code, type,
9117 fold_build2_loc (loc, TREE_CODE (arg0),
9118 TREE_TYPE (arg0),
9119 variable1, cst),
9120 variable2);
9121 }
9122 }
9123
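/* A worked example: for signed x and y, "x + 2 < y + 5" becomes
   "x < y + 3"; the combined constant 3 is smaller in magnitude than
   5 and has the same sign, so the rewrite cannot introduce a new
   overflow.  */
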
9124 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9125 signed arithmetic case. That form is created by the compiler
9126 often enough for folding it to be of value. One example is in
9127 computing loop trip counts after Operator Strength Reduction. */
9128 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9129 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9130 && TREE_CODE (arg0) == MULT_EXPR
9131 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9132 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9133 && integer_zerop (arg1))
9134 {
9135 tree const1 = TREE_OPERAND (arg0, 1);
9136 tree const2 = arg1; /* zero */
9137 tree variable1 = TREE_OPERAND (arg0, 0);
9138 enum tree_code cmp_code = code;
9139
9140 /* Handle unfolded multiplication by zero. */
9141 if (integer_zerop (const1))
9142 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9143
9144 fold_overflow_warning (("assuming signed overflow does not occur when "
9145 "eliminating multiplication in comparison "
9146 "with zero"),
9147 WARN_STRICT_OVERFLOW_COMPARISON);
9148
9149 /* If const1 is negative we swap the sense of the comparison. */
9150 if (tree_int_cst_sgn (const1) < 0)
9151 cmp_code = swap_tree_comparison (cmp_code);
9152
9153 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9154 }
9155
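/* For example, with signed x, "x * 4 > 0" becomes "x > 0" and
   "x * -2 > 0" becomes "x < 0", the comparison being swapped for a
   negative multiplier.  */
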
9156 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9157 if (tem)
9158 return tem;
9159
9160 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9161 {
9162 tree targ0 = strip_float_extensions (arg0);
9163 tree targ1 = strip_float_extensions (arg1);
9164 tree newtype = TREE_TYPE (targ0);
9165
9166 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9167 newtype = TREE_TYPE (targ1);
9168
9169 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9170 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9171 return fold_build2_loc (loc, code, type,
9172 fold_convert_loc (loc, newtype, targ0),
9173 fold_convert_loc (loc, newtype, targ1));
9174
9175 /* (-a) CMP (-b) -> b CMP a */
9176 if (TREE_CODE (arg0) == NEGATE_EXPR
9177 && TREE_CODE (arg1) == NEGATE_EXPR)
9178 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9179 TREE_OPERAND (arg0, 0));
9180
9181 if (TREE_CODE (arg1) == REAL_CST)
9182 {
9183 REAL_VALUE_TYPE cst;
9184 cst = TREE_REAL_CST (arg1);
9185
9186 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9187 if (TREE_CODE (arg0) == NEGATE_EXPR)
9188 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9189 TREE_OPERAND (arg0, 0),
9190 build_real (TREE_TYPE (arg1),
9191 real_value_negate (&cst)));
9192
9193 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9194 /* a CMP (-0) -> a CMP 0 */
9195 if (REAL_VALUE_MINUS_ZERO (cst))
9196 return fold_build2_loc (loc, code, type, arg0,
9197 build_real (TREE_TYPE (arg1), dconst0));
9198
9199 /* x != NaN is always true, other ops are always false. */
9200 if (REAL_VALUE_ISNAN (cst)
9201 && ! HONOR_SNANS (arg1))
9202 {
9203 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9204 return omit_one_operand_loc (loc, type, tem, arg0);
9205 }
9206
9207 /* Fold comparisons against infinity. */
9208 if (REAL_VALUE_ISINF (cst)
9209 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9210 {
9211 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9212 if (tem != NULL_TREE)
9213 return tem;
9214 }
9215 }
9216
9217 /* If this is a comparison of a real constant with a PLUS_EXPR
9218 or a MINUS_EXPR of a real constant, we can convert it into a
9219 comparison with a revised real constant as long as no overflow
9220 occurs when unsafe_math_optimizations are enabled. */
9221 if (flag_unsafe_math_optimizations
9222 && TREE_CODE (arg1) == REAL_CST
9223 && (TREE_CODE (arg0) == PLUS_EXPR
9224 || TREE_CODE (arg0) == MINUS_EXPR)
9225 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9226 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9227 ? MINUS_EXPR : PLUS_EXPR,
9228 arg1, TREE_OPERAND (arg0, 1)))
9229 && !TREE_OVERFLOW (tem))
9230 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9231
9232 /* Likewise, we can simplify a comparison of a real constant with
9233 a MINUS_EXPR whose first operand is also a real constant, i.e.
9234 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9235 floating-point types only if -fassociative-math is set. */
9236 if (flag_associative_math
9237 && TREE_CODE (arg1) == REAL_CST
9238 && TREE_CODE (arg0) == MINUS_EXPR
9239 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9240 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9241 arg1))
9242 && !TREE_OVERFLOW (tem))
9243 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9244 TREE_OPERAND (arg0, 1), tem);
9245
9246 /* Fold comparisons against built-in math functions. */
9247 if (TREE_CODE (arg1) == REAL_CST
9248 && flag_unsafe_math_optimizations
9249 && ! flag_errno_math)
9250 {
9251 enum built_in_function fcode = builtin_mathfn_code (arg0);
9252
9253 if (fcode != END_BUILTINS)
9254 {
9255 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9256 if (tem != NULL_TREE)
9257 return tem;
9258 }
9259 }
9260 }
9261
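/* Illustrative examples for the floating-point cases above:

       (double) f1 < (double) f2   becomes   f1 < f2   (float f1, f2)
       -a < -b                     becomes   b < a
       x < -0.0                    becomes   x < 0.0

   and "x != NaN" folds to true, the other comparisons against NaN
   to false, when signaling NaNs need not be honored.  */
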
9262 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9263 && CONVERT_EXPR_P (arg0))
9264 {
9265 /* If we are widening one operand of an integer comparison,
9266 see if the other operand is similarly being widened. Perhaps we
9267 can do the comparison in the narrower type. */
9268 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9269 if (tem)
9270 return tem;
9271
9272 /* Or if we are changing signedness. */
9273 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9274 if (tem)
9275 return tem;
9276 }
9277
9278 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9279 constant, we can simplify it. */
9280 if (TREE_CODE (arg1) == INTEGER_CST
9281 && (TREE_CODE (arg0) == MIN_EXPR
9282 || TREE_CODE (arg0) == MAX_EXPR)
9283 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9284 {
9285 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9286 if (tem)
9287 return tem;
9288 }
9289
9290 /* Simplify comparison of something with itself. (For IEEE
9291 floating-point, we can only do some of these simplifications.) */
9292 if (operand_equal_p (arg0, arg1, 0))
9293 {
9294 switch (code)
9295 {
9296 case EQ_EXPR:
9297 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9298 || ! HONOR_NANS (arg0))
9299 return constant_boolean_node (1, type);
9300 break;
9301
9302 case GE_EXPR:
9303 case LE_EXPR:
9304 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9305 || ! HONOR_NANS (arg0))
9306 return constant_boolean_node (1, type);
9307 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9308
9309 case NE_EXPR:
9310 /* For NE, we can only do this simplification if integer
9311 or we don't honor IEEE floating point NaNs. */
9312 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9313 && HONOR_NANS (arg0))
9314 break;
9315 /* ... fall through ... */
9316 case GT_EXPR:
9317 case LT_EXPR:
9318 return constant_boolean_node (0, type);
9319 default:
9320 gcc_unreachable ();
9321 }
9322 }
9323
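/* For example, "x == x" folds to true for integers, but is left
   alone for IEEE floats when NaNs are honored, since NaN != NaN;
   "x < x" folds to false in either case.  */
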
9324 /* If we are comparing an expression that just has comparisons
9325 of two integer values, arithmetic expressions of those comparisons,
9326 and constants, we can simplify it. There are only three cases
9327 to check: the two values can either be equal, the first can be
9328 greater, or the second can be greater. Fold the expression for
9329 those three values. Since each value must be 0 or 1, we have
9330 eight possibilities, each of which corresponds to the constant 0
9331 or 1 or one of the six possible comparisons.
9332
9333 This handles common cases like (a > b) == 0 but also handles
9334 expressions like ((x > y) - (y > x)) > 0, which supposedly
9335 occur in macroized code. */
9336
9337 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9338 {
9339 tree cval1 = 0, cval2 = 0;
9340 int save_p = 0;
9341
9342 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9343 /* Don't handle degenerate cases here; they should already
9344 have been handled anyway. */
9345 && cval1 != 0 && cval2 != 0
9346 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9347 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9348 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9349 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9350 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9351 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9352 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9353 {
9354 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9355 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9356
9357 /* We can't just pass T to eval_subst in case cval1 or cval2
9358 was the same as ARG1. */
9359
9360 tree high_result
9361 = fold_build2_loc (loc, code, type,
9362 eval_subst (loc, arg0, cval1, maxval,
9363 cval2, minval),
9364 arg1);
9365 tree equal_result
9366 = fold_build2_loc (loc, code, type,
9367 eval_subst (loc, arg0, cval1, maxval,
9368 cval2, maxval),
9369 arg1);
9370 tree low_result
9371 = fold_build2_loc (loc, code, type,
9372 eval_subst (loc, arg0, cval1, minval,
9373 cval2, maxval),
9374 arg1);
9375
9376 /* All three of these results should be 0 or 1. Confirm they are.
9377 Then use those values to select the proper code to use. */
9378
9379 if (TREE_CODE (high_result) == INTEGER_CST
9380 && TREE_CODE (equal_result) == INTEGER_CST
9381 && TREE_CODE (low_result) == INTEGER_CST)
9382 {
9383 /* Make a 3-bit mask with the high-order bit being the
9384 value for `>', the next for '=', and the low for '<'. */
9385 switch ((integer_onep (high_result) * 4)
9386 + (integer_onep (equal_result) * 2)
9387 + integer_onep (low_result))
9388 {
9389 case 0:
9390 /* Always false. */
9391 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9392 case 1:
9393 code = LT_EXPR;
9394 break;
9395 case 2:
9396 code = EQ_EXPR;
9397 break;
9398 case 3:
9399 code = LE_EXPR;
9400 break;
9401 case 4:
9402 code = GT_EXPR;
9403 break;
9404 case 5:
9405 code = NE_EXPR;
9406 break;
9407 case 6:
9408 code = GE_EXPR;
9409 break;
9410 case 7:
9411 /* Always true. */
9412 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9413 }
9414
9415 if (save_p)
9416 {
9417 tem = save_expr (build2 (code, type, cval1, cval2));
9418 SET_EXPR_LOCATION (tem, loc);
9419 return tem;
9420 }
9421 return fold_build2_loc (loc, code, type, cval1, cval2);
9422 }
9423 }
9424 }
9425
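/* A worked example: for "(a > b) == 0", substituting (max, min),
   (max, max) and (min, max) for (a, b) gives high_result 0,
   equal_result 1 and low_result 1, i.e. mask 3, so the whole
   expression folds to "a <= b".  */
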
9426 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9427 into a single range test. */
9428 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9429 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9430 && TREE_CODE (arg1) == INTEGER_CST
9431 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9432 && !integer_zerop (TREE_OPERAND (arg0, 1))
9433 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9434 && !TREE_OVERFLOW (arg1))
9435 {
9436 tem = fold_div_compare (loc, code, type, arg0, arg1);
9437 if (tem != NULL_TREE)
9438 return tem;
9439 }
9440
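/* For example, "x / 4 == 2" with truncating division holds exactly
   for x in [8, 11], so it can be folded into a single range test
   on x.  */
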
9441 /* Fold ~X op ~Y as Y op X. */
9442 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9443 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9444 {
9445 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9446 return fold_build2_loc (loc, code, type,
9447 fold_convert_loc (loc, cmp_type,
9448 TREE_OPERAND (arg1, 0)),
9449 TREE_OPERAND (arg0, 0));
9450 }
9451
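/* For example, "~x < ~y" becomes "y < x", since bitwise NOT
   reverses the ordering; the fold below likewise turns "~x == 5"
   into "x == ~5", i.e. "x == -6" in two's complement.  */
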
9452 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9453 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9454 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9455 {
9456 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9457 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9458 TREE_OPERAND (arg0, 0),
9459 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9460 fold_convert_loc (loc, cmp_type, arg1)));
9461 }
9462
9463 return NULL_TREE;
9464 }
9465
9466
9467 /* Subroutine of fold_binary. Optimize complex multiplications of the
9468 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9469 argument EXPR represents the expression "z" of type TYPE. */
9470
9471 static tree
9472 fold_mult_zconjz (location_t loc, tree type, tree expr)
9473 {
9474 tree itype = TREE_TYPE (type);
9475 tree rpart, ipart, tem;
9476
9477 if (TREE_CODE (expr) == COMPLEX_EXPR)
9478 {
9479 rpart = TREE_OPERAND (expr, 0);
9480 ipart = TREE_OPERAND (expr, 1);
9481 }
9482 else if (TREE_CODE (expr) == COMPLEX_CST)
9483 {
9484 rpart = TREE_REALPART (expr);
9485 ipart = TREE_IMAGPART (expr);
9486 }
9487 else
9488 {
9489 expr = save_expr (expr);
9490 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9491 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9492 }
9493
9494 rpart = save_expr (rpart);
9495 ipart = save_expr (ipart);
9496 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9497 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9498 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9499 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9500 build_zero_cst (itype));
9501 }
9502
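/* For example, with z = a + b*i we have z * conj(z)
   = (a + b*i) * (a - b*i) = a*a + b*b, a real value, which is why
   the imaginary part of the result is built as a zero constant
   above.  */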
9503
9504 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9505 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9506 guarantees that P and N have the same least significant log2(M) bits.
9507 N is not otherwise constrained. In particular, N is not normalized to
9508 0 <= N < M as is common. In general, the precise value of P is unknown.
9509 M is chosen as large as possible such that constant N can be determined.
9510
9511 Returns M and sets *RESIDUE to N.
9512
9513 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9514 account. This is not always possible due to PR 35705.
9515 */
9516
9517 static unsigned HOST_WIDE_INT
9518 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9519 bool allow_func_align)
9520 {
9521 enum tree_code code;
9522
9523 *residue = 0;
9524
9525 code = TREE_CODE (expr);
9526 if (code == ADDR_EXPR)
9527 {
9528 unsigned int bitalign;
9529 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9530 *residue /= BITS_PER_UNIT;
9531 return bitalign / BITS_PER_UNIT;
9532 }
9533 else if (code == POINTER_PLUS_EXPR)
9534 {
9535 tree op0, op1;
9536 unsigned HOST_WIDE_INT modulus;
9537 enum tree_code inner_code;
9538
9539 op0 = TREE_OPERAND (expr, 0);
9540 STRIP_NOPS (op0);
9541 modulus = get_pointer_modulus_and_residue (op0, residue,
9542 allow_func_align);
9543
9544 op1 = TREE_OPERAND (expr, 1);
9545 STRIP_NOPS (op1);
9546 inner_code = TREE_CODE (op1);
9547 if (inner_code == INTEGER_CST)
9548 {
9549 *residue += TREE_INT_CST_LOW (op1);
9550 return modulus;
9551 }
9552 else if (inner_code == MULT_EXPR)
9553 {
9554 op1 = TREE_OPERAND (op1, 1);
9555 if (TREE_CODE (op1) == INTEGER_CST)
9556 {
9557 unsigned HOST_WIDE_INT align;
9558
9559 /* Compute the greatest power-of-2 divisor of op1. */
9560 align = TREE_INT_CST_LOW (op1);
9561 align &= -align;
9562
9563 	      /* If align is non-zero and less than modulus, replace
9564 		 modulus with align.  If align is 0, then either op1 is 0
9565 or the greatest power-of-2 divisor of op1 doesn't fit in an
9566 unsigned HOST_WIDE_INT. In either case, no additional
9567 constraint is imposed. */
9568 if (align)
9569 modulus = MIN (modulus, align);
9570
9571 return modulus;
9572 }
9573 }
9574 }
9575
9576 /* If we get here, we were unable to determine anything useful about the
9577 expression. */
9578 return 1;
9579 }
9580
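/* A worked example: for "&a + i*8 + 3" where "a" is known to be
   16-byte aligned, the ADDR_EXPR gives modulus 16, the MULT_EXPR by
   8 lowers it to MIN (16, 8) = 8, and the trailing constant sets the
   residue to 3, so the pointer value is congruent to 3 modulo 8.  */
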
9581 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9582 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9583
9584 static bool
9585 vec_cst_ctor_to_array (tree arg, tree *elts)
9586 {
9587 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9588
9589 if (TREE_CODE (arg) == VECTOR_CST)
9590 {
9591 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9592 elts[i] = VECTOR_CST_ELT (arg, i);
9593 }
9594 else if (TREE_CODE (arg) == CONSTRUCTOR)
9595 {
9596 constructor_elt *elt;
9597
9598 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9599 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9600 return false;
9601 else
9602 elts[i] = elt->value;
9603 }
9604 else
9605 return false;
9606 for (; i < nelts; i++)
9607 elts[i]
9608 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9609 return true;
9610 }
9611
9612 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9613 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9614 NULL_TREE otherwise. */
9615
9616 static tree
9617 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9618 {
9619 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9620 tree *elts;
9621 bool need_ctor = false;
9622
9623 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9624 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9625 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9626 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9627 return NULL_TREE;
9628
9629 elts = XALLOCAVEC (tree, nelts * 3);
9630 if (!vec_cst_ctor_to_array (arg0, elts)
9631 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9632 return NULL_TREE;
9633
9634 for (i = 0; i < nelts; i++)
9635 {
9636 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9637 need_ctor = true;
9638 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9639 }
9640
9641 if (need_ctor)
9642 {
9643 vec<constructor_elt, va_gc> *v;
9644 vec_alloc (v, nelts);
9645 for (i = 0; i < nelts; i++)
9646 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9647 return build_constructor (type, v);
9648 }
9649 else
9650 return build_vector (type, &elts[2 * nelts]);
9651 }
9652
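/* A worked example: permuting {1,2,3,4} and {5,6,7,8} with the
   selector {0,4,1,5} indexes into the concatenation of the two
   element arrays and produces {1,5,2,6}.  */
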
9653 /* Try to fold a pointer difference of type TYPE between two address
9654    expressions of array references AREF0 and AREF1 using location LOC.
9655    Return a simplified expression for the difference or NULL_TREE.  */
9656
9657 static tree
9658 fold_addr_of_array_ref_difference (location_t loc, tree type,
9659 tree aref0, tree aref1)
9660 {
9661 tree base0 = TREE_OPERAND (aref0, 0);
9662 tree base1 = TREE_OPERAND (aref1, 0);
9663 tree base_offset = build_int_cst (type, 0);
9664
9665 /* If the bases are array references as well, recurse. If the bases
9666 are pointer indirections compute the difference of the pointers.
9667 If the bases are equal, we are set. */
9668 if ((TREE_CODE (base0) == ARRAY_REF
9669 && TREE_CODE (base1) == ARRAY_REF
9670 && (base_offset
9671 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9672 || (INDIRECT_REF_P (base0)
9673 && INDIRECT_REF_P (base1)
9674 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9675 TREE_OPERAND (base0, 0),
9676 TREE_OPERAND (base1, 0))))
9677 || operand_equal_p (base0, base1, 0))
9678 {
9679 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9680 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9681 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9682 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9683 return fold_build2_loc (loc, PLUS_EXPR, type,
9684 base_offset,
9685 fold_build2_loc (loc, MULT_EXPR, type,
9686 diff, esz));
9687 }
9688 return NULL_TREE;
9689 }
9690
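/* For example, "&a[i] - &a[j]" for "int a[n]" folds to a zero base
   offset plus (i - j) * 4 on a target with 4-byte int; for
   multi-dimensional arrays the recursion accumulates the difference
   contributed by each dimension.  */
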
9691 /* If the real or vector real constant CST of type TYPE has an exact
9692 inverse, return it, else return NULL. */
9693
9694 tree
9695 exact_inverse (tree type, tree cst)
9696 {
9697 REAL_VALUE_TYPE r;
9698 tree unit_type, *elts;
9699 machine_mode mode;
9700 unsigned vec_nelts, i;
9701
9702 switch (TREE_CODE (cst))
9703 {
9704 case REAL_CST:
9705 r = TREE_REAL_CST (cst);
9706
9707 if (exact_real_inverse (TYPE_MODE (type), &r))
9708 return build_real (type, r);
9709
9710 return NULL_TREE;
9711
9712 case VECTOR_CST:
9713 vec_nelts = VECTOR_CST_NELTS (cst);
9714 elts = XALLOCAVEC (tree, vec_nelts);
9715 unit_type = TREE_TYPE (type);
9716 mode = TYPE_MODE (unit_type);
9717
9718 for (i = 0; i < vec_nelts; i++)
9719 {
9720 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9721 if (!exact_real_inverse (mode, &r))
9722 return NULL_TREE;
9723 elts[i] = build_real (unit_type, r);
9724 }
9725
9726 return build_vector (type, elts);
9727
9728 default:
9729 return NULL_TREE;
9730 }
9731 }
9732
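/* For example, 0.25 yields the exact inverse 4.0, so a caller may
   turn "x / 0.25" into "x * 4.0"; 3.0 has no exactly representable
   reciprocal, so NULL_TREE is returned and no such rewrite
   happens.  */
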
9733 /* Mask out the tz least significant bits of X of type TYPE where
9734 tz is the number of trailing zeroes in Y. */
9735 static wide_int
9736 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9737 {
9738 int tz = wi::ctz (y);
9739 if (tz > 0)
9740 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9741 return x;
9742 }
9743
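/* For example, with Y = 8 (three trailing zero bits) and X = 0b1101,
   the low three bits of X are cleared, giving 0b1000.  */
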
9744 /* Return true when T is an address and is known to be nonzero.
9745 For floating point we further ensure that T is not denormal.
9746 Similar logic is present in nonzero_address in rtlanal.h.
9747
9748 If the return value is based on the assumption that signed overflow
9749 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9750 change *STRICT_OVERFLOW_P. */
9751
9752 static bool
9753 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9754 {
9755 tree type = TREE_TYPE (t);
9756 enum tree_code code;
9757
9758 /* Doing something useful for floating point would need more work. */
9759 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9760 return false;
9761
9762 code = TREE_CODE (t);
9763 switch (TREE_CODE_CLASS (code))
9764 {
9765 case tcc_unary:
9766 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9767 strict_overflow_p);
9768 case tcc_binary:
9769 case tcc_comparison:
9770 return tree_binary_nonzero_warnv_p (code, type,
9771 TREE_OPERAND (t, 0),
9772 TREE_OPERAND (t, 1),
9773 strict_overflow_p);
9774 case tcc_constant:
9775 case tcc_declaration:
9776 case tcc_reference:
9777 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9778
9779 default:
9780 break;
9781 }
9782
9783 switch (code)
9784 {
9785 case TRUTH_NOT_EXPR:
9786 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9787 strict_overflow_p);
9788
9789 case TRUTH_AND_EXPR:
9790 case TRUTH_OR_EXPR:
9791 case TRUTH_XOR_EXPR:
9792 return tree_binary_nonzero_warnv_p (code, type,
9793 TREE_OPERAND (t, 0),
9794 TREE_OPERAND (t, 1),
9795 strict_overflow_p);
9796
9797 case COND_EXPR:
9798 case CONSTRUCTOR:
9799 case OBJ_TYPE_REF:
9800 case ASSERT_EXPR:
9801 case ADDR_EXPR:
9802 case WITH_SIZE_EXPR:
9803 case SSA_NAME:
9804 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9805
9806 case COMPOUND_EXPR:
9807 case MODIFY_EXPR:
9808 case BIND_EXPR:
9809 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9810 strict_overflow_p);
9811
9812 case SAVE_EXPR:
9813 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9814 strict_overflow_p);
9815
9816 case CALL_EXPR:
9817 {
9818 tree fndecl = get_callee_fndecl (t);
9819 if (!fndecl) return false;
9820 if (flag_delete_null_pointer_checks && !flag_check_new
9821 && DECL_IS_OPERATOR_NEW (fndecl)
9822 && !TREE_NOTHROW (fndecl))
9823 return true;
9824 if (flag_delete_null_pointer_checks
9825 && lookup_attribute ("returns_nonnull",
9826 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9827 return true;
9828 return alloca_call_p (t);
9829 }
9830
9831 default:
9832 break;
9833 }
9834 return false;
9835 }
9836
9837 /* Return true when T is an address and is known to be nonzero.
9838 Handle warnings about undefined signed overflow. */
9839
9840 static bool
9841 tree_expr_nonzero_p (tree t)
9842 {
9843 bool ret, strict_overflow_p;
9844
9845 strict_overflow_p = false;
9846 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9847 if (strict_overflow_p)
9848 fold_overflow_warning (("assuming signed overflow does not occur when "
9849 "determining that expression is always "
9850 "non-zero"),
9851 WARN_STRICT_OVERFLOW_MISC);
9852 return ret;
9853 }
9854
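/* For example, the address of an ordinary (non-weak) declaration is
   known nonzero, and so is a call to a throwing operator new when
   null pointer checks may be deleted; most other cases dispatch to
   the per-class helpers above.  */
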
9855 /* Fold a binary expression of code CODE and type TYPE with operands
9856 OP0 and OP1. LOC is the location of the resulting expression.
9857 Return the folded expression if folding is successful. Otherwise,
9858 return NULL_TREE. */
9859
9860 tree
9861 fold_binary_loc (location_t loc,
9862 enum tree_code code, tree type, tree op0, tree op1)
9863 {
9864 enum tree_code_class kind = TREE_CODE_CLASS (code);
9865 tree arg0, arg1, tem;
9866 tree t1 = NULL_TREE;
9867 bool strict_overflow_p;
9868 unsigned int prec;
9869
9870 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9871 && TREE_CODE_LENGTH (code) == 2
9872 && op0 != NULL_TREE
9873 && op1 != NULL_TREE);
9874
9875 arg0 = op0;
9876 arg1 = op1;
9877
9878 /* Strip any conversions that don't change the mode. This is
9879 safe for every expression, except for a comparison expression
9880 because its signedness is derived from its operands. So, in
9881 the latter case, only strip conversions that don't change the
9882 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9883 preserved.
9884
9885 Note that this is done as an internal manipulation within the
9886 constant folder, in order to find the simplest representation
9887 of the arguments so that their form can be studied. In any
9888 cases, the appropriate type conversions should be put back in
9889 the tree that will get out of the constant folder. */
9890
9891 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9892 {
9893 STRIP_SIGN_NOPS (arg0);
9894 STRIP_SIGN_NOPS (arg1);
9895 }
9896 else
9897 {
9898 STRIP_NOPS (arg0);
9899 STRIP_NOPS (arg1);
9900 }
9901
9902 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9903 constant but we can't do arithmetic on them. */
9904 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9905 {
9906 tem = const_binop (code, type, arg0, arg1);
9907 if (tem != NULL_TREE)
9908 {
9909 if (TREE_TYPE (tem) != type)
9910 tem = fold_convert_loc (loc, type, tem);
9911 return tem;
9912 }
9913 }
9914
9915 /* If this is a commutative operation, and ARG0 is a constant, move it
9916 to ARG1 to reduce the number of tests below. */
9917 if (commutative_tree_code (code)
9918 && tree_swap_operands_p (arg0, arg1, true))
9919 return fold_build2_loc (loc, code, type, op1, op0);
9920
9921 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9922 to ARG1 to reduce the number of tests below. */
9923 if (kind == tcc_comparison
9924 && tree_swap_operands_p (arg0, arg1, true))
9925 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9926
9927 tem = generic_simplify (loc, code, type, op0, op1);
9928 if (tem)
9929 return tem;
9930
9931 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9932
9933 First check for cases where an arithmetic operation is applied to a
9934 compound, conditional, or comparison operation. Push the arithmetic
9935 operation inside the compound or conditional to see if any folding
9936 can then be done. Convert comparison to conditional for this purpose.
9937 The also optimizes non-constant cases that used to be done in
9938 expand_expr.
9939
9940      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9941      where one of the operands is a comparison and the other is a comparison, a
9942      BIT_AND_EXPR with the constant 1, or a truth value.  In that case, the
9943 code below would make the expression more complex. Change it to a
9944 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9945 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9946
9947 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9948 || code == EQ_EXPR || code == NE_EXPR)
9949 && TREE_CODE (type) != VECTOR_TYPE
9950 && ((truth_value_p (TREE_CODE (arg0))
9951 && (truth_value_p (TREE_CODE (arg1))
9952 || (TREE_CODE (arg1) == BIT_AND_EXPR
9953 && integer_onep (TREE_OPERAND (arg1, 1)))))
9954 || (truth_value_p (TREE_CODE (arg1))
9955 && (truth_value_p (TREE_CODE (arg0))
9956 || (TREE_CODE (arg0) == BIT_AND_EXPR
9957 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9958 {
9959 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9960 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9961 : TRUTH_XOR_EXPR,
9962 boolean_type_node,
9963 fold_convert_loc (loc, boolean_type_node, arg0),
9964 fold_convert_loc (loc, boolean_type_node, arg1));
9965
9966 if (code == EQ_EXPR)
9967 tem = invert_truthvalue_loc (loc, tem);
9968
9969 return fold_convert_loc (loc, type, tem);
9970 }
9971
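/* For example, "(a < b) & (c < d)" becomes the TRUTH_AND_EXPR of the
   two comparisons, and "(a < b) == (c < d)" becomes the inversion of
   their TRUTH_XOR_EXPR.  */
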
9972 if (TREE_CODE_CLASS (code) == tcc_binary
9973 || TREE_CODE_CLASS (code) == tcc_comparison)
9974 {
9975 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9976 {
9977 tem = fold_build2_loc (loc, code, type,
9978 fold_convert_loc (loc, TREE_TYPE (op0),
9979 TREE_OPERAND (arg0, 1)), op1);
9980 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9981 tem);
9982 }
9983 if (TREE_CODE (arg1) == COMPOUND_EXPR
9984 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9985 {
9986 tem = fold_build2_loc (loc, code, type, op0,
9987 fold_convert_loc (loc, TREE_TYPE (op1),
9988 TREE_OPERAND (arg1, 1)));
9989 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9990 tem);
9991 }
9992
9993 if (TREE_CODE (arg0) == COND_EXPR
9994 || TREE_CODE (arg0) == VEC_COND_EXPR
9995 || COMPARISON_CLASS_P (arg0))
9996 {
9997 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9998 arg0, arg1,
9999 /*cond_first_p=*/1);
10000 if (tem != NULL_TREE)
10001 return tem;
10002 }
10003
10004 if (TREE_CODE (arg1) == COND_EXPR
10005 || TREE_CODE (arg1) == VEC_COND_EXPR
10006 || COMPARISON_CLASS_P (arg1))
10007 {
10008 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10009 arg1, arg0,
10010 /*cond_first_p=*/0);
10011 if (tem != NULL_TREE)
10012 return tem;
10013 }
10014 }
10015
10016 switch (code)
10017 {
10018 case MEM_REF:
10019 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10020 if (TREE_CODE (arg0) == ADDR_EXPR
10021 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10022 {
10023 tree iref = TREE_OPERAND (arg0, 0);
10024 return fold_build2 (MEM_REF, type,
10025 TREE_OPERAND (iref, 0),
10026 int_const_binop (PLUS_EXPR, arg1,
10027 TREE_OPERAND (iref, 1)));
10028 }
10029
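/* For example, MEM[&MEM[p, 4], 8] above becomes MEM[p, 12], and the
   fold below turns MEM[&a.b, 8] into MEM[&a, offsetof (a, b) + 8].  */
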
10030 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10031 if (TREE_CODE (arg0) == ADDR_EXPR
10032 && handled_component_p (TREE_OPERAND (arg0, 0)))
10033 {
10034 tree base;
10035 HOST_WIDE_INT coffset;
10036 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10037 &coffset);
10038 if (!base)
10039 return NULL_TREE;
10040 return fold_build2 (MEM_REF, type,
10041 build_fold_addr_expr (base),
10042 int_const_binop (PLUS_EXPR, arg1,
10043 size_int (coffset)));
10044 }
10045
10046 return NULL_TREE;
10047
10048 case POINTER_PLUS_EXPR:
10049 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10050 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10051 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10052 return fold_convert_loc (loc, type,
10053 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10054 fold_convert_loc (loc, sizetype,
10055 arg1),
10056 fold_convert_loc (loc, sizetype,
10057 arg0)));
10058
10059 return NULL_TREE;
10060
10061 case PLUS_EXPR:
10062 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10063 {
10064 /* X + (X / CST) * -CST is X % CST. */
10065 if (TREE_CODE (arg1) == MULT_EXPR
10066 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10067 && operand_equal_p (arg0,
10068 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10069 {
10070 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10071 tree cst1 = TREE_OPERAND (arg1, 1);
10072 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10073 cst1, cst0);
10074 if (sum && integer_zerop (sum))
10075 return fold_convert_loc (loc, type,
10076 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10077 TREE_TYPE (arg0), arg0,
10078 cst0));
10079 }
10080 }
10081
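/* A worked example: "x + (x / 16) * -16" matches with cst0 = 16 and
   cst1 = -16, whose sum is zero, so it folds to "x % 16" by the
   identity x % c == x - (x / c) * c of truncating division.  */
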
10082 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10083 one. Make sure the type is not saturating and has the signedness of
10084 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10085 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10086 if ((TREE_CODE (arg0) == MULT_EXPR
10087 || TREE_CODE (arg1) == MULT_EXPR)
10088 && !TYPE_SATURATING (type)
10089 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10090 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10091 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10092 {
10093 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10094 if (tem)
10095 return tem;
10096 }
10097
10098 if (! FLOAT_TYPE_P (type))
10099 {
10100 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10101 with a constant, and the two constants have no bits in common,
10102 we should treat this as a BIT_IOR_EXPR since this may produce more
10103 simplifications. */
10104 if (TREE_CODE (arg0) == BIT_AND_EXPR
10105 && TREE_CODE (arg1) == BIT_AND_EXPR
10106 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10107 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10108 && wi::bit_and (TREE_OPERAND (arg0, 1),
10109 TREE_OPERAND (arg1, 1)) == 0)
10110 {
10111 code = BIT_IOR_EXPR;
10112 goto bit_ior;
10113 }
10114
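/* For example, "(x & 0xf0) + (y & 0x0f)" can produce no carry
   between the two terms, so it is treated as
   "(x & 0xf0) | (y & 0x0f)" instead.  */
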
10115 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10116 (plus (plus (mult) (mult)) (foo)) so that we can
10117 take advantage of the factoring cases below. */
10118 if (ANY_INTEGRAL_TYPE_P (type)
10119 && TYPE_OVERFLOW_WRAPS (type)
10120 && (((TREE_CODE (arg0) == PLUS_EXPR
10121 || TREE_CODE (arg0) == MINUS_EXPR)
10122 && TREE_CODE (arg1) == MULT_EXPR)
10123 || ((TREE_CODE (arg1) == PLUS_EXPR
10124 || TREE_CODE (arg1) == MINUS_EXPR)
10125 && TREE_CODE (arg0) == MULT_EXPR)))
10126 {
10127 tree parg0, parg1, parg, marg;
10128 enum tree_code pcode;
10129
10130 if (TREE_CODE (arg1) == MULT_EXPR)
10131 parg = arg0, marg = arg1;
10132 else
10133 parg = arg1, marg = arg0;
10134 pcode = TREE_CODE (parg);
10135 parg0 = TREE_OPERAND (parg, 0);
10136 parg1 = TREE_OPERAND (parg, 1);
10137 STRIP_NOPS (parg0);
10138 STRIP_NOPS (parg1);
10139
10140 if (TREE_CODE (parg0) == MULT_EXPR
10141 && TREE_CODE (parg1) != MULT_EXPR)
10142 return fold_build2_loc (loc, pcode, type,
10143 fold_build2_loc (loc, PLUS_EXPR, type,
10144 fold_convert_loc (loc, type,
10145 parg0),
10146 fold_convert_loc (loc, type,
10147 marg)),
10148 fold_convert_loc (loc, type, parg1));
10149 if (TREE_CODE (parg0) != MULT_EXPR
10150 && TREE_CODE (parg1) == MULT_EXPR)
10151 return
10152 fold_build2_loc (loc, PLUS_EXPR, type,
10153 fold_convert_loc (loc, type, parg0),
10154 fold_build2_loc (loc, pcode, type,
10155 fold_convert_loc (loc, type, marg),
10156 fold_convert_loc (loc, type,
10157 parg1)));
10158 }
10159 }
10160 else
10161 {
10162 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10163 to __complex__ ( x, y ). This is not the same for SNaNs or
10164 if signed zeros are involved. */
10165 if (!HONOR_SNANS (element_mode (arg0))
10166 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10167 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10168 {
10169 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10170 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10171 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10172 bool arg0rz = false, arg0iz = false;
10173 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10174 || (arg0i && (arg0iz = real_zerop (arg0i))))
10175 {
10176 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10177 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10178 if (arg0rz && arg1i && real_zerop (arg1i))
10179 {
10180 tree rp = arg1r ? arg1r
10181 : build1 (REALPART_EXPR, rtype, arg1);
10182 tree ip = arg0i ? arg0i
10183 : build1 (IMAGPART_EXPR, rtype, arg0);
10184 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10185 }
10186 else if (arg0iz && arg1r && real_zerop (arg1r))
10187 {
10188 tree rp = arg0r ? arg0r
10189 : build1 (REALPART_EXPR, rtype, arg0);
10190 tree ip = arg1i ? arg1i
10191 : build1 (IMAGPART_EXPR, rtype, arg1);
10192 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10193 }
10194 }
10195 }
10196
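/* For example, "__complex__ (x, 0.0) + __complex__ (0.0, y)" folds
   to "__complex__ (x, y)" here when signaling NaNs and signed zeros
   need not be honored.  */
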
10197 if (flag_unsafe_math_optimizations
10198 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10199 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10200 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10201 return tem;
10202
10203 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10204 We associate floats only if the user has specified
10205 -fassociative-math. */
10206 if (flag_associative_math
10207 && TREE_CODE (arg1) == PLUS_EXPR
10208 && TREE_CODE (arg0) != MULT_EXPR)
10209 {
10210 tree tree10 = TREE_OPERAND (arg1, 0);
10211 tree tree11 = TREE_OPERAND (arg1, 1);
10212 if (TREE_CODE (tree11) == MULT_EXPR
10213 && TREE_CODE (tree10) == MULT_EXPR)
10214 {
10215 tree tree0;
10216 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10217 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10218 }
10219 }
10220 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10221 We associate floats only if the user has specified
10222 -fassociative-math. */
10223 if (flag_associative_math
10224 && TREE_CODE (arg0) == PLUS_EXPR
10225 && TREE_CODE (arg1) != MULT_EXPR)
10226 {
10227 tree tree00 = TREE_OPERAND (arg0, 0);
10228 tree tree01 = TREE_OPERAND (arg0, 1);
10229 if (TREE_CODE (tree01) == MULT_EXPR
10230 && TREE_CODE (tree00) == MULT_EXPR)
10231 {
10232 tree tree0;
10233 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10234 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10235 }
10236 }
10237 }
10238
10239 bit_rotate:
10240 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10241 is a rotate of A by C1 bits. */
10242 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10243 is a rotate of A by B bits. */
10244 {
10245 enum tree_code code0, code1;
10246 tree rtype;
10247 code0 = TREE_CODE (arg0);
10248 code1 = TREE_CODE (arg1);
10249 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10250 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10251 && operand_equal_p (TREE_OPERAND (arg0, 0),
10252 TREE_OPERAND (arg1, 0), 0)
10253 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10254 TYPE_UNSIGNED (rtype))
10255 /* Only create rotates in complete modes. Other cases are not
10256 expanded properly. */
10257 && (element_precision (rtype)
10258 == element_precision (TYPE_MODE (rtype))))
10259 {
10260 tree tree01, tree11;
10261 enum tree_code code01, code11;
10262
10263 tree01 = TREE_OPERAND (arg0, 1);
10264 tree11 = TREE_OPERAND (arg1, 1);
10265 STRIP_NOPS (tree01);
10266 STRIP_NOPS (tree11);
10267 code01 = TREE_CODE (tree01);
10268 code11 = TREE_CODE (tree11);
10269 if (code01 == INTEGER_CST
10270 && code11 == INTEGER_CST
10271 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10272 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10273 {
10274 tem = build2_loc (loc, LROTATE_EXPR,
10275 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10276 TREE_OPERAND (arg0, 0),
10277 code0 == LSHIFT_EXPR
10278 ? TREE_OPERAND (arg0, 1)
10279 : TREE_OPERAND (arg1, 1));
10280 return fold_convert_loc (loc, type, tem);
10281 }
10282 else if (code11 == MINUS_EXPR)
10283 {
10284 tree tree110, tree111;
10285 tree110 = TREE_OPERAND (tree11, 0);
10286 tree111 = TREE_OPERAND (tree11, 1);
10287 STRIP_NOPS (tree110);
10288 STRIP_NOPS (tree111);
10289 if (TREE_CODE (tree110) == INTEGER_CST
10290 && 0 == compare_tree_int (tree110,
10291 element_precision
10292 (TREE_TYPE (TREE_OPERAND
10293 (arg0, 0))))
10294 && operand_equal_p (tree01, tree111, 0))
10295 return
10296 fold_convert_loc (loc, type,
10297 build2 ((code0 == LSHIFT_EXPR
10298 ? LROTATE_EXPR
10299 : RROTATE_EXPR),
10300 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10301 TREE_OPERAND (arg0, 0),
10302 TREE_OPERAND (arg0, 1)));
10303 }
10304 else if (code01 == MINUS_EXPR)
10305 {
10306 tree tree010, tree011;
10307 tree010 = TREE_OPERAND (tree01, 0);
10308 tree011 = TREE_OPERAND (tree01, 1);
10309 STRIP_NOPS (tree010);
10310 STRIP_NOPS (tree011);
10311 if (TREE_CODE (tree010) == INTEGER_CST
10312 && 0 == compare_tree_int (tree010,
10313 element_precision
10314 (TREE_TYPE (TREE_OPERAND
10315 (arg0, 0))))
10316 && operand_equal_p (tree11, tree011, 0))
10317 return fold_convert_loc
10318 (loc, type,
10319 build2 ((code0 != LSHIFT_EXPR
10320 ? LROTATE_EXPR
10321 : RROTATE_EXPR),
10322 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10323 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10324 }
10325 }
10326 }
10327
10328 associate:
10329	  /* In most languages, we can't associate operations on floats
10330	     across parentheses.  Rather than remember where the parentheses
10331	     were, we don't associate floats at all, unless the user has
10332	     specified -fassociative-math.
10333	     We also need to make sure the type is not saturating.  */
10334
10335 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10336 && !TYPE_SATURATING (type))
10337 {
10338 tree var0, con0, lit0, minus_lit0;
10339 tree var1, con1, lit1, minus_lit1;
10340 tree atype = type;
10341 bool ok = true;
10342
10343 /* Split both trees into variables, constants, and literals. Then
10344 associate each group together, the constants with literals,
10345 then the result with variables. This increases the chances of
10346 literals being recombined later and of generating relocatable
10347 expressions for the sum of a constant and literal. */
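      /* Hedged sketch of the net effect: (x + 3) - (y - 5) splits into
	 variable parts x and y and literal parts 3 and 5, which the calls
	 below recombine as (x - y) + 8 with the literals folded together.  */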
10348 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10349 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10350 code == MINUS_EXPR);
10351
10352 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10353 if (code == MINUS_EXPR)
10354 code = PLUS_EXPR;
10355
10356	      /* With undefined overflow, prefer doing association in a type
10357 which wraps on overflow, if that is one of the operand types. */
10358 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10359 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10360 {
10361 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10362 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10363 atype = TREE_TYPE (arg0);
10364 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10365 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10366 atype = TREE_TYPE (arg1);
10367 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10368 }
10369
10370	      /* With undefined overflow, we can only associate constants with one
10371	         variable, and only constants whose association doesn't overflow.  */
10372 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10373 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10374 {
10375 if (var0 && var1)
10376 {
10377 tree tmp0 = var0;
10378 tree tmp1 = var1;
10379
10380 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10381 tmp0 = TREE_OPERAND (tmp0, 0);
10382 if (CONVERT_EXPR_P (tmp0)
10383 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10384 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10385 <= TYPE_PRECISION (atype)))
10386 tmp0 = TREE_OPERAND (tmp0, 0);
10387 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10388 tmp1 = TREE_OPERAND (tmp1, 0);
10389 if (CONVERT_EXPR_P (tmp1)
10390 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10391 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10392 <= TYPE_PRECISION (atype)))
10393 tmp1 = TREE_OPERAND (tmp1, 0);
10394 /* The only case we can still associate with two variables
10395 is if they are the same, modulo negation and bit-pattern
10396 preserving conversions. */
10397 if (!operand_equal_p (tmp0, tmp1, 0))
10398 ok = false;
10399 }
10400 }
10401
10402 /* Only do something if we found more than two objects. Otherwise,
10403 nothing has changed and we risk infinite recursion. */
10404 if (ok
10405 && (2 < ((var0 != 0) + (var1 != 0)
10406 + (con0 != 0) + (con1 != 0)
10407 + (lit0 != 0) + (lit1 != 0)
10408 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10409 {
10410 bool any_overflows = false;
10411 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10412 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10413 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10414 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10415 var0 = associate_trees (loc, var0, var1, code, atype);
10416 con0 = associate_trees (loc, con0, con1, code, atype);
10417 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10418 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10419 code, atype);
10420
10421 /* Preserve the MINUS_EXPR if the negative part of the literal is
10422 greater than the positive part. Otherwise, the multiplicative
10423	             folding code (i.e. extract_muldiv) may be fooled in case
10424 unsigned constants are subtracted, like in the following
10425 example: ((X*2 + 4) - 8U)/2. */
10426 if (minus_lit0 && lit0)
10427 {
10428 if (TREE_CODE (lit0) == INTEGER_CST
10429 && TREE_CODE (minus_lit0) == INTEGER_CST
10430 && tree_int_cst_lt (lit0, minus_lit0))
10431 {
10432 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10433 MINUS_EXPR, atype);
10434 lit0 = 0;
10435 }
10436 else
10437 {
10438 lit0 = associate_trees (loc, lit0, minus_lit0,
10439 MINUS_EXPR, atype);
10440 minus_lit0 = 0;
10441 }
10442 }
10443
10444 /* Don't introduce overflows through reassociation. */
10445 if (!any_overflows
10446 && ((lit0 && TREE_OVERFLOW_P (lit0))
10447 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10448 return NULL_TREE;
10449
10450 if (minus_lit0)
10451 {
10452 if (con0 == 0)
10453 return
10454 fold_convert_loc (loc, type,
10455 associate_trees (loc, var0, minus_lit0,
10456 MINUS_EXPR, atype));
10457 else
10458 {
10459 con0 = associate_trees (loc, con0, minus_lit0,
10460 MINUS_EXPR, atype);
10461 return
10462 fold_convert_loc (loc, type,
10463 associate_trees (loc, var0, con0,
10464 PLUS_EXPR, atype));
10465 }
10466 }
10467
10468 con0 = associate_trees (loc, con0, lit0, code, atype);
10469 return
10470 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10471 code, atype));
10472 }
10473 }
10474
10475 return NULL_TREE;
10476
10477 case MINUS_EXPR:
10478 /* Pointer simplifications for subtraction, simple reassociations. */
10479 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10480 {
10481 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10482 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10483 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10484 {
10485 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10486 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10487 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10488 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10489 return fold_build2_loc (loc, PLUS_EXPR, type,
10490 fold_build2_loc (loc, MINUS_EXPR, type,
10491 arg00, arg10),
10492 fold_build2_loc (loc, MINUS_EXPR, type,
10493 arg01, arg11));
10494 }
10495 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10496 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10497 {
10498 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10499 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10500 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10501 fold_convert_loc (loc, type, arg1));
10502 if (tmp)
10503 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10504 }
10505 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10506 simplifies. */
10507 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10508 {
10509 tree arg10 = fold_convert_loc (loc, type,
10510 TREE_OPERAND (arg1, 0));
10511 tree arg11 = fold_convert_loc (loc, type,
10512 TREE_OPERAND (arg1, 1));
10513 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10514 fold_convert_loc (loc, type, arg0),
10515 arg10);
10516 if (tmp)
10517 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10518 }
10519 }
10520 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10521 if (TREE_CODE (arg0) == NEGATE_EXPR
10522 && negate_expr_p (arg1)
10523 && reorder_operands_p (arg0, arg1))
10524 return fold_build2_loc (loc, MINUS_EXPR, type,
10525 fold_convert_loc (loc, type,
10526 negate_expr (arg1)),
10527 fold_convert_loc (loc, type,
10528 TREE_OPERAND (arg0, 0)));
10529
10530 /* X - (X / Y) * Y is X % Y. */
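      /* Sketch: for integral x, x - (x / 5) * 5 folds to x % 5; e.g.
	 17 - (17 / 5) * 5 == 17 - 15 == 2 == 17 % 5 (truncating division).  */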
10531 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10532 && TREE_CODE (arg1) == MULT_EXPR
10533 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10534 && operand_equal_p (arg0,
10535 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10536 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10537 TREE_OPERAND (arg1, 1), 0))
10538 return
10539 fold_convert_loc (loc, type,
10540 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10541 arg0, TREE_OPERAND (arg1, 1)));
10542
10543 if (! FLOAT_TYPE_P (type))
10544 {
10545 /* Fold A - (A & B) into ~B & A. */
10546 if (!TREE_SIDE_EFFECTS (arg0)
10547 && TREE_CODE (arg1) == BIT_AND_EXPR)
10548 {
10549 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10550 {
10551 tree arg10 = fold_convert_loc (loc, type,
10552 TREE_OPERAND (arg1, 0));
10553 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10554 fold_build1_loc (loc, BIT_NOT_EXPR,
10555 type, arg10),
10556 fold_convert_loc (loc, type, arg0));
10557 }
10558 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10559 {
10560 tree arg11 = fold_convert_loc (loc,
10561 type, TREE_OPERAND (arg1, 1));
10562 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10563 fold_build1_loc (loc, BIT_NOT_EXPR,
10564 type, arg11),
10565 fold_convert_loc (loc, type, arg0));
10566 }
10567 }
10568
10569 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10570 any power of 2 minus 1. */
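	  /* Worked sketch with B == 7 (a power of 2 minus 1) and A == 13:
	     (13 & ~7) - (13 & 7) == 8 - 5 == 3, matching the replacement
	     (13 ^ 7) - 7 == 10 - 7 == 3.  */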
10571 if (TREE_CODE (arg0) == BIT_AND_EXPR
10572 && TREE_CODE (arg1) == BIT_AND_EXPR
10573 && operand_equal_p (TREE_OPERAND (arg0, 0),
10574 TREE_OPERAND (arg1, 0), 0))
10575 {
10576 tree mask0 = TREE_OPERAND (arg0, 1);
10577 tree mask1 = TREE_OPERAND (arg1, 1);
10578 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10579
10580 if (operand_equal_p (tem, mask1, 0))
10581 {
10582 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10583 TREE_OPERAND (arg0, 0), mask1);
10584 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10585 }
10586 }
10587 }
10588
10589 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10590 __complex__ ( x, -y ). This is not the same for SNaNs or if
10591 signed zeros are involved. */
10592 if (!HONOR_SNANS (element_mode (arg0))
10593 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10594 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10595 {
10596 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10597 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10598 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10599 bool arg0rz = false, arg0iz = false;
10600 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10601 || (arg0i && (arg0iz = real_zerop (arg0i))))
10602 {
10603 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10604 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10605 if (arg0rz && arg1i && real_zerop (arg1i))
10606 {
10607 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10608 arg1r ? arg1r
10609 : build1 (REALPART_EXPR, rtype, arg1));
10610 tree ip = arg0i ? arg0i
10611 : build1 (IMAGPART_EXPR, rtype, arg0);
10612 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10613 }
10614 else if (arg0iz && arg1r && real_zerop (arg1r))
10615 {
10616 tree rp = arg0r ? arg0r
10617 : build1 (REALPART_EXPR, rtype, arg0);
10618 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10619 arg1i ? arg1i
10620 : build1 (IMAGPART_EXPR, rtype, arg1));
10621 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10622 }
10623 }
10624 }
10625
10626 /* A - B -> A + (-B) if B is easily negatable. */
10627 if (negate_expr_p (arg1)
10628 && !TYPE_OVERFLOW_SANITIZED (type)
10629 && ((FLOAT_TYPE_P (type)
10630 /* Avoid this transformation if B is a positive REAL_CST. */
10631 && (TREE_CODE (arg1) != REAL_CST
10632 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10633 || INTEGRAL_TYPE_P (type)))
10634 return fold_build2_loc (loc, PLUS_EXPR, type,
10635 fold_convert_loc (loc, type, arg0),
10636 fold_convert_loc (loc, type,
10637 negate_expr (arg1)));
10638
10639 /* Try folding difference of addresses. */
10640 {
10641 HOST_WIDE_INT diff;
10642
10643 if ((TREE_CODE (arg0) == ADDR_EXPR
10644 || TREE_CODE (arg1) == ADDR_EXPR)
10645 && ptr_difference_const (arg0, arg1, &diff))
10646 return build_int_cst_type (type, diff);
10647 }
10648
10649 /* Fold &a[i] - &a[j] to i-j. */
10650 if (TREE_CODE (arg0) == ADDR_EXPR
10651 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10652 && TREE_CODE (arg1) == ADDR_EXPR
10653 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10654 {
10655 tree tem = fold_addr_of_array_ref_difference (loc, type,
10656 TREE_OPERAND (arg0, 0),
10657 TREE_OPERAND (arg1, 0));
10658 if (tem)
10659 return tem;
10660 }
10661
10662 if (FLOAT_TYPE_P (type)
10663 && flag_unsafe_math_optimizations
10664 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10665 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10666 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10667 return tem;
10668
10669 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10670 one. Make sure the type is not saturating and has the signedness of
10671 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10672 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10673 if ((TREE_CODE (arg0) == MULT_EXPR
10674 || TREE_CODE (arg1) == MULT_EXPR)
10675 && !TYPE_SATURATING (type)
10676 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10677 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10678 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10679 {
10680 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10681 if (tem)
10682 return tem;
10683 }
10684
10685 goto associate;
10686
10687 case MULT_EXPR:
10688 /* (-A) * (-B) -> A * B */
10689 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10690 return fold_build2_loc (loc, MULT_EXPR, type,
10691 fold_convert_loc (loc, type,
10692 TREE_OPERAND (arg0, 0)),
10693 fold_convert_loc (loc, type,
10694 negate_expr (arg1)));
10695 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10696 return fold_build2_loc (loc, MULT_EXPR, type,
10697 fold_convert_loc (loc, type,
10698 negate_expr (arg0)),
10699 fold_convert_loc (loc, type,
10700 TREE_OPERAND (arg1, 0)));
10701
10702 if (! FLOAT_TYPE_P (type))
10703 {
10704 /* Transform x * -C into -x * C if x is easily negatable. */
10705 if (TREE_CODE (arg1) == INTEGER_CST
10706 && tree_int_cst_sgn (arg1) == -1
10707 && negate_expr_p (arg0)
10708 && (tem = negate_expr (arg1)) != arg1
10709 && !TREE_OVERFLOW (tem))
10710 return fold_build2_loc (loc, MULT_EXPR, type,
10711 fold_convert_loc (loc, type,
10712 negate_expr (arg0)),
10713 tem);
10714
10715 /* (a * (1 << b)) is (a << b) */
10716 if (TREE_CODE (arg1) == LSHIFT_EXPR
10717 && integer_onep (TREE_OPERAND (arg1, 0)))
10718 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10719 TREE_OPERAND (arg1, 1));
10720 if (TREE_CODE (arg0) == LSHIFT_EXPR
10721 && integer_onep (TREE_OPERAND (arg0, 0)))
10722 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10723 TREE_OPERAND (arg0, 1));
10724
10725 /* (A + A) * C -> A * 2 * C */
10726 if (TREE_CODE (arg0) == PLUS_EXPR
10727 && TREE_CODE (arg1) == INTEGER_CST
10728 && operand_equal_p (TREE_OPERAND (arg0, 0),
10729 TREE_OPERAND (arg0, 1), 0))
10730 return fold_build2_loc (loc, MULT_EXPR, type,
10731 omit_one_operand_loc (loc, type,
10732 TREE_OPERAND (arg0, 0),
10733 TREE_OPERAND (arg0, 1)),
10734 fold_build2_loc (loc, MULT_EXPR, type,
10735	                                 build_int_cst (type, 2), arg1));
10736
10737 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10738 sign-changing only. */
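	  /* Sketch (assuming only the sign of the type changes): for a
	     signed int x, (unsigned) (x /[ex] 4) * 4 cancels back to
	     (unsigned) x, since the exact division left nothing behind.  */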
10739 if (TREE_CODE (arg1) == INTEGER_CST
10740 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10741 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10742 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10743
10744 strict_overflow_p = false;
10745 if (TREE_CODE (arg1) == INTEGER_CST
10746 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10747 &strict_overflow_p)))
10748 {
10749 if (strict_overflow_p)
10750 fold_overflow_warning (("assuming signed overflow does not "
10751 "occur when simplifying "
10752 "multiplication"),
10753 WARN_STRICT_OVERFLOW_MISC);
10754 return fold_convert_loc (loc, type, tem);
10755 }
10756
10757 /* Optimize z * conj(z) for integer complex numbers. */
10758 if (TREE_CODE (arg0) == CONJ_EXPR
10759 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10760 return fold_mult_zconjz (loc, type, arg1);
10761 if (TREE_CODE (arg1) == CONJ_EXPR
10762 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10763 return fold_mult_zconjz (loc, type, arg0);
10764 }
10765 else
10766 {
10767	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10768	     the result for floating-point types due to rounding, so it is applied
10769	     only if -fassociative-math was specified.  */
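	  /* Sketch: (2.0 / x) * 3.0 becomes 6.0 / x.  Here 2.0 * 3.0 folds
	     exactly, but in general the reordering can round differently,
	     hence the guard.  */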
10770 if (flag_associative_math
10771 && TREE_CODE (arg0) == RDIV_EXPR
10772 && TREE_CODE (arg1) == REAL_CST
10773 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10774 {
10775 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10776 arg1);
10777 if (tem)
10778 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10779 TREE_OPERAND (arg0, 1));
10780 }
10781
10782 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10783 if (operand_equal_p (arg0, arg1, 0))
10784 {
10785 tree tem = fold_strip_sign_ops (arg0);
10786 if (tem != NULL_TREE)
10787 {
10788 tem = fold_convert_loc (loc, type, tem);
10789 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10790 }
10791 }
10792
10793 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10794 This is not the same for NaNs or if signed zeros are
10795 involved. */
10796 if (!HONOR_NANS (arg0)
10797 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10798 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10799 && TREE_CODE (arg1) == COMPLEX_CST
10800 && real_zerop (TREE_REALPART (arg1)))
10801 {
10802 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10803 if (real_onep (TREE_IMAGPART (arg1)))
10804 return
10805 fold_build2_loc (loc, COMPLEX_EXPR, type,
10806 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10807 rtype, arg0)),
10808 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10809 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10810 return
10811 fold_build2_loc (loc, COMPLEX_EXPR, type,
10812 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10813 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10814 rtype, arg0)));
10815 }
10816
10817 /* Optimize z * conj(z) for floating point complex numbers.
10818 Guarded by flag_unsafe_math_optimizations as non-finite
10819 imaginary components don't produce scalar results. */
10820 if (flag_unsafe_math_optimizations
10821 && TREE_CODE (arg0) == CONJ_EXPR
10822 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10823 return fold_mult_zconjz (loc, type, arg1);
10824 if (flag_unsafe_math_optimizations
10825 && TREE_CODE (arg1) == CONJ_EXPR
10826 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10827 return fold_mult_zconjz (loc, type, arg0);
10828
10829 if (flag_unsafe_math_optimizations)
10830 {
10831 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10832 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10833
10834 /* Optimizations of root(...)*root(...). */
10835 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10836 {
10837 tree rootfn, arg;
10838 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10839 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10840
10841 /* Optimize sqrt(x)*sqrt(x) as x. */
10842 if (BUILTIN_SQRT_P (fcode0)
10843 && operand_equal_p (arg00, arg10, 0)
10844 && ! HONOR_SNANS (element_mode (type)))
10845 return arg00;
10846
10847 /* Optimize root(x)*root(y) as root(x*y). */
10848 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10849 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10850 return build_call_expr_loc (loc, rootfn, 1, arg);
10851 }
10852
10853 /* Optimize expN(x)*expN(y) as expN(x+y). */
10854 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10855 {
10856 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10857 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10858 CALL_EXPR_ARG (arg0, 0),
10859 CALL_EXPR_ARG (arg1, 0));
10860 return build_call_expr_loc (loc, expfn, 1, arg);
10861 }
10862
10863 /* Optimizations of pow(...)*pow(...). */
10864 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10865 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10866 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10867 {
10868 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10869 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10870 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10871 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10872
10873 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10874 if (operand_equal_p (arg01, arg11, 0))
10875 {
10876 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10877 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10878 arg00, arg10);
10879 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10880 }
10881
10882 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10883 if (operand_equal_p (arg00, arg10, 0))
10884 {
10885 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10886 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10887 arg01, arg11);
10888 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10889 }
10890 }
10891
10892 /* Optimize tan(x)*cos(x) as sin(x). */
10893 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10894 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10895 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10896 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10897 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10898 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10899 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10900 CALL_EXPR_ARG (arg1, 0), 0))
10901 {
10902 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10903
10904 if (sinfn != NULL_TREE)
10905 return build_call_expr_loc (loc, sinfn, 1,
10906 CALL_EXPR_ARG (arg0, 0));
10907 }
10908
10909 /* Optimize x*pow(x,c) as pow(x,c+1). */
10910 if (fcode1 == BUILT_IN_POW
10911 || fcode1 == BUILT_IN_POWF
10912 || fcode1 == BUILT_IN_POWL)
10913 {
10914 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10915 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10916 if (TREE_CODE (arg11) == REAL_CST
10917 && !TREE_OVERFLOW (arg11)
10918 && operand_equal_p (arg0, arg10, 0))
10919 {
10920 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10921 REAL_VALUE_TYPE c;
10922 tree arg;
10923
10924 c = TREE_REAL_CST (arg11);
10925 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10926 arg = build_real (type, c);
10927 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10928 }
10929 }
10930
10931 /* Optimize pow(x,c)*x as pow(x,c+1). */
10932 if (fcode0 == BUILT_IN_POW
10933 || fcode0 == BUILT_IN_POWF
10934 || fcode0 == BUILT_IN_POWL)
10935 {
10936 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10937 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10938 if (TREE_CODE (arg01) == REAL_CST
10939 && !TREE_OVERFLOW (arg01)
10940 && operand_equal_p (arg1, arg00, 0))
10941 {
10942 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10943 REAL_VALUE_TYPE c;
10944 tree arg;
10945
10946 c = TREE_REAL_CST (arg01);
10947 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10948 arg = build_real (type, c);
10949 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10950 }
10951 }
10952
10953 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10954 if (!in_gimple_form
10955 && optimize
10956 && operand_equal_p (arg0, arg1, 0))
10957 {
10958 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10959
10960 if (powfn)
10961 {
10962 tree arg = build_real (type, dconst2);
10963 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10964 }
10965 }
10966 }
10967 }
10968 goto associate;
10969
10970 case BIT_IOR_EXPR:
10971 bit_ior:
10972 /* ~X | X is -1. */
10973 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10974 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10975 {
10976 t1 = build_zero_cst (type);
10977 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10978 return omit_one_operand_loc (loc, type, t1, arg1);
10979 }
10980
10981 /* X | ~X is -1. */
10982 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10983 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10984 {
10985 t1 = build_zero_cst (type);
10986 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10987 return omit_one_operand_loc (loc, type, t1, arg0);
10988 }
10989
10990 /* Canonicalize (X & C1) | C2. */
10991 if (TREE_CODE (arg0) == BIT_AND_EXPR
10992 && TREE_CODE (arg1) == INTEGER_CST
10993 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10994 {
10995 int width = TYPE_PRECISION (type), w;
10996 wide_int c1 = TREE_OPERAND (arg0, 1);
10997 wide_int c2 = arg1;
10998
10999 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11000 if ((c1 & c2) == c1)
11001 return omit_one_operand_loc (loc, type, arg1,
11002 TREE_OPERAND (arg0, 0));
11003
11004 wide_int msk = wi::mask (width, false,
11005 TYPE_PRECISION (TREE_TYPE (arg1)));
11006
11007 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11008 if (msk.and_not (c1 | c2) == 0)
11009 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11010 TREE_OPERAND (arg0, 0), arg1);
11011
11012 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11013 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11014 mode which allows further optimizations. */
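	  /* Worked sketch with 32-bit ints: (x & 0x0ff0) | 0x00ff keeps only
	     C1 & ~C2, giving (x & 0x0f00) | 0x00ff; but for
	     (x & 0xff00) | 0x00ff the loop below finds that the mode-style
	     mask 0xffff covers C1 | C2 while containing C1, so it produces
	     (x & 0xffff) | 0x00ff instead.  */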
11015 c1 &= msk;
11016 c2 &= msk;
11017 wide_int c3 = c1.and_not (c2);
11018 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11019 {
11020 wide_int mask = wi::mask (w, false,
11021 TYPE_PRECISION (type));
11022 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11023 {
11024 c3 = mask;
11025 break;
11026 }
11027 }
11028
11029 if (c3 != c1)
11030 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11031 fold_build2_loc (loc, BIT_AND_EXPR, type,
11032 TREE_OPERAND (arg0, 0),
11033 wide_int_to_tree (type,
11034 c3)),
11035 arg1);
11036 }
11037
11038 /* (X & ~Y) | (~X & Y) is X ^ Y */
11039 if (TREE_CODE (arg0) == BIT_AND_EXPR
11040 && TREE_CODE (arg1) == BIT_AND_EXPR)
11041 {
11042 tree a0, a1, l0, l1, n0, n1;
11043
11044 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11045 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11046
11047 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11048 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11049
11050 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11051 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11052
11053 if ((operand_equal_p (n0, a0, 0)
11054 && operand_equal_p (n1, a1, 0))
11055 || (operand_equal_p (n0, a1, 0)
11056 && operand_equal_p (n1, a0, 0)))
11057 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11058 }
11059
11060 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11061 if (t1 != NULL_TREE)
11062 return t1;
11063
11064 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11065
11066 This results in more efficient code for machines without a NAND
11067 instruction. Combine will canonicalize to the first form
11068 which will allow use of NAND instructions provided by the
11069 backend if they exist. */
11070 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11071 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11072 {
11073 return
11074 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11075 build2 (BIT_AND_EXPR, type,
11076 fold_convert_loc (loc, type,
11077 TREE_OPERAND (arg0, 0)),
11078 fold_convert_loc (loc, type,
11079 TREE_OPERAND (arg1, 0))));
11080 }
11081
11082 /* See if this can be simplified into a rotate first. If that
11083 is unsuccessful continue in the association code. */
11084 goto bit_rotate;
11085
11086 case BIT_XOR_EXPR:
11087 /* ~X ^ X is -1. */
11088 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11089 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11090 {
11091 t1 = build_zero_cst (type);
11092 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11093 return omit_one_operand_loc (loc, type, t1, arg1);
11094 }
11095
11096 /* X ^ ~X is -1. */
11097 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11098 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11099 {
11100 t1 = build_zero_cst (type);
11101 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11102 return omit_one_operand_loc (loc, type, t1, arg0);
11103 }
11104
11105 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11106 with a constant, and the two constants have no bits in common,
11107 we should treat this as a BIT_IOR_EXPR since this may produce more
11108 simplifications. */
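      /* Sketch: (x & 0xf0) ^ (y & 0x0f) has no bits in common, so it is
	 treated as (x & 0xf0) | (y & 0x0f) from here on.  */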
11109 if (TREE_CODE (arg0) == BIT_AND_EXPR
11110 && TREE_CODE (arg1) == BIT_AND_EXPR
11111 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11112 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11113 && wi::bit_and (TREE_OPERAND (arg0, 1),
11114 TREE_OPERAND (arg1, 1)) == 0)
11115 {
11116 code = BIT_IOR_EXPR;
11117 goto bit_ior;
11118 }
11119
11120	      /* (X | Y) ^ X -> Y & ~X.  */
11121 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11122 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11123 {
11124 tree t2 = TREE_OPERAND (arg0, 1);
11125 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11126 arg1);
11127 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11128 fold_convert_loc (loc, type, t2),
11129 fold_convert_loc (loc, type, t1));
11130 return t1;
11131 }
11132
11133	      /* (Y | X) ^ X -> Y & ~X.  */
11134 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11135 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11136 {
11137 tree t2 = TREE_OPERAND (arg0, 0);
11138 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11139 arg1);
11140 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11141 fold_convert_loc (loc, type, t2),
11142 fold_convert_loc (loc, type, t1));
11143 return t1;
11144 }
11145
11146	      /* X ^ (X | Y) -> Y & ~X.  */
11147 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11148 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11149 {
11150 tree t2 = TREE_OPERAND (arg1, 1);
11151 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11152 arg0);
11153 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11154 fold_convert_loc (loc, type, t2),
11155 fold_convert_loc (loc, type, t1));
11156 return t1;
11157 }
11158
11159	      /* X ^ (Y | X) -> Y & ~X.  */
11160 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11161 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11162 {
11163 tree t2 = TREE_OPERAND (arg1, 0);
11164 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11165 arg0);
11166 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11167 fold_convert_loc (loc, type, t2),
11168 fold_convert_loc (loc, type, t1));
11169 return t1;
11170 }
11171
11172 /* Convert ~X ^ ~Y to X ^ Y. */
11173 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11174 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11175 return fold_build2_loc (loc, code, type,
11176 fold_convert_loc (loc, type,
11177 TREE_OPERAND (arg0, 0)),
11178 fold_convert_loc (loc, type,
11179 TREE_OPERAND (arg1, 0)));
11180
11181 /* Convert ~X ^ C to X ^ ~C. */
11182 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11183 && TREE_CODE (arg1) == INTEGER_CST)
11184 return fold_build2_loc (loc, code, type,
11185 fold_convert_loc (loc, type,
11186 TREE_OPERAND (arg0, 0)),
11187 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11188
11189 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11190 if (TREE_CODE (arg0) == BIT_AND_EXPR
11191 && INTEGRAL_TYPE_P (type)
11192 && integer_onep (TREE_OPERAND (arg0, 1))
11193 && integer_onep (arg1))
11194 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11195 build_zero_cst (TREE_TYPE (arg0)));
11196
11197 /* Fold (X & Y) ^ Y as ~X & Y. */
11198 if (TREE_CODE (arg0) == BIT_AND_EXPR
11199 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11200 {
11201 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11202 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11203 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11204 fold_convert_loc (loc, type, arg1));
11205 }
11206 /* Fold (X & Y) ^ X as ~Y & X. */
11207 if (TREE_CODE (arg0) == BIT_AND_EXPR
11208 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11209 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11210 {
11211 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11212 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11213 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11214 fold_convert_loc (loc, type, arg1));
11215 }
11216 /* Fold X ^ (X & Y) as X & ~Y. */
11217 if (TREE_CODE (arg1) == BIT_AND_EXPR
11218 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11219 {
11220 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11221 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11222 fold_convert_loc (loc, type, arg0),
11223 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11224 }
11225 /* Fold X ^ (Y & X) as ~Y & X. */
11226 if (TREE_CODE (arg1) == BIT_AND_EXPR
11227 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11228 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11229 {
11230 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11231 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11232 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11233 fold_convert_loc (loc, type, arg0));
11234 }
11235
11236 /* See if this can be simplified into a rotate first. If that
11237 is unsuccessful continue in the association code. */
11238 goto bit_rotate;
11239
11240 case BIT_AND_EXPR:
11241 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11242 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11243 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11244 || (TREE_CODE (arg0) == EQ_EXPR
11245 && integer_zerop (TREE_OPERAND (arg0, 1))))
11246 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11247 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11248
11249	      /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11250 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11251 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11252 || (TREE_CODE (arg1) == EQ_EXPR
11253 && integer_zerop (TREE_OPERAND (arg1, 1))))
11254 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11255 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11256
11257 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11258 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11259 && INTEGRAL_TYPE_P (type)
11260 && integer_onep (TREE_OPERAND (arg0, 1))
11261 && integer_onep (arg1))
11262 {
11263 tree tem2;
11264 tem = TREE_OPERAND (arg0, 0);
11265 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11266 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11267 tem, tem2);
11268 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11269 build_zero_cst (TREE_TYPE (tem)));
11270 }
11271 /* Fold ~X & 1 as (X & 1) == 0. */
11272 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11273 && INTEGRAL_TYPE_P (type)
11274 && integer_onep (arg1))
11275 {
11276 tree tem2;
11277 tem = TREE_OPERAND (arg0, 0);
11278 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11279 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11280 tem, tem2);
11281 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11282 build_zero_cst (TREE_TYPE (tem)));
11283 }
11284 /* Fold !X & 1 as X == 0. */
11285 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11286 && integer_onep (arg1))
11287 {
11288 tem = TREE_OPERAND (arg0, 0);
11289 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11290 build_zero_cst (TREE_TYPE (tem)));
11291 }
11292
11293 /* Fold (X ^ Y) & Y as ~X & Y. */
11294 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11295 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11296 {
11297 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11298 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11299 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11300 fold_convert_loc (loc, type, arg1));
11301 }
11302 /* Fold (X ^ Y) & X as ~Y & X. */
11303 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11304 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11305 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11306 {
11307 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11308 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11309 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11310 fold_convert_loc (loc, type, arg1));
11311 }
11312 /* Fold X & (X ^ Y) as X & ~Y. */
11313 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11314 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11315 {
11316 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11317 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11318 fold_convert_loc (loc, type, arg0),
11319 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11320 }
11321 /* Fold X & (Y ^ X) as ~Y & X. */
11322 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11323 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11324 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11325 {
11326 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11327 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11328 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11329 fold_convert_loc (loc, type, arg0));
11330 }
11331
11332 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11333 multiple of 1 << CST. */
11334 if (TREE_CODE (arg1) == INTEGER_CST)
11335 {
11336 wide_int cst1 = arg1;
11337 wide_int ncst1 = -cst1;
11338 if ((cst1 & ncst1) == ncst1
11339 && multiple_of_p (type, arg0,
11340 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11341 return fold_convert_loc (loc, type, arg0);
11342 }
11343
11344 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11345 bits from CST2. */
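      /* Sketch: x * 8 has its low three bits clear, so (x * 8) & 15
	 becomes (x * 8) & 8, while (x * 8) & 7 folds to plain 0.  */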
11346 if (TREE_CODE (arg1) == INTEGER_CST
11347 && TREE_CODE (arg0) == MULT_EXPR
11348 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11349 {
11350 wide_int warg1 = arg1;
11351 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11352
11353 if (masked == 0)
11354 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11355 arg0, arg1);
11356 else if (masked != warg1)
11357 {
11358 /* Avoid the transform if arg1 is a mask of some
11359 mode which allows further optimizations. */
11360 int pop = wi::popcount (warg1);
11361 if (!(pop >= BITS_PER_UNIT
11362 && exact_log2 (pop) != -1
11363 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11364 return fold_build2_loc (loc, code, type, op0,
11365 wide_int_to_tree (type, masked));
11366 }
11367 }
11368
11369 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11370 ((A & N) + B) & M -> (A + B) & M
11371 Similarly if (N & M) == 0,
11372 ((A | N) + B) & M -> (A + B) & M
11373 and for - instead of + (or unary - instead of +)
11374 and/or ^ instead of |.
11375 If B is constant and (B & M) == 0, fold into A & M. */
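      /* Worked sketch with M == 7 (cst == 3): since (15 & 7) == 7,
	 ((a & 15) + b) & 7 becomes (a + b) & 7; and since (8 & 7) == 0,
	 ((a | 8) - b) & 7 becomes (a - b) & 7.  */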
11376 if (TREE_CODE (arg1) == INTEGER_CST)
11377 {
11378 wide_int cst1 = arg1;
11379 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11380 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11381 && (TREE_CODE (arg0) == PLUS_EXPR
11382 || TREE_CODE (arg0) == MINUS_EXPR
11383 || TREE_CODE (arg0) == NEGATE_EXPR)
11384 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11385 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11386 {
11387 tree pmop[2];
11388 int which = 0;
11389 wide_int cst0;
11390
11391 /* Now we know that arg0 is (C + D) or (C - D) or
11392	               -C and arg1 (M) is (1LL << cst) - 1.
11393 Store C into PMOP[0] and D into PMOP[1]. */
11394 pmop[0] = TREE_OPERAND (arg0, 0);
11395 pmop[1] = NULL;
11396 if (TREE_CODE (arg0) != NEGATE_EXPR)
11397 {
11398 pmop[1] = TREE_OPERAND (arg0, 1);
11399 which = 1;
11400 }
11401
11402 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11403 which = -1;
11404
11405 for (; which >= 0; which--)
11406 switch (TREE_CODE (pmop[which]))
11407 {
11408 case BIT_AND_EXPR:
11409 case BIT_IOR_EXPR:
11410 case BIT_XOR_EXPR:
11411 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11412 != INTEGER_CST)
11413 break;
11414 cst0 = TREE_OPERAND (pmop[which], 1);
11415 cst0 &= cst1;
11416 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11417 {
11418 if (cst0 != cst1)
11419 break;
11420 }
11421 else if (cst0 != 0)
11422 break;
11423 /* If C or D is of the form (A & N) where
11424 (N & M) == M, or of the form (A | N) or
11425 (A ^ N) where (N & M) == 0, replace it with A. */
11426 pmop[which] = TREE_OPERAND (pmop[which], 0);
11427 break;
11428 case INTEGER_CST:
11429	              /* If C or D is an N where (N & M) == 0, it can be
11430 omitted (assumed 0). */
11431 if ((TREE_CODE (arg0) == PLUS_EXPR
11432 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11433 && (cst1 & pmop[which]) == 0)
11434 pmop[which] = NULL;
11435 break;
11436 default:
11437 break;
11438 }
11439
11440 /* Only build anything new if we optimized one or both arguments
11441 above. */
11442 if (pmop[0] != TREE_OPERAND (arg0, 0)
11443 || (TREE_CODE (arg0) != NEGATE_EXPR
11444 && pmop[1] != TREE_OPERAND (arg0, 1)))
11445 {
11446 tree utype = TREE_TYPE (arg0);
11447 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11448 {
11449 /* Perform the operations in a type that has defined
11450 overflow behavior. */
11451 utype = unsigned_type_for (TREE_TYPE (arg0));
11452 if (pmop[0] != NULL)
11453 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11454 if (pmop[1] != NULL)
11455 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11456 }
11457
11458 if (TREE_CODE (arg0) == NEGATE_EXPR)
11459 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11460 else if (TREE_CODE (arg0) == PLUS_EXPR)
11461 {
11462 if (pmop[0] != NULL && pmop[1] != NULL)
11463 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11464 pmop[0], pmop[1]);
11465 else if (pmop[0] != NULL)
11466 tem = pmop[0];
11467 else if (pmop[1] != NULL)
11468 tem = pmop[1];
11469 else
11470 return build_int_cst (type, 0);
11471 }
11472 else if (pmop[0] == NULL)
11473 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11474 else
11475 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11476 pmop[0], pmop[1]);
11477 /* TEM is now the new binary +, - or unary - replacement. */
11478 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11479 fold_convert_loc (loc, utype, arg1));
11480 return fold_convert_loc (loc, type, tem);
11481 }
11482 }
11483 }
11484
11485 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11486 if (t1 != NULL_TREE)
11487 return t1;
11488 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11489 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11490 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11491 {
11492 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11493
11494 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11495 if (mask == -1)
11496 return
11497 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11498 }
11499
11500 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11501
11502 This results in more efficient code for machines without a NOR
11503 instruction. Combine will canonicalize to the first form
11504 which will allow use of NOR instructions provided by the
11505 backend if they exist. */
11506 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11507 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11508 {
11509 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11510 build2 (BIT_IOR_EXPR, type,
11511 fold_convert_loc (loc, type,
11512 TREE_OPERAND (arg0, 0)),
11513 fold_convert_loc (loc, type,
11514 TREE_OPERAND (arg1, 0))));
11515 }
11516
11517 /* If arg0 is derived from the address of an object or function, we may
11518 be able to fold this expression using the object or function's
11519 alignment. */
11520 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11521 {
11522 unsigned HOST_WIDE_INT modulus, residue;
11523 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11524
11525 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11526 integer_onep (arg1));
11527
11528 /* This works because modulus is a power of 2. If this weren't the
11529 case, we'd have to replace it by its greatest power-of-2
11530 divisor: modulus & -modulus. */
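	  /* Sketch (assuming the pointed-to object is known to be 8-byte
	     aligned): modulus == 8 and residue == 0, so an expression like
	     p & 7 on such a pointer folds to 0.  */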
11531 if (low < modulus)
11532 return build_int_cst (type, residue & low);
11533 }
11534
11535 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11536 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11537 if the new mask might be further optimized. */
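      /* Sketch, assuming a 32-bit unsigned x: in (x >> 16) & 0xffff the
	 shift already clears the upper half, so the widened mask is
	 all-ones and the BIT_AND is redundant; in (x << 8) & 0xff00 the
	 eight known-zero low bits widen the mask to 0xffff, a mode mask
	 that may combine better downstream.  */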
11538 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11539 || TREE_CODE (arg0) == RSHIFT_EXPR)
11540 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11541 && TREE_CODE (arg1) == INTEGER_CST
11542 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11543 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11544 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11545 < TYPE_PRECISION (TREE_TYPE (arg0))))
11546 {
11547 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11548 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11549 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11550 tree shift_type = TREE_TYPE (arg0);
11551
11552 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11553 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11554 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11555 && TYPE_PRECISION (TREE_TYPE (arg0))
11556 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11557 {
11558 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11559 tree arg00 = TREE_OPERAND (arg0, 0);
11560 /* See if more bits can be proven as zero because of
11561 zero extension. */
11562 if (TREE_CODE (arg00) == NOP_EXPR
11563 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11564 {
11565 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11566 if (TYPE_PRECISION (inner_type)
11567 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11568 && TYPE_PRECISION (inner_type) < prec)
11569 {
11570 prec = TYPE_PRECISION (inner_type);
11571 /* See if we can shorten the right shift. */
11572 if (shiftc < prec)
11573 shift_type = inner_type;
11574 /* Otherwise X >> C1 is all zeros, so we'll optimize
11575 it into (X, 0) later on by making sure zerobits
11576 is all ones. */
11577 }
11578 }
11579 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11580 if (shiftc < prec)
11581 {
11582 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11583 zerobits <<= prec - shiftc;
11584 }
11585	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11586	         can actually contain sign bits, so no transformation is
11587 possible, unless MASK masks them all away. In that
11588 case the shift needs to be converted into logical shift. */
11589 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11590 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11591 {
11592 if ((mask & zerobits) == 0)
11593 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11594 else
11595 zerobits = 0;
11596 }
11597 }
11598
11599 /* ((X << 16) & 0xff00) is (X, 0). */
11600 if ((mask & zerobits) == mask)
11601 return omit_one_operand_loc (loc, type,
11602 build_int_cst (type, 0), arg0);
11603
11604 newmask = mask | zerobits;
11605 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11606 {
11607 /* Only do the transformation if NEWMASK is some integer
11608 mode's mask. */
11609 for (prec = BITS_PER_UNIT;
11610 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11611 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11612 break;
11613 if (prec < HOST_BITS_PER_WIDE_INT
11614 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11615 {
11616 tree newmaskt;
11617
11618 if (shift_type != TREE_TYPE (arg0))
11619 {
11620 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11621 fold_convert_loc (loc, shift_type,
11622 TREE_OPERAND (arg0, 0)),
11623 TREE_OPERAND (arg0, 1));
11624 tem = fold_convert_loc (loc, type, tem);
11625 }
11626 else
11627 tem = op0;
11628 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11629 if (!tree_int_cst_equal (newmaskt, arg1))
11630 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11631 }
11632 }
11633 }
11634
11635 goto associate;
11636
11637 case RDIV_EXPR:
11638 /* Don't touch a floating-point divide by zero unless the mode
11639 of the constant can represent infinity. */
11640 if (TREE_CODE (arg1) == REAL_CST
11641 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11642 && real_zerop (arg1))
11643 return NULL_TREE;
11644
11645 /* (-A) / (-B) -> A / B */
11646 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11647 return fold_build2_loc (loc, RDIV_EXPR, type,
11648 TREE_OPERAND (arg0, 0),
11649 negate_expr (arg1));
11650 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11651 return fold_build2_loc (loc, RDIV_EXPR, type,
11652 negate_expr (arg0),
11653 TREE_OPERAND (arg1, 0));
11654
11655 /* Convert A/B/C to A/(B*C). */
11656 if (flag_reciprocal_math
11657 && TREE_CODE (arg0) == RDIV_EXPR)
11658 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11659 fold_build2_loc (loc, MULT_EXPR, type,
11660 TREE_OPERAND (arg0, 1), arg1));
11661
11662 /* Convert A/(B/C) to (A/B)*C. */
11663 if (flag_reciprocal_math
11664 && TREE_CODE (arg1) == RDIV_EXPR)
11665 return fold_build2_loc (loc, MULT_EXPR, type,
11666 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11667 TREE_OPERAND (arg1, 0)),
11668 TREE_OPERAND (arg1, 1));
11669
11670 /* Convert C1/(X*C2) into (C1/C2)/X. */
11671 if (flag_reciprocal_math
11672 && TREE_CODE (arg1) == MULT_EXPR
11673 && TREE_CODE (arg0) == REAL_CST
11674 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11675 {
11676 tree tem = const_binop (RDIV_EXPR, arg0,
11677 TREE_OPERAND (arg1, 1));
11678 if (tem)
11679 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11680 TREE_OPERAND (arg1, 0));
11681 }
11682
11683 if (flag_unsafe_math_optimizations)
11684 {
11685 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11686 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11687
11688 /* Optimize sin(x)/cos(x) as tan(x). */
11689 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11690 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11691 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11692 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11693 CALL_EXPR_ARG (arg1, 0), 0))
11694 {
11695 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11696
11697 if (tanfn != NULL_TREE)
11698 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11699 }
11700
11701 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11702 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11703 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11704 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11705 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11706 CALL_EXPR_ARG (arg1, 0), 0))
11707 {
11708 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11709
11710 if (tanfn != NULL_TREE)
11711 {
11712 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11713 CALL_EXPR_ARG (arg0, 0));
11714 return fold_build2_loc (loc, RDIV_EXPR, type,
11715 build_real (type, dconst1), tmp);
11716 }
11717 }
11718
11719 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11720 NaNs or Infinities. */
11721 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11722 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11723 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11724 {
11725 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11726 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11727
11728 if (! HONOR_NANS (arg00)
11729 && ! HONOR_INFINITIES (element_mode (arg00))
11730 && operand_equal_p (arg00, arg01, 0))
11731 {
11732 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11733
11734 if (cosfn != NULL_TREE)
11735 return build_call_expr_loc (loc, cosfn, 1, arg00);
11736 }
11737 }
11738
11739 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11740 NaNs or Infinities. */
11741 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11742 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11743 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11744 {
11745 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11746 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11747
11748 if (! HONOR_NANS (arg00)
11749 && ! HONOR_INFINITIES (element_mode (arg00))
11750 && operand_equal_p (arg00, arg01, 0))
11751 {
11752 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11753
11754 if (cosfn != NULL_TREE)
11755 {
11756 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11757 return fold_build2_loc (loc, RDIV_EXPR, type,
11758 build_real (type, dconst1),
11759 tmp);
11760 }
11761 }
11762 }
11763
11764 /* Optimize pow(x,c)/x as pow(x,c-1). */
11765 if (fcode0 == BUILT_IN_POW
11766 || fcode0 == BUILT_IN_POWF
11767 || fcode0 == BUILT_IN_POWL)
11768 {
11769 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11770 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11771 if (TREE_CODE (arg01) == REAL_CST
11772 && !TREE_OVERFLOW (arg01)
11773 && operand_equal_p (arg1, arg00, 0))
11774 {
11775 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11776 REAL_VALUE_TYPE c;
11777 tree arg;
11778
11779 c = TREE_REAL_CST (arg01);
11780 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11781 arg = build_real (type, c);
11782 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11783 }
11784 }
11785
11786 /* Optimize a/root(b/c) into a*root(c/b). */
11787 if (BUILTIN_ROOT_P (fcode1))
11788 {
11789 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11790
11791 if (TREE_CODE (rootarg) == RDIV_EXPR)
11792 {
11793 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11794 tree b = TREE_OPERAND (rootarg, 0);
11795 tree c = TREE_OPERAND (rootarg, 1);
11796
11797 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11798
11799 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11800 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11801 }
11802 }
11803
11804 /* Optimize x/expN(y) into x*expN(-y). */
11805 if (BUILTIN_EXPONENT_P (fcode1))
11806 {
11807 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11808 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11809 arg1 = build_call_expr_loc (loc,
11810 expfn, 1,
11811 fold_convert_loc (loc, type, arg));
11812 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11813 }
11814
11815 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11816 if (fcode1 == BUILT_IN_POW
11817 || fcode1 == BUILT_IN_POWF
11818 || fcode1 == BUILT_IN_POWL)
11819 {
11820 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11821 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11822 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11823 tree neg11 = fold_convert_loc (loc, type,
11824 negate_expr (arg11));
11825 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11826 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11827 }
11828 }
11829 return NULL_TREE;
11830
11831 case TRUNC_DIV_EXPR:
11832 /* Optimize (X & (-A)) / A where A is a power of 2,
11833	 to X >> log2(A).  */
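      /* Sketch: for signed x, (x & -8) / 8 becomes x >> 3; the AND keeps
	 only multiples of 8, so the division is exact.  */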
11834 if (TREE_CODE (arg0) == BIT_AND_EXPR
11835 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11836 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11837 {
11838 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11839 arg1, TREE_OPERAND (arg0, 1));
11840	      if (sum && integer_zerop (sum))
11841	        {
11842	          tree pow2 = build_int_cst (integer_type_node, wi::exact_log2 (arg1));
11843	          return fold_build2_loc (loc, RSHIFT_EXPR, type,
11844	                                  TREE_OPERAND (arg0, 0), pow2);
11845	        }
11846 }
11847
11848 /* Fall through */
11849
11850 case FLOOR_DIV_EXPR:
11851 /* Simplify A / (B << N) where A and B are positive and B is
11852 a power of 2, to A >> (N + log2(B)). */
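      /* Sketch: for unsigned a, a / (4 << n) becomes a >> (n + 2).  */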
11853 strict_overflow_p = false;
11854 if (TREE_CODE (arg1) == LSHIFT_EXPR
11855 && (TYPE_UNSIGNED (type)
11856 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11857 {
11858 tree sval = TREE_OPERAND (arg1, 0);
11859 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11860 {
11861 tree sh_cnt = TREE_OPERAND (arg1, 1);
11862 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11863 wi::exact_log2 (sval));
11864
11865 if (strict_overflow_p)
11866 fold_overflow_warning (("assuming signed overflow does not "
11867 "occur when simplifying A / (B << N)"),
11868 WARN_STRICT_OVERFLOW_MISC);
11869
11870 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11871 sh_cnt, pow2);
11872 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11873 fold_convert_loc (loc, type, arg0), sh_cnt);
11874 }
11875 }
11876
11877 /* Fall through */
11878
11879 case ROUND_DIV_EXPR:
11880 case CEIL_DIV_EXPR:
11881 case EXACT_DIV_EXPR:
11882 if (integer_zerop (arg1))
11883 return NULL_TREE;
11884
11885 /* Convert -A / -B to A / B when the type is signed and overflow is
11886 undefined. */
11887 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11888 && TREE_CODE (arg0) == NEGATE_EXPR
11889 && negate_expr_p (arg1))
11890 {
11891 if (INTEGRAL_TYPE_P (type))
11892 fold_overflow_warning (("assuming signed overflow does not occur "
11893 "when distributing negation across "
11894 "division"),
11895 WARN_STRICT_OVERFLOW_MISC);
11896 return fold_build2_loc (loc, code, type,
11897 fold_convert_loc (loc, type,
11898 TREE_OPERAND (arg0, 0)),
11899 fold_convert_loc (loc, type,
11900 negate_expr (arg1)));
11901 }
11902 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11903 && TREE_CODE (arg1) == NEGATE_EXPR
11904 && negate_expr_p (arg0))
11905 {
11906 if (INTEGRAL_TYPE_P (type))
11907 fold_overflow_warning (("assuming signed overflow does not occur "
11908 "when distributing negation across "
11909 "division"),
11910 WARN_STRICT_OVERFLOW_MISC);
11911 return fold_build2_loc (loc, code, type,
11912 fold_convert_loc (loc, type,
11913 negate_expr (arg0)),
11914 fold_convert_loc (loc, type,
11915 TREE_OPERAND (arg1, 0)));
11916 }
11917
11918 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11919 operation, EXACT_DIV_EXPR.
11920
11921 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11922 At one time others generated faster code; it's not clear whether they
11923 still do after the last round of changes to the DIV code in expmed.c. */
11924 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11925 && multiple_of_p (type, arg0, arg1))
11926 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11927
11928 strict_overflow_p = false;
11929 if (TREE_CODE (arg1) == INTEGER_CST
11930 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11931 &strict_overflow_p)))
11932 {
11933 if (strict_overflow_p)
11934 fold_overflow_warning (("assuming signed overflow does not occur "
11935 "when simplifying division"),
11936 WARN_STRICT_OVERFLOW_MISC);
11937 return fold_convert_loc (loc, type, tem);
11938 }
11939
11940 return NULL_TREE;
11941
11942 case CEIL_MOD_EXPR:
11943 case FLOOR_MOD_EXPR:
11944 case ROUND_MOD_EXPR:
11945 case TRUNC_MOD_EXPR:
11946 /* X % -Y is the same as X % Y. */
11947 if (code == TRUNC_MOD_EXPR
11948 && !TYPE_UNSIGNED (type)
11949 && TREE_CODE (arg1) == NEGATE_EXPR
11950 && !TYPE_OVERFLOW_TRAPS (type))
11951 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11952 fold_convert_loc (loc, type,
11953 TREE_OPERAND (arg1, 0)));
11954
11955 strict_overflow_p = false;
11956 if (TREE_CODE (arg1) == INTEGER_CST
11957 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11958 &strict_overflow_p)))
11959 {
11960 if (strict_overflow_p)
11961 fold_overflow_warning (("assuming signed overflow does not occur "
11962 "when simplifying modulus"),
11963 WARN_STRICT_OVERFLOW_MISC);
11964 return fold_convert_loc (loc, type, tem);
11965 }
11966
11967 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11968 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11969 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11970 && (TYPE_UNSIGNED (type)
11971 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11972 {
11973 tree c = arg1;
11974 /* Also optimize A % (C << N) where C is a power of 2,
11975 to A & ((C << N) - 1). */
11976 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11977 c = TREE_OPERAND (arg1, 0);
11978
11979 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11980 {
11981 tree mask
11982 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11983 build_int_cst (TREE_TYPE (arg1), 1));
11984 if (strict_overflow_p)
11985 fold_overflow_warning (("assuming signed overflow does not "
11986 "occur when simplifying "
11987 "X % (power of two)"),
11988 WARN_STRICT_OVERFLOW_MISC);
11989 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11990 fold_convert_loc (loc, type, arg0),
11991 fold_convert_loc (loc, type, mask));
11992 }
11993 }
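/* Illustrative sketch: for an unsigned operand,

       unsigned f (unsigned x) { return x % 8; }

   folds to x & 7, and the shifted form a % (2u << n) folds to
   a & ((2u << n) - 1).  */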
11994
11995 return NULL_TREE;
11996
11997 case LROTATE_EXPR:
11998 case RROTATE_EXPR:
11999 case RSHIFT_EXPR:
12000 case LSHIFT_EXPR:
12001 /* Since a negative shift count is not well-defined,
12002 don't try to compute it in the compiler. */
12003 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12004 return NULL_TREE;
12005
12006 prec = element_precision (type);
12007
12008 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
12009 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12010 && tree_to_uhwi (arg1) < prec
12011 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12012 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12013 {
12014 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12015 + tree_to_uhwi (arg1));
12016
12017 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12018 being well defined. */
12019 if (low >= prec)
12020 {
12021 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12022 low = low % prec;
12023 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12024 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12025 TREE_OPERAND (arg0, 0));
12026 else
12027 low = prec - 1;
12028 }
12029
12030 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12031 build_int_cst (TREE_TYPE (arg1), low));
12032 }
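/* Illustrative sketch: for 32-bit int,

       int f (int x) { return (x << 3) << 5; }

   folds to x << 8, while (x << 20) << 20 folds to zero because the
   combined count reaches the precision; rotate counts instead wrap
   modulo 32.  */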
12033
12034 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12035 into x & ((unsigned)-1 >> c) for unsigned types. */
12036 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12037 || (TYPE_UNSIGNED (type)
12038 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12039 && tree_fits_uhwi_p (arg1)
12040 && tree_to_uhwi (arg1) < prec
12041 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12042 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12043 {
12044 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12045 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12046 tree lshift;
12047 tree arg00;
12048
12049 if (low0 == low1)
12050 {
12051 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12052
12053 lshift = build_minus_one_cst (type);
12054 lshift = const_binop (code, lshift, arg1);
12055
12056 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12057 }
12058 }
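/* Illustrative sketch: for 32-bit operands,

       int f (int x) { return (x >> 4) << 4; }

   folds to x & -16 (that is, x & (-1 << 4)), and for unsigned x,
   (x << 4) >> 4 folds to x & 0x0fffffff.  */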
12059
12060 /* If we have a rotate of a bit operation with the rotate count and
12061 the second operand of the bit operation both constant,
12062 permute the two operations. */
12063 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12064 && (TREE_CODE (arg0) == BIT_AND_EXPR
12065 || TREE_CODE (arg0) == BIT_IOR_EXPR
12066 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12067 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12068 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12069 fold_build2_loc (loc, code, type,
12070 TREE_OPERAND (arg0, 0), arg1),
12071 fold_build2_loc (loc, code, type,
12072 TREE_OPERAND (arg0, 1), arg1));
12073
12074 /* Two consecutive rotates adding up to some integer
12075 multiple of the precision of the type can be ignored. */
12076 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12077 && TREE_CODE (arg0) == RROTATE_EXPR
12078 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12079 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12080 prec) == 0)
12081 return TREE_OPERAND (arg0, 0);
12082
12083 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12084 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12085 if the latter can be further optimized. */
12086 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12087 && TREE_CODE (arg0) == BIT_AND_EXPR
12088 && TREE_CODE (arg1) == INTEGER_CST
12089 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12090 {
12091 tree mask = fold_build2_loc (loc, code, type,
12092 fold_convert_loc (loc, type,
12093 TREE_OPERAND (arg0, 1)),
12094 arg1);
12095 tree shift = fold_build2_loc (loc, code, type,
12096 fold_convert_loc (loc, type,
12097 TREE_OPERAND (arg0, 0)),
12098 arg1);
12099 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12100 if (tem)
12101 return tem;
12102 }
12103
12104 return NULL_TREE;
12105
12106 case MIN_EXPR:
12107 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12108 if (tem)
12109 return tem;
12110 goto associate;
12111
12112 case MAX_EXPR:
12113 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12114 if (tem)
12115 return tem;
12116 goto associate;
12117
12118 case TRUTH_ANDIF_EXPR:
12119 /* Note that the operands of this must be ints
12120 and their values must be 0 or 1.
12121 ("true" is a fixed value perhaps depending on the language.) */
12122 /* If first arg is constant zero, return it. */
12123 if (integer_zerop (arg0))
12124 return fold_convert_loc (loc, type, arg0);
12125 case TRUTH_AND_EXPR:
12126 /* If either arg is constant true, drop it. */
12127 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12128 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12129 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12130 /* Preserve sequence points. */
12131 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12132 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12133 /* If second arg is constant zero, result is zero, but first arg
12134 must be evaluated. */
12135 if (integer_zerop (arg1))
12136 return omit_one_operand_loc (loc, type, arg1, arg0);
12137 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12138 case will be handled here. */
12139 if (integer_zerop (arg0))
12140 return omit_one_operand_loc (loc, type, arg0, arg1);
12141
12142 /* !X && X is always false. */
12143 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12144 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12145 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12146 /* X && !X is always false. */
12147 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12148 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12149 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12150
12151 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12152 means A >= Y && A != MAX, but in this case we know that
12153 A < X <= MAX. */
12154
12155 if (!TREE_SIDE_EFFECTS (arg0)
12156 && !TREE_SIDE_EFFECTS (arg1))
12157 {
12158 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12159 if (tem && !operand_equal_p (tem, arg0, 0))
12160 return fold_build2_loc (loc, code, type, tem, arg1);
12161
12162 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12163 if (tem && !operand_equal_p (tem, arg1, 0))
12164 return fold_build2_loc (loc, code, type, arg0, tem);
12165 }
12166
12167 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12168 != NULL_TREE)
12169 return tem;
12170
12171 return NULL_TREE;
12172
12173 case TRUTH_ORIF_EXPR:
12174 /* Note that the operands of this must be ints
12175 and their values must be 0 or 1.
12176 ("true" is a fixed value perhaps depending on the language.) */
12177 /* If first arg is constant true, return it. */
12178 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12179 return fold_convert_loc (loc, type, arg0);
12180 case TRUTH_OR_EXPR:
12181 /* If either arg is constant zero, drop it. */
12182 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12183 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12184 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12185 /* Preserve sequence points. */
12186 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12187 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12188 /* If second arg is constant true, result is true, but we must
12189 evaluate first arg. */
12190 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12191 return omit_one_operand_loc (loc, type, arg1, arg0);
12192 /* Likewise for first arg, but note this only occurs here for
12193 TRUTH_OR_EXPR. */
12194 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12195 return omit_one_operand_loc (loc, type, arg0, arg1);
12196
12197 /* !X || X is always true. */
12198 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12200 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12201 /* X || !X is always true. */
12202 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12203 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12204 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12205
12206 /* (X && !Y) || (!X && Y) is X ^ Y */
12207 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12208 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12209 {
12210 tree a0, a1, l0, l1, n0, n1;
12211
12212 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12213 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12214
12215 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12216 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12217
12218 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12219 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12220
12221 if ((operand_equal_p (n0, a0, 0)
12222 && operand_equal_p (n1, a1, 0))
12223 || (operand_equal_p (n0, a1, 0)
12224 && operand_equal_p (n1, a0, 0)))
12225 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12226 }
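/* Illustrative sketch: once the short-circuit forms have been reduced
   to plain TRUTH_AND_EXPRs (the operands below have no side effects),

       int f (_Bool a, _Bool b) { return (a && !b) || (!a && b); }

   folds to the single TRUTH_XOR_EXPR a ^ b.  */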
12227
12228 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12229 != NULL_TREE)
12230 return tem;
12231
12232 return NULL_TREE;
12233
12234 case TRUTH_XOR_EXPR:
12235 /* If the second arg is constant zero, drop it. */
12236 if (integer_zerop (arg1))
12237 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12238 /* If the second arg is constant true, this is a logical inversion. */
12239 if (integer_onep (arg1))
12240 {
12241 tem = invert_truthvalue_loc (loc, arg0);
12242 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12243 }
12244 /* Identical arguments cancel to zero. */
12245 if (operand_equal_p (arg0, arg1, 0))
12246 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12247
12248 /* !X ^ X is always true. */
12249 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12250 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12251 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12252
12253 /* X ^ !X is always true. */
12254 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12255 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12256 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12257
12258 return NULL_TREE;
12259
12260 case EQ_EXPR:
12261 case NE_EXPR:
12262 STRIP_NOPS (arg0);
12263 STRIP_NOPS (arg1);
12264
12265 tem = fold_comparison (loc, code, type, op0, op1);
12266 if (tem != NULL_TREE)
12267 return tem;
12268
12269 /* bool_var != 0 becomes bool_var. */
12270 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12271 && code == NE_EXPR)
12272 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12273
12274 /* bool_var == 1 becomes bool_var. */
12275 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12276 && code == EQ_EXPR)
12277 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12278
12279 /* bool_var != 1 becomes !bool_var. */
12280 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12281 && code == NE_EXPR)
12282 return fold_convert_loc (loc, type,
12283 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12284 TREE_TYPE (arg0), arg0));
12285
12286 /* bool_var == 0 becomes !bool_var. */
12287 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12288 && code == EQ_EXPR)
12289 return fold_convert_loc (loc, type,
12290 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12291 TREE_TYPE (arg0), arg0));
12292
12293 /* !exp != 0 becomes !exp */
12294 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12295 && code == NE_EXPR)
12296 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12297
12298 /* If this is an equality comparison of the address of two non-weak,
12299 unaliased symbols neither of which is extern (since we do not
12300 have access to attributes for externs), then we know the result. */
12301 if (TREE_CODE (arg0) == ADDR_EXPR
12302 && DECL_P (TREE_OPERAND (arg0, 0))
12303 && TREE_CODE (arg1) == ADDR_EXPR
12304 && DECL_P (TREE_OPERAND (arg1, 0)))
12305 {
12306 int equal;
12307
12308 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12309 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12310 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12311 ->equal_address_to (symtab_node::get_create
12312 (TREE_OPERAND (arg1, 0)));
12313 else
12314 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12315 if (equal != 2)
12316 return constant_boolean_node (equal
12317 ? code == EQ_EXPR : code != EQ_EXPR,
12318 type);
12319 }
12320
12321 /* Fold -X eq/ne CST to X eq/ne -CST when negating CST does not overflow. */
12322 if (TREE_CODE (arg0) == NEGATE_EXPR
12323 && TREE_CODE (arg1) == INTEGER_CST
12324 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12325 arg1)))
12326 && TREE_CODE (tem) == INTEGER_CST
12327 && !TREE_OVERFLOW (tem))
12328 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12329
12330 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12331 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12332 && TREE_CODE (arg1) == INTEGER_CST
12333 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12334 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12335 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12336 fold_convert_loc (loc,
12337 TREE_TYPE (arg0),
12338 arg1),
12339 TREE_OPERAND (arg0, 1)));
12340
12341 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12342 if ((TREE_CODE (arg0) == PLUS_EXPR
12343 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12344 || TREE_CODE (arg0) == MINUS_EXPR)
12345 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12346 0)),
12347 arg1, 0)
12348 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12349 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12350 {
12351 tree val = TREE_OPERAND (arg0, 1);
12352 return omit_two_operands_loc (loc, type,
12353 fold_build2_loc (loc, code, type,
12354 val,
12355 build_int_cst (TREE_TYPE (val),
12356 0)),
12357 TREE_OPERAND (arg0, 0), arg1);
12358 }
12359
12360 /* Transform C - X CMP X when C is odd: C - X == X would need C == 2*X. */
12361 if (TREE_CODE (arg0) == MINUS_EXPR
12362 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12363 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12364 1)),
12365 arg1, 0)
12366 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12367 {
12368 return omit_two_operands_loc (loc, type,
12369 code == NE_EXPR
12370 ? boolean_true_node : boolean_false_node,
12371 TREE_OPERAND (arg0, 1), arg1);
12372 }
12373
12374 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12375 if (TREE_CODE (arg0) == ABS_EXPR
12376 && (integer_zerop (arg1) || real_zerop (arg1)))
12377 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12378
12379 /* If this is an EQ or NE comparison with zero and ARG0 is
12380 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12381 two operations, but the latter can be done in one less insn
12382 on machines that have only two-operand insns or on which a
12383 constant cannot be the first operand. */
12384 if (TREE_CODE (arg0) == BIT_AND_EXPR
12385 && integer_zerop (arg1))
12386 {
12387 tree arg00 = TREE_OPERAND (arg0, 0);
12388 tree arg01 = TREE_OPERAND (arg0, 1);
12389 if (TREE_CODE (arg00) == LSHIFT_EXPR
12390 && integer_onep (TREE_OPERAND (arg00, 0)))
12391 {
12392 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12393 arg01, TREE_OPERAND (arg00, 1));
12394 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12395 build_int_cst (TREE_TYPE (arg0), 1));
12396 return fold_build2_loc (loc, code, type,
12397 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12398 arg1);
12399 }
12400 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12401 && integer_onep (TREE_OPERAND (arg01, 0)))
12402 {
12403 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12404 arg00, TREE_OPERAND (arg01, 1));
12405 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12406 build_int_cst (TREE_TYPE (arg0), 1));
12407 return fold_build2_loc (loc, code, type,
12408 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12409 arg1);
12410 }
12411 }
12412
12413 /* If this is an NE or EQ comparison of zero against the result of a
12414 signed MOD operation whose second operand is a power of 2, make
12415 the MOD operation unsigned since it is simpler and equivalent. */
12416 if (integer_zerop (arg1)
12417 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12418 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12419 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12420 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12421 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12422 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12423 {
12424 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12425 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12426 fold_convert_loc (loc, newtype,
12427 TREE_OPERAND (arg0, 0)),
12428 fold_convert_loc (loc, newtype,
12429 TREE_OPERAND (arg0, 1)));
12430
12431 return fold_build2_loc (loc, code, type, newmod,
12432 fold_convert_loc (loc, newtype, arg1));
12433 }
12434
12435 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12436 C1 is a valid shift constant, and C2 is a power of two, i.e.
12437 a single bit. */
12438 if (TREE_CODE (arg0) == BIT_AND_EXPR
12439 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12440 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12441 == INTEGER_CST
12442 && integer_pow2p (TREE_OPERAND (arg0, 1))
12443 && integer_zerop (arg1))
12444 {
12445 tree itype = TREE_TYPE (arg0);
12446 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12447 prec = TYPE_PRECISION (itype);
12448
12449 /* Check for a valid shift count. */
12450 if (wi::ltu_p (arg001, prec))
12451 {
12452 tree arg01 = TREE_OPERAND (arg0, 1);
12453 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12454 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12455 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12456 can be rewritten as (X & (C2 << C1)) != 0. */
12457 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12458 {
12459 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12460 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12461 return fold_build2_loc (loc, code, type, tem,
12462 fold_convert_loc (loc, itype, arg1));
12463 }
12464 /* Otherwise, for signed (arithmetic) shifts,
12465 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12466 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12467 else if (!TYPE_UNSIGNED (itype))
12468 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12469 arg000, build_int_cst (itype, 0));
12470 /* Otherwise, for unsigned (logical) shifts,
12471 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12472 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12473 else
12474 return omit_one_operand_loc (loc, type,
12475 code == EQ_EXPR ? integer_one_node
12476 : integer_zero_node,
12477 arg000);
12478 }
12479 }
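/* Illustrative sketch: for 32-bit int x,

       ((x >> 3) & 4) != 0   folds to   (x & 32) != 0

   because 4 << 3 does not overflow, whereas ((x >> 29) & 8) != 0 has
   log2(8) + 29 == 32 == prec, so for signed x it folds to x < 0.  */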
12480
12481 /* If we have (A & C) == C where C is a power of 2, convert this into
12482 (A & C) != 0. Similarly for NE_EXPR. */
12483 if (TREE_CODE (arg0) == BIT_AND_EXPR
12484 && integer_pow2p (TREE_OPERAND (arg0, 1))
12485 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12486 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12487 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12488 integer_zero_node));
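/* Illustrative sketch: with the power-of-2 constant 8,

       (x & 8) == 8   folds to   (x & 8) != 0

   comparing the masked value against zero instead of against C.  */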
12489
12490 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12491 bit, then fold the expression into A < 0 or A >= 0. */
12492 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12493 if (tem)
12494 return tem;
12495
12496 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12497 Similarly for NE_EXPR. */
12498 if (TREE_CODE (arg0) == BIT_AND_EXPR
12499 && TREE_CODE (arg1) == INTEGER_CST
12500 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12501 {
12502 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12503 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12504 TREE_OPERAND (arg0, 1));
12505 tree dandnotc
12506 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12507 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12508 notc);
12509 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12510 if (integer_nonzerop (dandnotc))
12511 return omit_one_operand_loc (loc, type, rslt, arg0);
12512 }
12513
12514 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12515 Similarly for NE_EXPR. */
12516 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12517 && TREE_CODE (arg1) == INTEGER_CST
12518 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12519 {
12520 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12521 tree candnotd
12522 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12523 TREE_OPERAND (arg0, 1),
12524 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12525 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12526 if (integer_nonzerop (candnotd))
12527 return omit_one_operand_loc (loc, type, rslt, arg0);
12528 }
12529
12530 /* If this is a comparison of a field, we may be able to simplify it. */
12531 if ((TREE_CODE (arg0) == COMPONENT_REF
12532 || TREE_CODE (arg0) == BIT_FIELD_REF)
12533 /* Handle the constant case even without -O
12534 to make sure the warnings are given. */
12535 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12536 {
12537 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12538 if (t1)
12539 return t1;
12540 }
12541
12542 /* Optimize comparisons of strlen vs zero to a compare of the
12543 first character of the string vs zero. To wit,
12544 strlen(ptr) == 0 => *ptr == 0
12545 strlen(ptr) != 0 => *ptr != 0
12546 Other cases should reduce to one of these two (or a constant)
12547 due to the return value of strlen being unsigned. */
12548 if (TREE_CODE (arg0) == CALL_EXPR
12549 && integer_zerop (arg1))
12550 {
12551 tree fndecl = get_callee_fndecl (arg0);
12552
12553 if (fndecl
12554 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12555 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12556 && call_expr_nargs (arg0) == 1
12557 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12558 {
12559 tree iref = build_fold_indirect_ref_loc (loc,
12560 CALL_EXPR_ARG (arg0, 0));
12561 return fold_build2_loc (loc, code, type, iref,
12562 build_int_cst (TREE_TYPE (iref), 0));
12563 }
12564 }
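/* Illustrative sketch: a guard such as

       if (strlen (s) == 0)

   is folded to the single-character test *s == 0, relying only on the
   validity of S that strlen itself already requires.  */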
12565
12566 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12567 of X. Similarly fold (X >> C) == 0 into X >= 0. */
12568 if (TREE_CODE (arg0) == RSHIFT_EXPR
12569 && integer_zerop (arg1)
12570 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12571 {
12572 tree arg00 = TREE_OPERAND (arg0, 0);
12573 tree arg01 = TREE_OPERAND (arg0, 1);
12574 tree itype = TREE_TYPE (arg00);
12575 if (wi::eq_p (arg01, element_precision (itype) - 1))
12576 {
12577 if (TYPE_UNSIGNED (itype))
12578 {
12579 itype = signed_type_for (itype);
12580 arg00 = fold_convert_loc (loc, itype, arg00);
12581 }
12582 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12583 type, arg00, build_zero_cst (itype));
12584 }
12585 }
12586
12587 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12588 if (integer_zerop (arg1)
12589 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12590 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12591 TREE_OPERAND (arg0, 1));
12592
12593 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12594 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12595 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12596 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12597 build_zero_cst (TREE_TYPE (arg0)));
12598 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12599 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12600 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12601 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12602 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12603 build_zero_cst (TREE_TYPE (arg0)));
12604
12605 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12606 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12607 && TREE_CODE (arg1) == INTEGER_CST
12608 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12609 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12610 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12611 TREE_OPERAND (arg0, 1), arg1));
12612
12613 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12614 (X & C) == 0 when C is a single bit. */
12615 if (TREE_CODE (arg0) == BIT_AND_EXPR
12616 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12617 && integer_zerop (arg1)
12618 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12619 {
12620 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12621 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12622 TREE_OPERAND (arg0, 1));
12623 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12624 type, tem,
12625 fold_convert_loc (loc, TREE_TYPE (arg0),
12626 arg1));
12627 }
12628
12629 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12630 constant C is a power of two, i.e. a single bit. */
12631 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12632 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12633 && integer_zerop (arg1)
12634 && integer_pow2p (TREE_OPERAND (arg0, 1))
12635 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12636 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12637 {
12638 tree arg00 = TREE_OPERAND (arg0, 0);
12639 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12640 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12641 }
12642
12643 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12644 when C is a power of two, i.e. a single bit. */
12645 if (TREE_CODE (arg0) == BIT_AND_EXPR
12646 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12647 && integer_zerop (arg1)
12648 && integer_pow2p (TREE_OPERAND (arg0, 1))
12649 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12650 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12651 {
12652 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12653 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12654 arg000, TREE_OPERAND (arg0, 1));
12655 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12656 tem, build_int_cst (TREE_TYPE (tem), 0));
12657 }
12658
12659 if (integer_zerop (arg1)
12660 && tree_expr_nonzero_p (arg0))
12661 {
12662 tree res = constant_boolean_node (code == NE_EXPR, type);
12663 return omit_one_operand_loc (loc, type, res, arg0);
12664 }
12665
12666 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12667 if (TREE_CODE (arg0) == NEGATE_EXPR
12668 && TREE_CODE (arg1) == NEGATE_EXPR)
12669 return fold_build2_loc (loc, code, type,
12670 TREE_OPERAND (arg0, 0),
12671 fold_convert_loc (loc, TREE_TYPE (arg0),
12672 TREE_OPERAND (arg1, 0)));
12673
12674 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
12675 if (TREE_CODE (arg0) == BIT_AND_EXPR
12676 && TREE_CODE (arg1) == BIT_AND_EXPR)
12677 {
12678 tree arg00 = TREE_OPERAND (arg0, 0);
12679 tree arg01 = TREE_OPERAND (arg0, 1);
12680 tree arg10 = TREE_OPERAND (arg1, 0);
12681 tree arg11 = TREE_OPERAND (arg1, 1);
12682 tree itype = TREE_TYPE (arg0);
12683
12684 if (operand_equal_p (arg01, arg11, 0))
12685 return fold_build2_loc (loc, code, type,
12686 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12687 fold_build2_loc (loc,
12688 BIT_XOR_EXPR, itype,
12689 arg00, arg10),
12690 arg01),
12691 build_zero_cst (itype));
12692
12693 if (operand_equal_p (arg01, arg10, 0))
12694 return fold_build2_loc (loc, code, type,
12695 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12696 fold_build2_loc (loc,
12697 BIT_XOR_EXPR, itype,
12698 arg00, arg11),
12699 arg01),
12700 build_zero_cst (itype));
12701
12702 if (operand_equal_p (arg00, arg11, 0))
12703 return fold_build2_loc (loc, code, type,
12704 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12705 fold_build2_loc (loc,
12706 BIT_XOR_EXPR, itype,
12707 arg01, arg10),
12708 arg00),
12709 build_zero_cst (itype));
12710
12711 if (operand_equal_p (arg00, arg10, 0))
12712 return fold_build2_loc (loc, code, type,
12713 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12714 fold_build2_loc (loc,
12715 BIT_XOR_EXPR, itype,
12716 arg01, arg11),
12717 arg00),
12718 build_zero_cst (itype));
12719 }
12720
12721 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12722 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12723 {
12724 tree arg00 = TREE_OPERAND (arg0, 0);
12725 tree arg01 = TREE_OPERAND (arg0, 1);
12726 tree arg10 = TREE_OPERAND (arg1, 0);
12727 tree arg11 = TREE_OPERAND (arg1, 1);
12728 tree itype = TREE_TYPE (arg0);
12729
12730 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12731 operand_equal_p guarantees no side-effects so we don't need
12732 to use omit_one_operand on Z. */
12733 if (operand_equal_p (arg01, arg11, 0))
12734 return fold_build2_loc (loc, code, type, arg00,
12735 fold_convert_loc (loc, TREE_TYPE (arg00),
12736 arg10));
12737 if (operand_equal_p (arg01, arg10, 0))
12738 return fold_build2_loc (loc, code, type, arg00,
12739 fold_convert_loc (loc, TREE_TYPE (arg00),
12740 arg11));
12741 if (operand_equal_p (arg00, arg11, 0))
12742 return fold_build2_loc (loc, code, type, arg01,
12743 fold_convert_loc (loc, TREE_TYPE (arg01),
12744 arg10));
12745 if (operand_equal_p (arg00, arg10, 0))
12746 return fold_build2_loc (loc, code, type, arg01,
12747 fold_convert_loc (loc, TREE_TYPE (arg01),
12748 arg11));
12749
12750 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12751 if (TREE_CODE (arg01) == INTEGER_CST
12752 && TREE_CODE (arg11) == INTEGER_CST)
12753 {
12754 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12755 fold_convert_loc (loc, itype, arg11));
12756 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12757 return fold_build2_loc (loc, code, type, tem,
12758 fold_convert_loc (loc, itype, arg10));
12759 }
12760 }
12761
12762 /* Attempt to simplify equality/inequality comparisons of complex
12763 values. Only lower the comparison if the result is known or
12764 can be simplified to a single scalar comparison. */
12765 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12766 || TREE_CODE (arg0) == COMPLEX_CST)
12767 && (TREE_CODE (arg1) == COMPLEX_EXPR
12768 || TREE_CODE (arg1) == COMPLEX_CST))
12769 {
12770 tree real0, imag0, real1, imag1;
12771 tree rcond, icond;
12772
12773 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12774 {
12775 real0 = TREE_OPERAND (arg0, 0);
12776 imag0 = TREE_OPERAND (arg0, 1);
12777 }
12778 else
12779 {
12780 real0 = TREE_REALPART (arg0);
12781 imag0 = TREE_IMAGPART (arg0);
12782 }
12783
12784 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12785 {
12786 real1 = TREE_OPERAND (arg1, 0);
12787 imag1 = TREE_OPERAND (arg1, 1);
12788 }
12789 else
12790 {
12791 real1 = TREE_REALPART (arg1);
12792 imag1 = TREE_IMAGPART (arg1);
12793 }
12794
12795 rcond = fold_binary_loc (loc, code, type, real0, real1);
12796 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12797 {
12798 if (integer_zerop (rcond))
12799 {
12800 if (code == EQ_EXPR)
12801 return omit_two_operands_loc (loc, type, boolean_false_node,
12802 imag0, imag1);
12803 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12804 }
12805 else
12806 {
12807 if (code == NE_EXPR)
12808 return omit_two_operands_loc (loc, type, boolean_true_node,
12809 imag0, imag1);
12810 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12811 }
12812 }
12813
12814 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12815 if (icond && TREE_CODE (icond) == INTEGER_CST)
12816 {
12817 if (integer_zerop (icond))
12818 {
12819 if (code == EQ_EXPR)
12820 return omit_two_operands_loc (loc, type, boolean_false_node,
12821 real0, real1);
12822 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12823 }
12824 else
12825 {
12826 if (code == NE_EXPR)
12827 return omit_two_operands_loc (loc, type, boolean_true_node,
12828 real0, real1);
12829 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12830 }
12831 }
12832 }
12833
12834 return NULL_TREE;
12835
12836 case LT_EXPR:
12837 case GT_EXPR:
12838 case LE_EXPR:
12839 case GE_EXPR:
12840 tem = fold_comparison (loc, code, type, op0, op1);
12841 if (tem != NULL_TREE)
12842 return tem;
12843
12844 /* Transform comparisons of the form X +- C CMP X. */
12845 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12846 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12847 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12848 && !HONOR_SNANS (arg0))
12849 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12850 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12851 {
12852 tree arg01 = TREE_OPERAND (arg0, 1);
12853 enum tree_code code0 = TREE_CODE (arg0);
12854 int is_positive;
12855
12856 if (TREE_CODE (arg01) == REAL_CST)
12857 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12858 else
12859 is_positive = tree_int_cst_sgn (arg01);
12860
12861 /* (X - c) > X becomes false. */
12862 if (code == GT_EXPR
12863 && ((code0 == MINUS_EXPR && is_positive >= 0)
12864 || (code0 == PLUS_EXPR && is_positive <= 0)))
12865 {
12866 if (TREE_CODE (arg01) == INTEGER_CST
12867 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12868 fold_overflow_warning (("assuming signed overflow does not "
12869 "occur when assuming that (X - c) > X "
12870 "is always false"),
12871 WARN_STRICT_OVERFLOW_ALL);
12872 return constant_boolean_node (0, type);
12873 }
12874
12875 /* Likewise (X + c) < X becomes false. */
12876 if (code == LT_EXPR
12877 && ((code0 == PLUS_EXPR && is_positive >= 0)
12878 || (code0 == MINUS_EXPR && is_positive <= 0)))
12879 {
12880 if (TREE_CODE (arg01) == INTEGER_CST
12881 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12882 fold_overflow_warning (("assuming signed overflow does not "
12883 "occur when assuming that "
12884 "(X + c) < X is always false"),
12885 WARN_STRICT_OVERFLOW_ALL);
12886 return constant_boolean_node (0, type);
12887 }
12888
12889 /* Convert (X - c) <= X to true. */
12890 if (!HONOR_NANS (arg1)
12891 && code == LE_EXPR
12892 && ((code0 == MINUS_EXPR && is_positive >= 0)
12893 || (code0 == PLUS_EXPR && is_positive <= 0)))
12894 {
12895 if (TREE_CODE (arg01) == INTEGER_CST
12896 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12897 fold_overflow_warning (("assuming signed overflow does not "
12898 "occur when assuming that "
12899 "(X - c) <= X is always true"),
12900 WARN_STRICT_OVERFLOW_ALL);
12901 return constant_boolean_node (1, type);
12902 }
12903
12904 /* Convert (X + c) >= X to true. */
12905 if (!HONOR_NANS (arg1)
12906 && code == GE_EXPR
12907 && ((code0 == PLUS_EXPR && is_positive >= 0)
12908 || (code0 == MINUS_EXPR && is_positive <= 0)))
12909 {
12910 if (TREE_CODE (arg01) == INTEGER_CST
12911 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12912 fold_overflow_warning (("assuming signed overflow does not "
12913 "occur when assuming that "
12914 "(X + c) >= X is always true"),
12915 WARN_STRICT_OVERFLOW_ALL);
12916 return constant_boolean_node (1, type);
12917 }
12918
12919 if (TREE_CODE (arg01) == INTEGER_CST)
12920 {
12921 /* Convert X + c > X and X - c < X to true for integers. */
12922 if (code == GT_EXPR
12923 && ((code0 == PLUS_EXPR && is_positive > 0)
12924 || (code0 == MINUS_EXPR && is_positive < 0)))
12925 {
12926 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12927 fold_overflow_warning (("assuming signed overflow does "
12928 "not occur when assuming that "
12929 "(X + c) > X is always true"),
12930 WARN_STRICT_OVERFLOW_ALL);
12931 return constant_boolean_node (1, type);
12932 }
12933
12934 if (code == LT_EXPR
12935 && ((code0 == MINUS_EXPR && is_positive > 0)
12936 || (code0 == PLUS_EXPR && is_positive < 0)))
12937 {
12938 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12939 fold_overflow_warning (("assuming signed overflow does "
12940 "not occur when assuming that "
12941 "(X - c) < X is always true"),
12942 WARN_STRICT_OVERFLOW_ALL);
12943 return constant_boolean_node (1, type);
12944 }
12945
12946 /* Convert X + c <= X and X - c >= X to false for integers. */
12947 if (code == LE_EXPR
12948 && ((code0 == PLUS_EXPR && is_positive > 0)
12949 || (code0 == MINUS_EXPR && is_positive < 0)))
12950 {
12951 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12952 fold_overflow_warning (("assuming signed overflow does "
12953 "not occur when assuming that "
12954 "(X + c) <= X is always false"),
12955 WARN_STRICT_OVERFLOW_ALL);
12956 return constant_boolean_node (0, type);
12957 }
12958
12959 if (code == GE_EXPR
12960 && ((code0 == MINUS_EXPR && is_positive > 0)
12961 || (code0 == PLUS_EXPR && is_positive < 0)))
12962 {
12963 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12964 fold_overflow_warning (("assuming signed overflow does "
12965 "not occur when assuming that "
12966 "(X - c) >= X is always false"),
12967 WARN_STRICT_OVERFLOW_ALL);
12968 return constant_boolean_node (0, type);
12969 }
12970 }
12971 }
12972
12973 /* Comparisons with the highest or lowest possible integer of
12974 the specified precision will have known values. */
12975 {
12976 tree arg1_type = TREE_TYPE (arg1);
12977 unsigned int prec = TYPE_PRECISION (arg1_type);
12978
12979 if (TREE_CODE (arg1) == INTEGER_CST
12980 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12981 {
12982 wide_int max = wi::max_value (arg1_type);
12983 wide_int signed_max = wi::max_value (prec, SIGNED);
12984 wide_int min = wi::min_value (arg1_type);
12985
12986 if (wi::eq_p (arg1, max))
12987 switch (code)
12988 {
12989 case GT_EXPR:
12990 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12991
12992 case GE_EXPR:
12993 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12994
12995 case LE_EXPR:
12996 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12997
12998 case LT_EXPR:
12999 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13000
13001 /* The GE_EXPR and LT_EXPR cases above are not normally
13002 reached because of previous transformations. */
13003
13004 default:
13005 break;
13006 }
13007 else if (wi::eq_p (arg1, max - 1))
13008 switch (code)
13009 {
13010 case GT_EXPR:
13011 arg1 = const_binop (PLUS_EXPR, arg1,
13012 build_int_cst (TREE_TYPE (arg1), 1));
13013 return fold_build2_loc (loc, EQ_EXPR, type,
13014 fold_convert_loc (loc,
13015 TREE_TYPE (arg1), arg0),
13016 arg1);
13017 case LE_EXPR:
13018 arg1 = const_binop (PLUS_EXPR, arg1,
13019 build_int_cst (TREE_TYPE (arg1), 1));
13020 return fold_build2_loc (loc, NE_EXPR, type,
13021 fold_convert_loc (loc, TREE_TYPE (arg1),
13022 arg0),
13023 arg1);
13024 default:
13025 break;
13026 }
13027 else if (wi::eq_p (arg1, min))
13028 switch (code)
13029 {
13030 case LT_EXPR:
13031 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13032
13033 case LE_EXPR:
13034 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13035
13036 case GE_EXPR:
13037 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13038
13039 case GT_EXPR:
13040 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13041
13042 default:
13043 break;
13044 }
13045 else if (wi::eq_p (arg1, min + 1))
13046 switch (code)
13047 {
13048 case GE_EXPR:
13049 arg1 = const_binop (MINUS_EXPR, arg1,
13050 build_int_cst (TREE_TYPE (arg1), 1));
13051 return fold_build2_loc (loc, NE_EXPR, type,
13052 fold_convert_loc (loc,
13053 TREE_TYPE (arg1), arg0),
13054 arg1);
13055 case LT_EXPR:
13056 arg1 = const_binop (MINUS_EXPR, arg1,
13057 build_int_cst (TREE_TYPE (arg1), 1));
13058 return fold_build2_loc (loc, EQ_EXPR, type,
13059 fold_convert_loc (loc, TREE_TYPE (arg1),
13060 arg0),
13061 arg1);
13062 default:
13063 break;
13064 }
13065
13066 else if (wi::eq_p (arg1, signed_max)
13067 && TYPE_UNSIGNED (arg1_type)
13068 /* We will flip the signedness of the comparison operator
13069 associated with the mode of arg1, so the sign bit is
13070 specified by this mode. Check that arg1 is the signed
13071 max associated with this sign bit. */
13072 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13073 /* signed_type does not work on pointer types. */
13074 && INTEGRAL_TYPE_P (arg1_type))
13075 {
13076 /* The following case also applies to X < signed_max+1
13077 and X >= signed_max+1 because of previous transformations. */
13078 if (code == LE_EXPR || code == GT_EXPR)
13079 {
13080 tree st = signed_type_for (arg1_type);
13081 return fold_build2_loc (loc,
13082 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13083 type, fold_convert_loc (loc, st, arg0),
13084 build_int_cst (st, 0));
13085 }
13086 }
13087 }
13088 }
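/* Illustrative sketch: for a comparison carried out in unsigned char
   (max 255, min 0),

       x > 255 folds to 0,           x <= 255 folds to 1,
       x >= 255 folds to x == 255,   x <= 254 folds to x != 255;

   and for unsigned int x, x <= 0x7fffffff (the signed max) folds to
   (int) x >= 0, a plain sign-bit test.  */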
13089
13090 /* If we are comparing an ABS_EXPR with a constant, we can
13091 convert all the cases into explicit comparisons, but they may
13092 well not be faster than doing the ABS and one comparison.
13093 But ABS (X) <= C is a range comparison, which becomes a subtraction
13094 and a comparison, and is probably faster. */
13095 if (code == LE_EXPR
13096 && TREE_CODE (arg1) == INTEGER_CST
13097 && TREE_CODE (arg0) == ABS_EXPR
13098 && ! TREE_SIDE_EFFECTS (arg0)
13099 && (0 != (tem = negate_expr (arg1)))
13100 && TREE_CODE (tem) == INTEGER_CST
13101 && !TREE_OVERFLOW (tem))
13102 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13103 build2 (GE_EXPR, type,
13104 TREE_OPERAND (arg0, 0), tem),
13105 build2 (LE_EXPR, type,
13106 TREE_OPERAND (arg0, 0), arg1));
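/* Illustrative sketch: with C == 7,

       abs (x) <= 7   folds to   x >= -7 && x <= 7

   turning the ABS plus comparison into a range test.  */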
13107
13108 /* Convert ABS_EXPR<x> >= 0 to true. */
13109 strict_overflow_p = false;
13110 if (code == GE_EXPR
13111 && (integer_zerop (arg1)
13112 || (! HONOR_NANS (arg0)
13113 && real_zerop (arg1)))
13114 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13115 {
13116 if (strict_overflow_p)
13117 fold_overflow_warning (("assuming signed overflow does not occur "
13118 "when simplifying comparison of "
13119 "absolute value and zero"),
13120 WARN_STRICT_OVERFLOW_CONDITIONAL);
13121 return omit_one_operand_loc (loc, type,
13122 constant_boolean_node (true, type),
13123 arg0);
13124 }
13125
13126 /* Convert ABS_EXPR<x> < 0 to false. */
13127 strict_overflow_p = false;
13128 if (code == LT_EXPR
13129 && (integer_zerop (arg1) || real_zerop (arg1))
13130 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13131 {
13132 if (strict_overflow_p)
13133 fold_overflow_warning (("assuming signed overflow does not occur "
13134 "when simplifying comparison of "
13135 "absolute value and zero"),
13136 WARN_STRICT_OVERFLOW_CONDITIONAL);
13137 return omit_one_operand_loc (loc, type,
13138 constant_boolean_node (false, type),
13139 arg0);
13140 }
13141
13142 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13143 and similarly for >= into !=. */
13144 if ((code == LT_EXPR || code == GE_EXPR)
13145 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13146 && TREE_CODE (arg1) == LSHIFT_EXPR
13147 && integer_onep (TREE_OPERAND (arg1, 0)))
13148 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13149 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13150 TREE_OPERAND (arg1, 1)),
13151 build_zero_cst (TREE_TYPE (arg0)));
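/* Illustrative sketch: for unsigned x,

       x < (1u << y)   folds to   (x >> y) == 0

   since x is below 2**y exactly when no bit at position y or higher is
   set; x >= (1u << y) likewise becomes (x >> y) != 0.  */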
13152
13153 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13154 otherwise Y might be >= # of bits in X's type and thus e.g.
13155 (unsigned char) (1 << Y) for Y == 15 might be 0.
13156 If the cast is widening, then 1 << Y should have unsigned type,
13157 otherwise if Y is the number of bits in the signed shift type minus 1,
13158 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13159 Y == 31 might be 0xffffffff80000000. */
13160 if ((code == LT_EXPR || code == GE_EXPR)
13161 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13162 && CONVERT_EXPR_P (arg1)
13163 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13164 && (element_precision (TREE_TYPE (arg1))
13165 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13166 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13167 || (element_precision (TREE_TYPE (arg1))
13168 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13169 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13170 {
13171 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13172 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13173 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13174 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13175 build_zero_cst (TREE_TYPE (arg0)));
13176 }
13177
13178 return NULL_TREE;
13179
13180 case UNORDERED_EXPR:
13181 case ORDERED_EXPR:
13182 case UNLT_EXPR:
13183 case UNLE_EXPR:
13184 case UNGT_EXPR:
13185 case UNGE_EXPR:
13186 case UNEQ_EXPR:
13187 case LTGT_EXPR:
13188 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13189 {
13190 t1 = fold_relational_const (code, type, arg0, arg1);
13191 if (t1 != NULL_TREE)
13192 return t1;
13193 }
13194
13195 /* If the first operand is NaN, the result is constant. */
13196 if (TREE_CODE (arg0) == REAL_CST
13197 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13198 && (code != LTGT_EXPR || ! flag_trapping_math))
13199 {
13200 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13201 ? integer_zero_node
13202 : integer_one_node;
13203 return omit_one_operand_loc (loc, type, t1, arg1);
13204 }
13205
13206 /* If the second operand is NaN, the result is constant. */
13207 if (TREE_CODE (arg1) == REAL_CST
13208 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13209 && (code != LTGT_EXPR || ! flag_trapping_math))
13210 {
13211 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13212 ? integer_zero_node
13213 : integer_one_node;
13214 return omit_one_operand_loc (loc, type, t1, arg0);
13215 }
13216
13217 /* Simplify unordered comparison of something with itself. */
13218 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13219 && operand_equal_p (arg0, arg1, 0))
13220 return constant_boolean_node (1, type);
13221
13222 if (code == LTGT_EXPR
13223 && !flag_trapping_math
13224 && operand_equal_p (arg0, arg1, 0))
13225 return constant_boolean_node (0, type);
13226
13227 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13228 {
13229 tree targ0 = strip_float_extensions (arg0);
13230 tree targ1 = strip_float_extensions (arg1);
13231 tree newtype = TREE_TYPE (targ0);
13232
13233 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13234 newtype = TREE_TYPE (targ1);
13235
13236 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13237 return fold_build2_loc (loc, code, type,
13238 fold_convert_loc (loc, newtype, targ0),
13239 fold_convert_loc (loc, newtype, targ1));
13240 }
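/* Illustrative sketch: for float operands a and b, an unordered test
   such as

       __builtin_isunordered ((double) a, (double) b)

   folds to the same test on a and b directly; the widening conversion
   can change neither NaN-ness nor the ordering.  */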
13241
13242 return NULL_TREE;
13243
13244 case COMPOUND_EXPR:
13245 /* When pedantic, a compound expression can be neither an lvalue
13246 nor an integer constant expression. */
13247 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13248 return NULL_TREE;
13249 /* Don't let (0, 0) be null pointer constant. */
13250 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13251 : fold_convert_loc (loc, type, arg1);
13252 return pedantic_non_lvalue_loc (loc, tem);
13253
13254 case ASSERT_EXPR:
13255 /* An ASSERT_EXPR should never be passed to fold_binary. */
13256 gcc_unreachable ();
13257
13258 default:
13259 return NULL_TREE;
13260 } /* switch (code) */
13261 }
13262
13263 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13264 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13265 of GOTO_EXPR. */
13266
13267 static tree
13268 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13269 {
13270 switch (TREE_CODE (*tp))
13271 {
13272 case LABEL_EXPR:
13273 return *tp;
13274
13275 case GOTO_EXPR:
13276 *walk_subtrees = 0;
13277
13278 /* ... fall through ... */
13279
13280 default:
13281 return NULL_TREE;
13282 }
13283 }
13284
13285 /* Return whether the sub-tree ST contains a label which is accessible from
13286 outside the sub-tree. */
13287
13288 static bool
13289 contains_label_p (tree st)
13290 {
13291 return
13292 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13293 }
13294
13295 /* Fold a ternary expression of code CODE and type TYPE with operands
13296 OP0, OP1, and OP2. Return the folded expression if folding is
13297 successful. Otherwise, return NULL_TREE. */
13298
13299 tree
13300 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13301 tree op0, tree op1, tree op2)
13302 {
13303 tree tem;
13304 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13305 enum tree_code_class kind = TREE_CODE_CLASS (code);
13306
13307 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13308 && TREE_CODE_LENGTH (code) == 3);
13309
13310 /* If this is a commutative operation, and OP0 is a constant, move it
13311 to OP1 to reduce the number of tests below. */
13312 if (commutative_ternary_tree_code (code)
13313 && tree_swap_operands_p (op0, op1, true))
13314 return fold_build3_loc (loc, code, type, op1, op0, op2);
13315
13316 tem = generic_simplify (loc, code, type, op0, op1, op2);
13317 if (tem)
13318 return tem;
13319
13320 /* Strip any conversions that don't change the mode. This is safe
13321 for every expression, except for a comparison expression because
13322 its signedness is derived from its operands. So, in the latter
13323 case, only strip conversions that don't change the signedness.
13324
13325 Note that this is done as an internal manipulation within the
13326 constant folder, in order to find the simplest representation of
13327 the arguments so that their form can be studied. In any case,
13328 the appropriate type conversions should be put back in the tree
13329 that will get out of the constant folder. */
13330 if (op0)
13331 {
13332 arg0 = op0;
13333 STRIP_NOPS (arg0);
13334 }
13335
13336 if (op1)
13337 {
13338 arg1 = op1;
13339 STRIP_NOPS (arg1);
13340 }
13341
13342 if (op2)
13343 {
13344 arg2 = op2;
13345 STRIP_NOPS (arg2);
13346 }
13347
13348 switch (code)
13349 {
13350 case COMPONENT_REF:
13351 if (TREE_CODE (arg0) == CONSTRUCTOR
13352 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13353 {
13354 unsigned HOST_WIDE_INT idx;
13355 tree field, value;
13356 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13357 if (field == arg1)
13358 return value;
13359 }
13360 return NULL_TREE;
13361
13362 case COND_EXPR:
13363 case VEC_COND_EXPR:
13364 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13365 so all simple results must be passed through pedantic_non_lvalue. */
13366 if (TREE_CODE (arg0) == INTEGER_CST)
13367 {
13368 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13369 tem = integer_zerop (arg0) ? op2 : op1;
13370 /* Only optimize constant conditions when the selected branch
13371 has the same type as the COND_EXPR. This avoids optimizing
13372 away "c ? x : throw", where the throw has a void type.
13373 Avoid throwing away an operand that contains a label. */
13374 if ((!TREE_SIDE_EFFECTS (unused_op)
13375 || !contains_label_p (unused_op))
13376 && (! VOID_TYPE_P (TREE_TYPE (tem))
13377 || VOID_TYPE_P (type)))
13378 return pedantic_non_lvalue_loc (loc, tem);
13379 return NULL_TREE;
13380 }
13381 else if (TREE_CODE (arg0) == VECTOR_CST)
13382 {
13383 if ((TREE_CODE (arg1) == VECTOR_CST
13384 || TREE_CODE (arg1) == CONSTRUCTOR)
13385 && (TREE_CODE (arg2) == VECTOR_CST
13386 || TREE_CODE (arg2) == CONSTRUCTOR))
13387 {
13388 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13389 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13390 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13391 for (i = 0; i < nelts; i++)
13392 {
13393 tree val = VECTOR_CST_ELT (arg0, i);
13394 if (integer_all_onesp (val))
13395 sel[i] = i;
13396 else if (integer_zerop (val))
13397 sel[i] = nelts + i;
13398 else /* Currently unreachable. */
13399 return NULL_TREE;
13400 }
13401 tree t = fold_vec_perm (type, arg1, arg2, sel);
13402 if (t != NULL_TREE)
13403 return t;
13404 }
13405 }
13406
13407 /* If we have A op B ? A : C, we may be able to convert this to a
13408 simpler expression, depending on the operation and the values
13409 of B and C. Signed zeros prevent all of these transformations,
13410 for reasons given above each one.
13411
13412 Also try swapping the arguments and inverting the conditional. */
13413 if (COMPARISON_CLASS_P (arg0)
13414 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13415 arg1, TREE_OPERAND (arg0, 1))
13416 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13417 {
13418 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13419 if (tem)
13420 return tem;
13421 }
13422
13423 if (COMPARISON_CLASS_P (arg0)
13424 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13425 op2,
13426 TREE_OPERAND (arg0, 1))
13427 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13428 {
13429 location_t loc0 = expr_location_or (arg0, loc);
13430 tem = fold_invert_truthvalue (loc0, arg0);
13431 if (tem && COMPARISON_CLASS_P (tem))
13432 {
13433 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13434 if (tem)
13435 return tem;
13436 }
13437 }
13438
13439 /* If the second operand is simpler than the third, swap them
13440 since that produces better jump optimization results. */
13441 if (truth_value_p (TREE_CODE (arg0))
13442 && tree_swap_operands_p (op1, op2, false))
13443 {
13444 location_t loc0 = expr_location_or (arg0, loc);
13445 /* See if this can be inverted. If it can't, possibly because
13446 it was a floating-point inequality comparison, don't do
13447 anything. */
13448 tem = fold_invert_truthvalue (loc0, arg0);
13449 if (tem)
13450 return fold_build3_loc (loc, code, type, tem, op2, op1);
13451 }
13452
13453 /* Convert A ? 1 : 0 to simply A. */
13454 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13455 : (integer_onep (op1)
13456 && !VECTOR_TYPE_P (type)))
13457 && integer_zerop (op2)
13458 /* If we try to convert OP0 to our type, the
13459 call to fold will try to move the conversion inside
13460 a COND, which will recurse. In that case, the COND_EXPR
13461 is probably the best choice, so leave it alone. */
13462 && type == TREE_TYPE (arg0))
13463 return pedantic_non_lvalue_loc (loc, arg0);
13464
13465 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13466 over COND_EXPR in cases such as floating point comparisons. */
13467 if (integer_zerop (op1)
13468 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13469 : (integer_onep (op2)
13470 && !VECTOR_TYPE_P (type)))
13471 && truth_value_p (TREE_CODE (arg0)))
13472 return pedantic_non_lvalue_loc (loc,
13473 fold_convert_loc (loc, type,
13474 invert_truthvalue_loc (loc,
13475 arg0)));
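/* Editorial illustration (not part of the original sources): for a
   truth-valued A whose type matches the COND_EXPR, the two folds above
   rewrite

     A ? 1 : 0  ==>  A
     A ? 0 : 1  ==>  !A

   the second form preferring NOT_EXPR so that e.g. floating-point
   comparisons need not materialize a conditional.  */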
13476
13477 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13478 if (TREE_CODE (arg0) == LT_EXPR
13479 && integer_zerop (TREE_OPERAND (arg0, 1))
13480 && integer_zerop (op2)
13481 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13482 {
13483 /* sign_bit_p looks through both zero and sign extensions,
13484 but for this optimization only sign extensions are
13485 usable. */
13486 tree tem2 = TREE_OPERAND (arg0, 0);
13487 while (tem != tem2)
13488 {
13489 if (TREE_CODE (tem2) != NOP_EXPR
13490 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13491 {
13492 tem = NULL_TREE;
13493 break;
13494 }
13495 tem2 = TREE_OPERAND (tem2, 0);
13496 }
13497 /* sign_bit_p only checks ARG1 bits within A's precision.
13498 If <sign bit of A> has wider type than A, bits outside
13499 of A's precision in <sign bit of A> need to be checked.
13500 If they are all 0, this optimization needs to be done
13501 in A's unsigned type; if they are all 1, in A's signed type;
13502 otherwise the transformation can't be done. */
13503 if (tem
13504 && TYPE_PRECISION (TREE_TYPE (tem))
13505 < TYPE_PRECISION (TREE_TYPE (arg1))
13506 && TYPE_PRECISION (TREE_TYPE (tem))
13507 < TYPE_PRECISION (type))
13508 {
13509 int inner_width, outer_width;
13510 tree tem_type;
13511
13512 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13513 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13514 if (outer_width > TYPE_PRECISION (type))
13515 outer_width = TYPE_PRECISION (type);
13516
13517 wide_int mask = wi::shifted_mask
13518 (inner_width, outer_width - inner_width, false,
13519 TYPE_PRECISION (TREE_TYPE (arg1)));
13520
13521 wide_int common = mask & arg1;
13522 if (common == mask)
13523 {
13524 tem_type = signed_type_for (TREE_TYPE (tem));
13525 tem = fold_convert_loc (loc, tem_type, tem);
13526 }
13527 else if (common == 0)
13528 {
13529 tem_type = unsigned_type_for (TREE_TYPE (tem));
13530 tem = fold_convert_loc (loc, tem_type, tem);
13531 }
13532 else
13533 tem = NULL;
13534 }
13535
13536 if (tem)
13537 return
13538 fold_convert_loc (loc, type,
13539 fold_build2_loc (loc, BIT_AND_EXPR,
13540 TREE_TYPE (tem), tem,
13541 fold_convert_loc (loc,
13542 TREE_TYPE (tem),
13543 arg1)));
13544 }
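/* Editorial illustration (not part of the original sources): for a
   32-bit signed A this block rewrites

     A < 0 ? 0x80000000 : 0  ==>  A & 0x80000000

   i.e. selecting the sign bit with a comparison becomes a direct mask
   of the sign bit.  */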
13545
13546 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13547 already handled above. */
13548 if (TREE_CODE (arg0) == BIT_AND_EXPR
13549 && integer_onep (TREE_OPERAND (arg0, 1))
13550 && integer_zerop (op2)
13551 && integer_pow2p (arg1))
13552 {
13553 tree tem = TREE_OPERAND (arg0, 0);
13554 STRIP_NOPS (tem);
13555 if (TREE_CODE (tem) == RSHIFT_EXPR
13556 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13557 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13558 == tree_to_uhwi (TREE_OPERAND (tem, 1))))
13559 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13560 TREE_OPERAND (tem, 0), arg1);
13561 }
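/* Editorial illustration (not part of the original sources): with
   N == 3 the fold above rewrites

     (A >> 3) & 1 ? 8 : 0  ==>  A & 8

   since testing bit 3 and rematerializing 1 << 3 is equivalent to
   masking bit 3 directly.  */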
13562
13563 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13564 is probably obsolete because the first operand should be a
13565 truth value (that's why we have the two cases above), but let's
13566 leave it in until we can confirm this for all front-ends. */
13567 if (integer_zerop (op2)
13568 && TREE_CODE (arg0) == NE_EXPR
13569 && integer_zerop (TREE_OPERAND (arg0, 1))
13570 && integer_pow2p (arg1)
13571 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13572 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13573 arg1, OEP_ONLY_CONST))
13574 return pedantic_non_lvalue_loc (loc,
13575 fold_convert_loc (loc, type,
13576 TREE_OPERAND (arg0, 0)));
13577
13578 /* Disable the transformations below for vectors, since
13579 fold_binary_op_with_conditional_arg may undo them immediately,
13580 yielding an infinite loop. */
13581 if (code == VEC_COND_EXPR)
13582 return NULL_TREE;
13583
13584 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13585 if (integer_zerop (op2)
13586 && truth_value_p (TREE_CODE (arg0))
13587 && truth_value_p (TREE_CODE (arg1))
13588 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13589 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13590 : TRUTH_ANDIF_EXPR,
13591 type, fold_convert_loc (loc, type, arg0), arg1);
13592
13593 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13594 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13595 && truth_value_p (TREE_CODE (arg0))
13596 && truth_value_p (TREE_CODE (arg1))
13597 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13598 {
13599 location_t loc0 = expr_location_or (arg0, loc);
13600 /* Only perform transformation if ARG0 is easily inverted. */
13601 tem = fold_invert_truthvalue (loc0, arg0);
13602 if (tem)
13603 return fold_build2_loc (loc, code == VEC_COND_EXPR
13604 ? BIT_IOR_EXPR
13605 : TRUTH_ORIF_EXPR,
13606 type, fold_convert_loc (loc, type, tem),
13607 arg1);
13608 }
13609
13610 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13611 if (integer_zerop (arg1)
13612 && truth_value_p (TREE_CODE (arg0))
13613 && truth_value_p (TREE_CODE (op2))
13614 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13615 {
13616 location_t loc0 = expr_location_or (arg0, loc);
13617 /* Only perform transformation if ARG0 is easily inverted. */
13618 tem = fold_invert_truthvalue (loc0, arg0);
13619 if (tem)
13620 return fold_build2_loc (loc, code == VEC_COND_EXPR
13621 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13622 type, fold_convert_loc (loc, type, tem),
13623 op2);
13624 }
13625
13626 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13627 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13628 && truth_value_p (TREE_CODE (arg0))
13629 && truth_value_p (TREE_CODE (op2))
13630 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13631 return fold_build2_loc (loc, code == VEC_COND_EXPR
13632 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13633 type, fold_convert_loc (loc, type, arg0), op2);
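/* Editorial illustration (not part of the original sources): the four
   folds above implement the usual boolean identities

     a ? b : 0  ==>  a && b
     a ? b : 1  ==>  !a || b
     a ? 0 : b  ==>  !a && b
     a ? 1 : b  ==>  a || b

   with BIT_AND_EXPR/BIT_IOR_EXPR substituted for the short-circuit
   codes in the VEC_COND_EXPR case.  */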
13634
13635 return NULL_TREE;
13636
13637 case CALL_EXPR:
13638 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13639 of fold_ternary on them. */
13640 gcc_unreachable ();
13641
13642 case BIT_FIELD_REF:
13643 if ((TREE_CODE (arg0) == VECTOR_CST
13644 || (TREE_CODE (arg0) == CONSTRUCTOR
13645 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13646 && (type == TREE_TYPE (TREE_TYPE (arg0))
13647 || (TREE_CODE (type) == VECTOR_TYPE
13648 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13649 {
13650 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13651 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13652 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13653 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13654
13655 if (n != 0
13656 && (idx % width) == 0
13657 && (n % width) == 0
13658 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13659 {
13660 idx = idx / width;
13661 n = n / width;
13662
13663 if (TREE_CODE (arg0) == VECTOR_CST)
13664 {
13665 if (n == 1)
13666 return VECTOR_CST_ELT (arg0, idx);
13667
13668 tree *vals = XALLOCAVEC (tree, n);
13669 for (unsigned i = 0; i < n; ++i)
13670 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13671 return build_vector (type, vals);
13672 }
13673
13674 /* Constructor elements can be subvectors. */
13675 unsigned HOST_WIDE_INT k = 1;
13676 if (CONSTRUCTOR_NELTS (arg0) != 0)
13677 {
13678 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13679 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13680 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13681 }
13682
13683 /* We keep an exact subset of the constructor elements. */
13684 if ((idx % k) == 0 && (n % k) == 0)
13685 {
13686 if (CONSTRUCTOR_NELTS (arg0) == 0)
13687 return build_constructor (type, NULL);
13688 idx /= k;
13689 n /= k;
13690 if (n == 1)
13691 {
13692 if (idx < CONSTRUCTOR_NELTS (arg0))
13693 return CONSTRUCTOR_ELT (arg0, idx)->value;
13694 return build_zero_cst (type);
13695 }
13696
13697 vec<constructor_elt, va_gc> *vals;
13698 vec_alloc (vals, n);
13699 for (unsigned i = 0;
13700 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13701 ++i)
13702 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13703 CONSTRUCTOR_ELT
13704 (arg0, idx + i)->value);
13705 return build_constructor (type, vals);
13706 }
13707 /* The bitfield references a single constructor element. */
13708 else if (idx + n <= (idx / k + 1) * k)
13709 {
13710 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13711 return build_zero_cst (type);
13712 else if (n == k)
13713 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13714 else
13715 return fold_build3_loc (loc, code, type,
13716 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13717 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13718 }
13719 }
13720 }
13721
13722 /* A bit-field-ref that references the full argument can be stripped. */
13723 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13724 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13725 && integer_zerop (op2))
13726 return fold_convert_loc (loc, type, arg0);
13727
13728 /* On constants we can use native encode/interpret to constant
13729 fold (nearly) all BIT_FIELD_REFs. */
13730 if (CONSTANT_CLASS_P (arg0)
13731 && can_native_interpret_type_p (type)
13732 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13733 /* This limitation should not be necessary; we just need to
13734 round this up to mode size. */
13735 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13736 /* Need bit-shifting of the buffer to relax the following. */
13737 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13738 {
13739 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13740 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13741 unsigned HOST_WIDE_INT clen;
13742 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13743 /* ??? We cannot tell native_encode_expr to start at
13744 some random byte only. So limit ourselves to a reasonable amount
13745 of work. */
13746 if (clen <= 4096)
13747 {
13748 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13749 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13750 if (len > 0
13751 && len * BITS_PER_UNIT >= bitpos + bitsize)
13752 {
13753 tree v = native_interpret_expr (type,
13754 b + bitpos / BITS_PER_UNIT,
13755 bitsize / BITS_PER_UNIT);
13756 if (v)
13757 return v;
13758 }
13759 }
13760 }
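/* Editorial illustration (not part of the original sources): a
   BIT_FIELD_REF taking bits 32..63 of a constant 64-bit value is folded
   here by serializing the constant with native_encode_expr and
   re-reading four bytes at offset four via native_interpret_expr.  */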
13761
13762 return NULL_TREE;
13763
13764 case FMA_EXPR:
13765 /* For integers we can decompose the FMA if possible. */
13766 if (TREE_CODE (arg0) == INTEGER_CST
13767 && TREE_CODE (arg1) == INTEGER_CST)
13768 return fold_build2_loc (loc, PLUS_EXPR, type,
13769 const_binop (MULT_EXPR, arg0, arg1), arg2);
13770 if (integer_zerop (arg2))
13771 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13772
13773 return fold_fma (loc, type, arg0, arg1, arg2);
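/* Editorial illustration (not part of the original sources): for
   integer constants the decomposition above folds e.g.

     FMA_EXPR <3, 4, c>  ==>  12 + c

   while a zero addend degenerates to a plain multiplication.  */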
13774
13775 case VEC_PERM_EXPR:
13776 if (TREE_CODE (arg2) == VECTOR_CST)
13777 {
13778 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13779 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13780 unsigned char *sel2 = sel + nelts;
13781 bool need_mask_canon = false;
13782 bool need_mask_canon2 = false;
13783 bool all_in_vec0 = true;
13784 bool all_in_vec1 = true;
13785 bool maybe_identity = true;
13786 bool single_arg = (op0 == op1);
13787 bool changed = false;
13788
13789 mask2 = 2 * nelts - 1;
13790 mask = single_arg ? (nelts - 1) : mask2;
13791 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13792 for (i = 0; i < nelts; i++)
13793 {
13794 tree val = VECTOR_CST_ELT (arg2, i);
13795 if (TREE_CODE (val) != INTEGER_CST)
13796 return NULL_TREE;
13797
13798 /* Make sure that the perm value is in an acceptable
13799 range. */
13800 wide_int t = val;
13801 need_mask_canon |= wi::gtu_p (t, mask);
13802 need_mask_canon2 |= wi::gtu_p (t, mask2);
13803 sel[i] = t.to_uhwi () & mask;
13804 sel2[i] = t.to_uhwi () & mask2;
13805
13806 if (sel[i] < nelts)
13807 all_in_vec1 = false;
13808 else
13809 all_in_vec0 = false;
13810
13811 if ((sel[i] & (nelts-1)) != i)
13812 maybe_identity = false;
13813 }
13814
13815 if (maybe_identity)
13816 {
13817 if (all_in_vec0)
13818 return op0;
13819 if (all_in_vec1)
13820 return op1;
13821 }
13822
13823 if (all_in_vec0)
13824 op1 = op0;
13825 else if (all_in_vec1)
13826 {
13827 op0 = op1;
13828 for (i = 0; i < nelts; i++)
13829 sel[i] -= nelts;
13830 need_mask_canon = true;
13831 }
13832
13833 if ((TREE_CODE (op0) == VECTOR_CST
13834 || TREE_CODE (op0) == CONSTRUCTOR)
13835 && (TREE_CODE (op1) == VECTOR_CST
13836 || TREE_CODE (op1) == CONSTRUCTOR))
13837 {
13838 tree t = fold_vec_perm (type, op0, op1, sel);
13839 if (t != NULL_TREE)
13840 return t;
13841 }
13842
13843 if (op0 == op1 && !single_arg)
13844 changed = true;
13845
13846 /* Some targets are deficient and fail to expand a single
13847 argument permutation while still allowing an equivalent
13848 2-argument version. */
13849 if (need_mask_canon && arg2 == op2
13850 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13851 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13852 {
13853 need_mask_canon = need_mask_canon2;
13854 sel = sel2;
13855 }
13856
13857 if (need_mask_canon && arg2 == op2)
13858 {
13859 tree *tsel = XALLOCAVEC (tree, nelts);
13860 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13861 for (i = 0; i < nelts; i++)
13862 tsel[i] = build_int_cst (eltype, sel[i]);
13863 op2 = build_vector (TREE_TYPE (arg2), tsel);
13864 changed = true;
13865 }
13866
13867 if (changed)
13868 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13869 }
13870 return NULL_TREE;
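/* Editorial illustration (not part of the original sources): for
   four-element vectors a constant selector {0, 1, 2, 3} picks every
   lane from the first operand, so the permutation folds to op0
   outright; out-of-range selector values are canonicalized modulo
   2 * nelts by the mask handling above.  */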
13871
13872 default:
13873 return NULL_TREE;
13874 } /* switch (code) */
13875 }
13876
13877 /* Perform constant folding and related simplification of EXPR.
13878 The related simplifications include x*1 => x, x*0 => 0, etc.,
13879 and application of the associative law.
13880 NOP_EXPR conversions may be removed freely (as long as we
13881 are careful not to change the type of the overall expression).
13882 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13883 but we can constant-fold them if they have constant operands. */
13884
13885 #ifdef ENABLE_FOLD_CHECKING
13886 # define fold(x) fold_1 (x)
13887 static tree fold_1 (tree);
13888 static
13889 #endif
13890 tree
13891 fold (tree expr)
13892 {
13893 const tree t = expr;
13894 enum tree_code code = TREE_CODE (t);
13895 enum tree_code_class kind = TREE_CODE_CLASS (code);
13896 tree tem;
13897 location_t loc = EXPR_LOCATION (expr);
13898
13899 /* Return right away if a constant. */
13900 if (kind == tcc_constant)
13901 return t;
13902
13903 /* CALL_EXPR-like objects with variable numbers of operands are
13904 treated specially. */
13905 if (kind == tcc_vl_exp)
13906 {
13907 if (code == CALL_EXPR)
13908 {
13909 tem = fold_call_expr (loc, expr, false);
13910 return tem ? tem : expr;
13911 }
13912 return expr;
13913 }
13914
13915 if (IS_EXPR_CODE_CLASS (kind))
13916 {
13917 tree type = TREE_TYPE (t);
13918 tree op0, op1, op2;
13919
13920 switch (TREE_CODE_LENGTH (code))
13921 {
13922 case 1:
13923 op0 = TREE_OPERAND (t, 0);
13924 tem = fold_unary_loc (loc, code, type, op0);
13925 return tem ? tem : expr;
13926 case 2:
13927 op0 = TREE_OPERAND (t, 0);
13928 op1 = TREE_OPERAND (t, 1);
13929 tem = fold_binary_loc (loc, code, type, op0, op1);
13930 return tem ? tem : expr;
13931 case 3:
13932 op0 = TREE_OPERAND (t, 0);
13933 op1 = TREE_OPERAND (t, 1);
13934 op2 = TREE_OPERAND (t, 2);
13935 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13936 return tem ? tem : expr;
13937 default:
13938 break;
13939 }
13940 }
13941
13942 switch (code)
13943 {
13944 case ARRAY_REF:
13945 {
13946 tree op0 = TREE_OPERAND (t, 0);
13947 tree op1 = TREE_OPERAND (t, 1);
13948
13949 if (TREE_CODE (op1) == INTEGER_CST
13950 && TREE_CODE (op0) == CONSTRUCTOR
13951 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13952 {
13953 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13954 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13955 unsigned HOST_WIDE_INT begin = 0;
13956
13957 /* Find a matching index by means of a binary search. */
13958 while (begin != end)
13959 {
13960 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13961 tree index = (*elts)[middle].index;
13962
13963 if (TREE_CODE (index) == INTEGER_CST
13964 && tree_int_cst_lt (index, op1))
13965 begin = middle + 1;
13966 else if (TREE_CODE (index) == INTEGER_CST
13967 && tree_int_cst_lt (op1, index))
13968 end = middle;
13969 else if (TREE_CODE (index) == RANGE_EXPR
13970 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13971 begin = middle + 1;
13972 else if (TREE_CODE (index) == RANGE_EXPR
13973 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13974 end = middle;
13975 else
13976 return (*elts)[middle].value;
13977 }
13978 }
13979
13980 return t;
13981 }
13982
13983 /* Return a VECTOR_CST if possible. */
13984 case CONSTRUCTOR:
13985 {
13986 tree type = TREE_TYPE (t);
13987 if (TREE_CODE (type) != VECTOR_TYPE)
13988 return t;
13989
13990 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13991 unsigned HOST_WIDE_INT idx, pos = 0;
13992 tree value;
13993
13994 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13995 {
13996 if (!CONSTANT_CLASS_P (value))
13997 return t;
13998 if (TREE_CODE (value) == VECTOR_CST)
13999 {
14000 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14001 vec[pos++] = VECTOR_CST_ELT (value, i);
14002 }
14003 else
14004 vec[pos++] = value;
14005 }
14006 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14007 vec[pos] = build_zero_cst (TREE_TYPE (type));
14008
14009 return build_vector (type, vec);
14010 }
14011
14012 case CONST_DECL:
14013 return fold (DECL_INITIAL (t));
14014
14015 default:
14016 return t;
14017 } /* switch (code) */
14018 }
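/* Editorial illustration (not part of the original sources): a minimal
   sketch of typical fold usage.  Given a GENERIC tree A of
   integer_type_node:

     tree t = build2 (PLUS_EXPR, integer_type_node, a,
                      build_int_cst (integer_type_node, 0));
     t = fold (t);

   T is then A itself; the A + 0 node is simplified away.  Callers that
   can should prefer the fold_buildN family below, which never builds
   the unsimplified node in the first place.  */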
14019
14020 #ifdef ENABLE_FOLD_CHECKING
14021 #undef fold
14022
14023 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14024 hash_table<pointer_hash<const tree_node> > *);
14025 static void fold_check_failed (const_tree, const_tree);
14026 void print_fold_checksum (const_tree);
14027
14028 /* When --enable-checking=fold, compute a digest of expr before
14029 and after the actual fold call to verify that fold did not
14030 accidentally change the original expr. */
14031
14032 tree
14033 fold (tree expr)
14034 {
14035 tree ret;
14036 struct md5_ctx ctx;
14037 unsigned char checksum_before[16], checksum_after[16];
14038 hash_table<pointer_hash<const tree_node> > ht (32);
14039
14040 md5_init_ctx (&ctx);
14041 fold_checksum_tree (expr, &ctx, &ht);
14042 md5_finish_ctx (&ctx, checksum_before);
14043 ht.empty ();
14044
14045 ret = fold_1 (expr);
14046
14047 md5_init_ctx (&ctx);
14048 fold_checksum_tree (expr, &ctx, &ht);
14049 md5_finish_ctx (&ctx, checksum_after);
14050
14051 if (memcmp (checksum_before, checksum_after, 16))
14052 fold_check_failed (expr, ret);
14053
14054 return ret;
14055 }
14056
14057 void
14058 print_fold_checksum (const_tree expr)
14059 {
14060 struct md5_ctx ctx;
14061 unsigned char checksum[16], cnt;
14062 hash_table<pointer_hash<const tree_node> > ht (32);
14063
14064 md5_init_ctx (&ctx);
14065 fold_checksum_tree (expr, &ctx, &ht);
14066 md5_finish_ctx (&ctx, checksum);
14067 for (cnt = 0; cnt < 16; ++cnt)
14068 fprintf (stderr, "%02x", checksum[cnt]);
14069 putc ('\n', stderr);
14070 }
14071
14072 static void
14073 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14074 {
14075 internal_error ("fold check: original tree changed by fold");
14076 }
14077
14078 static void
14079 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14080 hash_table<pointer_hash <const tree_node> > *ht)
14081 {
14082 const tree_node **slot;
14083 enum tree_code code;
14084 union tree_node buf;
14085 int i, len;
14086
14087 recursive_label:
14088 if (expr == NULL)
14089 return;
14090 slot = ht->find_slot (expr, INSERT);
14091 if (*slot != NULL)
14092 return;
14093 *slot = expr;
14094 code = TREE_CODE (expr);
14095 if (TREE_CODE_CLASS (code) == tcc_declaration
14096 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14097 {
14098 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14099 memcpy ((char *) &buf, expr, tree_size (expr));
14100 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14101 buf.decl_with_vis.symtab_node = NULL;
14102 expr = (tree) &buf;
14103 }
14104 else if (TREE_CODE_CLASS (code) == tcc_type
14105 && (TYPE_POINTER_TO (expr)
14106 || TYPE_REFERENCE_TO (expr)
14107 || TYPE_CACHED_VALUES_P (expr)
14108 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14109 || TYPE_NEXT_VARIANT (expr)))
14110 {
14111 /* Allow these fields to be modified. */
14112 tree tmp;
14113 memcpy ((char *) &buf, expr, tree_size (expr));
14114 expr = tmp = (tree) &buf;
14115 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14116 TYPE_POINTER_TO (tmp) = NULL;
14117 TYPE_REFERENCE_TO (tmp) = NULL;
14118 TYPE_NEXT_VARIANT (tmp) = NULL;
14119 if (TYPE_CACHED_VALUES_P (tmp))
14120 {
14121 TYPE_CACHED_VALUES_P (tmp) = 0;
14122 TYPE_CACHED_VALUES (tmp) = NULL;
14123 }
14124 }
14125 md5_process_bytes (expr, tree_size (expr), ctx);
14126 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14127 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14128 if (TREE_CODE_CLASS (code) != tcc_type
14129 && TREE_CODE_CLASS (code) != tcc_declaration
14130 && code != TREE_LIST
14131 && code != SSA_NAME
14132 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14133 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14134 switch (TREE_CODE_CLASS (code))
14135 {
14136 case tcc_constant:
14137 switch (code)
14138 {
14139 case STRING_CST:
14140 md5_process_bytes (TREE_STRING_POINTER (expr),
14141 TREE_STRING_LENGTH (expr), ctx);
14142 break;
14143 case COMPLEX_CST:
14144 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14145 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14146 break;
14147 case VECTOR_CST:
14148 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14149 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14150 break;
14151 default:
14152 break;
14153 }
14154 break;
14155 case tcc_exceptional:
14156 switch (code)
14157 {
14158 case TREE_LIST:
14159 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14160 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14161 expr = TREE_CHAIN (expr);
14162 goto recursive_label;
14163 break;
14164 case TREE_VEC:
14165 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14166 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14167 break;
14168 default:
14169 break;
14170 }
14171 break;
14172 case tcc_expression:
14173 case tcc_reference:
14174 case tcc_comparison:
14175 case tcc_unary:
14176 case tcc_binary:
14177 case tcc_statement:
14178 case tcc_vl_exp:
14179 len = TREE_OPERAND_LENGTH (expr);
14180 for (i = 0; i < len; ++i)
14181 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14182 break;
14183 case tcc_declaration:
14184 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14185 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14186 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14187 {
14188 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14189 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14190 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14191 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14192 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14193 }
14194
14195 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14196 {
14197 if (TREE_CODE (expr) == FUNCTION_DECL)
14198 {
14199 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14200 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14201 }
14202 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14203 }
14204 break;
14205 case tcc_type:
14206 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14207 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14208 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14209 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14210 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14211 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14212 if (INTEGRAL_TYPE_P (expr)
14213 || SCALAR_FLOAT_TYPE_P (expr))
14214 {
14215 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14216 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14217 }
14218 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14219 if (TREE_CODE (expr) == RECORD_TYPE
14220 || TREE_CODE (expr) == UNION_TYPE
14221 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14222 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14223 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14224 break;
14225 default:
14226 break;
14227 }
14228 }
14229
14230 /* Helper function for outputting the checksum of a tree T. When
14231 debugging with gdb, you can "define mynext" to be "next" followed
14232 by "call debug_fold_checksum (op0)", then just trace down until the
14233 outputs differ. */
14234
14235 DEBUG_FUNCTION void
14236 debug_fold_checksum (const_tree t)
14237 {
14238 int i;
14239 unsigned char checksum[16];
14240 struct md5_ctx ctx;
14241 hash_table<pointer_hash<const tree_node> > ht (32);
14242
14243 md5_init_ctx (&ctx);
14244 fold_checksum_tree (t, &ctx, &ht);
14245 md5_finish_ctx (&ctx, checksum);
14246 ht.empty ();
14247
14248 for (i = 0; i < 16; i++)
14249 fprintf (stderr, "%d ", checksum[i]);
14250
14251 fprintf (stderr, "\n");
14252 }
14253
14254 #endif
14255
14256 /* Fold a unary tree expression with code CODE of type TYPE with an
14257 operand OP0. LOC is the location of the resulting expression.
14258 Return a folded expression if successful. Otherwise, return a tree
14259 expression with code CODE of type TYPE with an operand OP0. */
14260
14261 tree
14262 fold_build1_stat_loc (location_t loc,
14263 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14264 {
14265 tree tem;
14266 #ifdef ENABLE_FOLD_CHECKING
14267 unsigned char checksum_before[16], checksum_after[16];
14268 struct md5_ctx ctx;
14269 hash_table<pointer_hash<const tree_node> > ht (32);
14270
14271 md5_init_ctx (&ctx);
14272 fold_checksum_tree (op0, &ctx, &ht);
14273 md5_finish_ctx (&ctx, checksum_before);
14274 ht.empty ();
14275 #endif
14276
14277 tem = fold_unary_loc (loc, code, type, op0);
14278 if (!tem)
14279 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14280
14281 #ifdef ENABLE_FOLD_CHECKING
14282 md5_init_ctx (&ctx);
14283 fold_checksum_tree (op0, &ctx, &ht);
14284 md5_finish_ctx (&ctx, checksum_after);
14285
14286 if (memcmp (checksum_before, checksum_after, 16))
14287 fold_check_failed (op0, tem);
14288 #endif
14289 return tem;
14290 }
14291
14292 /* Fold a binary tree expression with code CODE of type TYPE with
14293 operands OP0 and OP1. LOC is the location of the resulting
14294 expression. Return a folded expression if successful. Otherwise,
14295 return a tree expression with code CODE of type TYPE with operands
14296 OP0 and OP1. */
14297
14298 tree
14299 fold_build2_stat_loc (location_t loc,
14300 enum tree_code code, tree type, tree op0, tree op1
14301 MEM_STAT_DECL)
14302 {
14303 tree tem;
14304 #ifdef ENABLE_FOLD_CHECKING
14305 unsigned char checksum_before_op0[16],
14306 checksum_before_op1[16],
14307 checksum_after_op0[16],
14308 checksum_after_op1[16];
14309 struct md5_ctx ctx;
14310 hash_table<pointer_hash<const tree_node> > ht (32);
14311
14312 md5_init_ctx (&ctx);
14313 fold_checksum_tree (op0, &ctx, &ht);
14314 md5_finish_ctx (&ctx, checksum_before_op0);
14315 ht.empty ();
14316
14317 md5_init_ctx (&ctx);
14318 fold_checksum_tree (op1, &ctx, &ht);
14319 md5_finish_ctx (&ctx, checksum_before_op1);
14320 ht.empty ();
14321 #endif
14322
14323 tem = fold_binary_loc (loc, code, type, op0, op1);
14324 if (!tem)
14325 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14326
14327 #ifdef ENABLE_FOLD_CHECKING
14328 md5_init_ctx (&ctx);
14329 fold_checksum_tree (op0, &ctx, &ht);
14330 md5_finish_ctx (&ctx, checksum_after_op0);
14331 ht.empty ();
14332
14333 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14334 fold_check_failed (op0, tem);
14335
14336 md5_init_ctx (&ctx);
14337 fold_checksum_tree (op1, &ctx, &ht);
14338 md5_finish_ctx (&ctx, checksum_after_op1);
14339
14340 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14341 fold_check_failed (op1, tem);
14342 #endif
14343 return tem;
14344 }
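/* Editorial illustration (not part of the original sources): an assumed
   front-end fragment using the fold_build2_loc wrapper.  With A of
   integer_type_node:

     tree prod = fold_build2_loc (input_location, MULT_EXPR,
                                  integer_type_node, a,
                                  build_int_cst (integer_type_node, 1));

   PROD is A itself, because fold_binary_loc simplifies A * 1 before any
   MULT_EXPR node is constructed.  */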
14345
14346 /* Fold a ternary tree expression with code CODE of type TYPE with
14347 operands OP0, OP1, and OP2. Return a folded expression if
14348 successful. Otherwise, return a tree expression with code CODE of
14349 type TYPE with operands OP0, OP1, and OP2. */
14350
14351 tree
14352 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14353 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14354 {
14355 tree tem;
14356 #ifdef ENABLE_FOLD_CHECKING
14357 unsigned char checksum_before_op0[16],
14358 checksum_before_op1[16],
14359 checksum_before_op2[16],
14360 checksum_after_op0[16],
14361 checksum_after_op1[16],
14362 checksum_after_op2[16];
14363 struct md5_ctx ctx;
14364 hash_table<pointer_hash<const tree_node> > ht (32);
14365
14366 md5_init_ctx (&ctx);
14367 fold_checksum_tree (op0, &ctx, &ht);
14368 md5_finish_ctx (&ctx, checksum_before_op0);
14369 ht.empty ();
14370
14371 md5_init_ctx (&ctx);
14372 fold_checksum_tree (op1, &ctx, &ht);
14373 md5_finish_ctx (&ctx, checksum_before_op1);
14374 ht.empty ();
14375
14376 md5_init_ctx (&ctx);
14377 fold_checksum_tree (op2, &ctx, &ht);
14378 md5_finish_ctx (&ctx, checksum_before_op2);
14379 ht.empty ();
14380 #endif
14381
14382 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14383 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14384 if (!tem)
14385 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14386
14387 #ifdef ENABLE_FOLD_CHECKING
14388 md5_init_ctx (&ctx);
14389 fold_checksum_tree (op0, &ctx, &ht);
14390 md5_finish_ctx (&ctx, checksum_after_op0);
14391 ht.empty ();
14392
14393 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14394 fold_check_failed (op0, tem);
14395
14396 md5_init_ctx (&ctx);
14397 fold_checksum_tree (op1, &ctx, &ht);
14398 md5_finish_ctx (&ctx, checksum_after_op1);
14399 ht.empty ();
14400
14401 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14402 fold_check_failed (op1, tem);
14403
14404 md5_init_ctx (&ctx);
14405 fold_checksum_tree (op2, &ctx, &ht);
14406 md5_finish_ctx (&ctx, checksum_after_op2);
14407
14408 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14409 fold_check_failed (op2, tem);
14410 #endif
14411 return tem;
14412 }
14413
14414 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14415 arguments in ARGARRAY, and a null static chain.
14416 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14417 of type TYPE from the given operands as constructed by build_call_array. */
14418
14419 tree
14420 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14421 int nargs, tree *argarray)
14422 {
14423 tree tem;
14424 #ifdef ENABLE_FOLD_CHECKING
14425 unsigned char checksum_before_fn[16],
14426 checksum_before_arglist[16],
14427 checksum_after_fn[16],
14428 checksum_after_arglist[16];
14429 struct md5_ctx ctx;
14430 hash_table<pointer_hash<const tree_node> > ht (32);
14431 int i;
14432
14433 md5_init_ctx (&ctx);
14434 fold_checksum_tree (fn, &ctx, &ht);
14435 md5_finish_ctx (&ctx, checksum_before_fn);
14436 ht.empty ();
14437
14438 md5_init_ctx (&ctx);
14439 for (i = 0; i < nargs; i++)
14440 fold_checksum_tree (argarray[i], &ctx, &ht);
14441 md5_finish_ctx (&ctx, checksum_before_arglist);
14442 ht.empty ();
14443 #endif
14444
14445 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14446 if (!tem)
14447 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14448
14449 #ifdef ENABLE_FOLD_CHECKING
14450 md5_init_ctx (&ctx);
14451 fold_checksum_tree (fn, &ctx, &ht);
14452 md5_finish_ctx (&ctx, checksum_after_fn);
14453 ht.empty ();
14454
14455 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14456 fold_check_failed (fn, tem);
14457
14458 md5_init_ctx (&ctx);
14459 for (i = 0; i < nargs; i++)
14460 fold_checksum_tree (argarray[i], &ctx, &ht);
14461 md5_finish_ctx (&ctx, checksum_after_arglist);
14462
14463 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14464 fold_check_failed (NULL_TREE, tem);
14465 #endif
14466 return tem;
14467 }
14468
14469 /* Perform constant folding and related simplification of initializer
14470 expression EXPR. These behave identically to "fold_buildN" but ignore
14471 potential run-time traps and exceptions that fold must preserve. */
14472
14473 #define START_FOLD_INIT \
14474 int saved_signaling_nans = flag_signaling_nans;\
14475 int saved_trapping_math = flag_trapping_math;\
14476 int saved_rounding_math = flag_rounding_math;\
14477 int saved_trapv = flag_trapv;\
14478 int saved_folding_initializer = folding_initializer;\
14479 flag_signaling_nans = 0;\
14480 flag_trapping_math = 0;\
14481 flag_rounding_math = 0;\
14482 flag_trapv = 0;\
14483 folding_initializer = 1;
14484
14485 #define END_FOLD_INIT \
14486 flag_signaling_nans = saved_signaling_nans;\
14487 flag_trapping_math = saved_trapping_math;\
14488 flag_rounding_math = saved_rounding_math;\
14489 flag_trapv = saved_trapv;\
14490 folding_initializer = saved_folding_initializer;
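/* Editorial note (not part of the original sources): the wrappers below
   simply bracket the corresponding fold_buildN call with the save,
   clear and restore sequence above, so that e.g. 1.0 / 3.0 in a static
   initializer is folded even when -ftrapping-math or -frounding-math
   would otherwise inhibit it.  */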
14491
14492 tree
14493 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14494 tree type, tree op)
14495 {
14496 tree result;
14497 START_FOLD_INIT;
14498
14499 result = fold_build1_loc (loc, code, type, op);
14500
14501 END_FOLD_INIT;
14502 return result;
14503 }
14504
14505 tree
14506 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14507 tree type, tree op0, tree op1)
14508 {
14509 tree result;
14510 START_FOLD_INIT;
14511
14512 result = fold_build2_loc (loc, code, type, op0, op1);
14513
14514 END_FOLD_INIT;
14515 return result;
14516 }
14517
14518 tree
14519 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14520 int nargs, tree *argarray)
14521 {
14522 tree result;
14523 START_FOLD_INIT;
14524
14525 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14526
14527 END_FOLD_INIT;
14528 return result;
14529 }
14530
14531 #undef START_FOLD_INIT
14532 #undef END_FOLD_INIT
14533
14534 /* Determine whether the first argument is a multiple of the second.
14535 Return 0 if it is not, or if we cannot easily determine it to be.
14536
14537 An example of the sort of thing we care about (at this point; this routine
14538 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14539 fold cases do now) is discovering that
14540
14541 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14542
14543 is a multiple of
14544
14545 SAVE_EXPR (J * 8)
14546
14547 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14548
14549 This code also handles discovering that
14550
14551 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14552
14553 is a multiple of 8 so we don't have to worry about dealing with a
14554 possible remainder.
14555
14556 Note that we *look* inside a SAVE_EXPR only to determine how it was
14557 calculated; it is not safe for fold to do much of anything else with the
14558 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14559 at run time. For example, the latter example above *cannot* be implemented
14560 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14561 evaluation time of the original SAVE_EXPR is not necessarily the same at
14562 the time the new expression is evaluated. The only optimization of this
14563 sort that would be valid is changing
14564
14565 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14566
14567 divided by 8 to
14568
14569 SAVE_EXPR (I) * SAVE_EXPR (J)
14570
14571 (where the same SAVE_EXPR (J) is used in the original and the
14572 transformed version). */
14573
14574 int
14575 multiple_of_p (tree type, const_tree top, const_tree bottom)
14576 {
14577 if (operand_equal_p (top, bottom, 0))
14578 return 1;
14579
14580 if (TREE_CODE (type) != INTEGER_TYPE)
14581 return 0;
14582
14583 switch (TREE_CODE (top))
14584 {
14585 case BIT_AND_EXPR:
14586 /* Bitwise and provides a power of two multiple. If the mask is
14587 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14588 if (!integer_pow2p (bottom))
14589 return 0;
14590 /* FALLTHRU */
14591
14592 case MULT_EXPR:
14593 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14594 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14595
14596 case PLUS_EXPR:
14597 case MINUS_EXPR:
14598 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14599 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14600
14601 case LSHIFT_EXPR:
14602 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14603 {
14604 tree op1, t1;
14605
14606 op1 = TREE_OPERAND (top, 1);
14607 /* const_binop may not detect overflow correctly,
14608 so check for it explicitly here. */
14609 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14610 && 0 != (t1 = fold_convert (type,
14611 const_binop (LSHIFT_EXPR,
14612 size_one_node,
14613 op1)))
14614 && !TREE_OVERFLOW (t1))
14615 return multiple_of_p (type, t1, bottom);
14616 }
14617 return 0;
14618
14619 case NOP_EXPR:
14620 /* Can't handle conversions from non-integral or wider integral type. */
14621 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14622 || (TYPE_PRECISION (type)
14623 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14624 return 0;
14625
14626 /* ... fall through ... */
14627
14628 case SAVE_EXPR:
14629 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14630
14631 case COND_EXPR:
14632 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14633 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14634
14635 case INTEGER_CST:
14636 if (TREE_CODE (bottom) != INTEGER_CST
14637 || integer_zerop (bottom)
14638 || (TYPE_UNSIGNED (type)
14639 && (tree_int_cst_sgn (top) < 0
14640 || tree_int_cst_sgn (bottom) < 0)))
14641 return 0;
14642 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14643 SIGNED);
14644
14645 default:
14646 return 0;
14647 }
14648 }
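/* Editorial illustration (not part of the original sources): for a tree
   of the form J * 8, multiple_of_p (sizetype, top, size_int (4))
   returns 1 via the MULT_EXPR case above, since only one factor (8)
   needs to be a multiple of BOTTOM.  */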
14649
14650 /* Return true if CODE or TYPE is known to be non-negative. */
14651
14652 static bool
14653 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14654 {
14655 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14656 && truth_value_p (code))
14657 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14658 have a signed:1 type (where the values are -1 and 0). */
14659 return true;
14660 return false;
14661 }
14662
14663 /* Return true if (CODE OP0) is known to be non-negative. If the return
14664 value is based on the assumption that signed overflow is undefined,
14665 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14666 *STRICT_OVERFLOW_P. */
14667
14668 bool
14669 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14670 bool *strict_overflow_p)
14671 {
14672 if (TYPE_UNSIGNED (type))
14673 return true;
14674
14675 switch (code)
14676 {
14677 case ABS_EXPR:
14678 /* We can't return true when integer overflow wraps because
14679 ABS_EXPR<INT_MIN> = INT_MIN. */
14680 if (!INTEGRAL_TYPE_P (type))
14681 return true;
14682 if (TYPE_OVERFLOW_UNDEFINED (type))
14683 {
14684 *strict_overflow_p = true;
14685 return true;
14686 }
14687 break;
14688
14689 case NON_LVALUE_EXPR:
14690 case FLOAT_EXPR:
14691 case FIX_TRUNC_EXPR:
14692 return tree_expr_nonnegative_warnv_p (op0,
14693 strict_overflow_p);
14694
14695 CASE_CONVERT:
14696 {
14697 tree inner_type = TREE_TYPE (op0);
14698 tree outer_type = type;
14699
14700 if (TREE_CODE (outer_type) == REAL_TYPE)
14701 {
14702 if (TREE_CODE (inner_type) == REAL_TYPE)
14703 return tree_expr_nonnegative_warnv_p (op0,
14704 strict_overflow_p);
14705 if (INTEGRAL_TYPE_P (inner_type))
14706 {
14707 if (TYPE_UNSIGNED (inner_type))
14708 return true;
14709 return tree_expr_nonnegative_warnv_p (op0,
14710 strict_overflow_p);
14711 }
14712 }
14713 else if (INTEGRAL_TYPE_P (outer_type))
14714 {
14715 if (TREE_CODE (inner_type) == REAL_TYPE)
14716 return tree_expr_nonnegative_warnv_p (op0,
14717 strict_overflow_p);
14718 if (INTEGRAL_TYPE_P (inner_type))
14719 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14720 && TYPE_UNSIGNED (inner_type);
14721 }
14722 }
14723 break;
14724
14725 default:
14726 return tree_simple_nonnegative_warnv_p (code, type);
14727 }
14728
14729 /* We don't know sign of `t', so be conservative and return false. */
14730 return false;
14731 }
14732
14733 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14734 value is based on the assumption that signed overflow is undefined,
14735 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14736 *STRICT_OVERFLOW_P. */
14737
14738 bool
14739 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14740 tree op1, bool *strict_overflow_p)
14741 {
14742 if (TYPE_UNSIGNED (type))
14743 return true;
14744
14745 switch (code)
14746 {
14747 case POINTER_PLUS_EXPR:
14748 case PLUS_EXPR:
14749 if (FLOAT_TYPE_P (type))
14750 return (tree_expr_nonnegative_warnv_p (op0,
14751 strict_overflow_p)
14752 && tree_expr_nonnegative_warnv_p (op1,
14753 strict_overflow_p));
14754
14755 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14756 both unsigned and at least 2 bits shorter than the result. */
14757 if (TREE_CODE (type) == INTEGER_TYPE
14758 && TREE_CODE (op0) == NOP_EXPR
14759 && TREE_CODE (op1) == NOP_EXPR)
14760 {
14761 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14762 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14763 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14764 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14765 {
14766 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14767 TYPE_PRECISION (inner2)) + 1;
14768 return prec < TYPE_PRECISION (type);
14769 }
14770 }
14771 break;
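/* Editorial illustration (not part of the original sources): adding two
   zero-extended 8-bit values in a 32-bit type needs at most 9 bits, so
   (int) (unsigned char) x + (int) (unsigned char) y is known
   non-negative; in a 9-bit result type it would not be.  */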
14772
14773 case MULT_EXPR:
14774 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14775 {
14776 /* x * x is always non-negative for floating point x
14777 or without overflow. */
14778 if (operand_equal_p (op0, op1, 0)
14779 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14780 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14781 {
14782 if (ANY_INTEGRAL_TYPE_P (type)
14783 && TYPE_OVERFLOW_UNDEFINED (type))
14784 *strict_overflow_p = true;
14785 return true;
14786 }
14787 }
14788
14789 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14790 both unsigned and their combined bit count is smaller than the result's. */
14791 if (TREE_CODE (type) == INTEGER_TYPE
14792 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14793 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14794 {
14795 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14796 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14797 : TREE_TYPE (op0);
14798 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14799 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14800 : TREE_TYPE (op1);
14801
14802 bool unsigned0 = TYPE_UNSIGNED (inner0);
14803 bool unsigned1 = TYPE_UNSIGNED (inner1);
14804
14805 if (TREE_CODE (op0) == INTEGER_CST)
14806 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14807
14808 if (TREE_CODE (op1) == INTEGER_CST)
14809 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14810
14811 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14812 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14813 {
14814 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14815 ? tree_int_cst_min_precision (op0, UNSIGNED)
14816 : TYPE_PRECISION (inner0);
14817
14818 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14819 ? tree_int_cst_min_precision (op1, UNSIGNED)
14820 : TYPE_PRECISION (inner1);
14821
14822 return precision0 + precision1 < TYPE_PRECISION (type);
14823 }
14824 }
14825 return false;
14826
14827 case BIT_AND_EXPR:
14828 case MAX_EXPR:
14829 return (tree_expr_nonnegative_warnv_p (op0,
14830 strict_overflow_p)
14831 || tree_expr_nonnegative_warnv_p (op1,
14832 strict_overflow_p));
14833
14834 case BIT_IOR_EXPR:
14835 case BIT_XOR_EXPR:
14836 case MIN_EXPR:
14837 case RDIV_EXPR:
14838 case TRUNC_DIV_EXPR:
14839 case CEIL_DIV_EXPR:
14840 case FLOOR_DIV_EXPR:
14841 case ROUND_DIV_EXPR:
14842 return (tree_expr_nonnegative_warnv_p (op0,
14843 strict_overflow_p)
14844 && tree_expr_nonnegative_warnv_p (op1,
14845 strict_overflow_p));
14846
14847 case TRUNC_MOD_EXPR:
14848 case CEIL_MOD_EXPR:
14849 case FLOOR_MOD_EXPR:
14850 case ROUND_MOD_EXPR:
14851 return tree_expr_nonnegative_warnv_p (op0,
14852 strict_overflow_p);
14853 default:
14854 return tree_simple_nonnegative_warnv_p (code, type);
14855 }
14856
14857 /* We don't know sign of `t', so be conservative and return false. */
14858 return false;
14859 }
14860
14861 /* Return true if T is known to be non-negative. If the return
14862 value is based on the assumption that signed overflow is undefined,
14863 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14864 *STRICT_OVERFLOW_P. */
14865
14866 bool
14867 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14868 {
14869 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14870 return true;
14871
14872 switch (TREE_CODE (t))
14873 {
14874 case INTEGER_CST:
14875 return tree_int_cst_sgn (t) >= 0;
14876
14877 case REAL_CST:
14878 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14879
14880 case FIXED_CST:
14881 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14882
14883 case COND_EXPR:
14884 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14885 strict_overflow_p)
14886 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14887 strict_overflow_p));
14888 default:
14889 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14890 TREE_TYPE (t));
14891 }
14892 /* We don't know sign of `t', so be conservative and return false. */
14893 return false;
14894 }
14895
14896 /* Return true if T is known to be non-negative. If the return
14897 value is based on the assumption that signed overflow is undefined,
14898 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14899 *STRICT_OVERFLOW_P. */
14900
14901 bool
14902 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14903 tree arg0, tree arg1, bool *strict_overflow_p)
14904 {
14905 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14906 switch (DECL_FUNCTION_CODE (fndecl))
14907 {
14908 CASE_FLT_FN (BUILT_IN_ACOS):
14909 CASE_FLT_FN (BUILT_IN_ACOSH):
14910 CASE_FLT_FN (BUILT_IN_CABS):
14911 CASE_FLT_FN (BUILT_IN_COSH):
14912 CASE_FLT_FN (BUILT_IN_ERFC):
14913 CASE_FLT_FN (BUILT_IN_EXP):
14914 CASE_FLT_FN (BUILT_IN_EXP10):
14915 CASE_FLT_FN (BUILT_IN_EXP2):
14916 CASE_FLT_FN (BUILT_IN_FABS):
14917 CASE_FLT_FN (BUILT_IN_FDIM):
14918 CASE_FLT_FN (BUILT_IN_HYPOT):
14919 CASE_FLT_FN (BUILT_IN_POW10):
14920 CASE_INT_FN (BUILT_IN_FFS):
14921 CASE_INT_FN (BUILT_IN_PARITY):
14922 CASE_INT_FN (BUILT_IN_POPCOUNT):
14923 CASE_INT_FN (BUILT_IN_CLZ):
14924 CASE_INT_FN (BUILT_IN_CLRSB):
14925 case BUILT_IN_BSWAP32:
14926 case BUILT_IN_BSWAP64:
14927 /* Always true. */
14928 return true;
14929
14930 CASE_FLT_FN (BUILT_IN_SQRT):
14931 /* sqrt(-0.0) is -0.0. */
14932 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14933 return true;
14934 return tree_expr_nonnegative_warnv_p (arg0,
14935 strict_overflow_p);
14936
14937 CASE_FLT_FN (BUILT_IN_ASINH):
14938 CASE_FLT_FN (BUILT_IN_ATAN):
14939 CASE_FLT_FN (BUILT_IN_ATANH):
14940 CASE_FLT_FN (BUILT_IN_CBRT):
14941 CASE_FLT_FN (BUILT_IN_CEIL):
14942 CASE_FLT_FN (BUILT_IN_ERF):
14943 CASE_FLT_FN (BUILT_IN_EXPM1):
14944 CASE_FLT_FN (BUILT_IN_FLOOR):
14945 CASE_FLT_FN (BUILT_IN_FMOD):
14946 CASE_FLT_FN (BUILT_IN_FREXP):
14947 CASE_FLT_FN (BUILT_IN_ICEIL):
14948 CASE_FLT_FN (BUILT_IN_IFLOOR):
14949 CASE_FLT_FN (BUILT_IN_IRINT):
14950 CASE_FLT_FN (BUILT_IN_IROUND):
14951 CASE_FLT_FN (BUILT_IN_LCEIL):
14952 CASE_FLT_FN (BUILT_IN_LDEXP):
14953 CASE_FLT_FN (BUILT_IN_LFLOOR):
14954 CASE_FLT_FN (BUILT_IN_LLCEIL):
14955 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14956 CASE_FLT_FN (BUILT_IN_LLRINT):
14957 CASE_FLT_FN (BUILT_IN_LLROUND):
14958 CASE_FLT_FN (BUILT_IN_LRINT):
14959 CASE_FLT_FN (BUILT_IN_LROUND):
14960 CASE_FLT_FN (BUILT_IN_MODF):
14961 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14962 CASE_FLT_FN (BUILT_IN_RINT):
14963 CASE_FLT_FN (BUILT_IN_ROUND):
14964 CASE_FLT_FN (BUILT_IN_SCALB):
14965 CASE_FLT_FN (BUILT_IN_SCALBLN):
14966 CASE_FLT_FN (BUILT_IN_SCALBN):
14967 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14968 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14969 CASE_FLT_FN (BUILT_IN_SINH):
14970 CASE_FLT_FN (BUILT_IN_TANH):
14971 CASE_FLT_FN (BUILT_IN_TRUNC):
14972 /* True if the 1st argument is nonnegative. */
14973 return tree_expr_nonnegative_warnv_p (arg0,
14974 strict_overflow_p);
14975
14976 CASE_FLT_FN (BUILT_IN_FMAX):
14977 /* True if the 1st OR 2nd arguments are nonnegative. */
14978 return (tree_expr_nonnegative_warnv_p (arg0,
14979 strict_overflow_p)
14980 || (tree_expr_nonnegative_warnv_p (arg1,
14981 strict_overflow_p)));
14982
14983 CASE_FLT_FN (BUILT_IN_FMIN):
14984 /* True if the 1st AND 2nd arguments are nonnegative. */
14985 return (tree_expr_nonnegative_warnv_p (arg0,
14986 strict_overflow_p)
14987 && (tree_expr_nonnegative_warnv_p (arg1,
14988 strict_overflow_p)));
14989
14990 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14991 /* True if the 2nd argument is nonnegative. */
14992 return tree_expr_nonnegative_warnv_p (arg1,
14993 strict_overflow_p);
14994
14995 CASE_FLT_FN (BUILT_IN_POWI):
14996 /* True if the 1st argument is nonnegative or the second
14997 argument is an even integer. */
14998 if (TREE_CODE (arg1) == INTEGER_CST
14999 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15000 return true;
15001 return tree_expr_nonnegative_warnv_p (arg0,
15002 strict_overflow_p);
15003
15004 CASE_FLT_FN (BUILT_IN_POW):
15005 /* True if the 1st argument is nonnegative or the second
15006 argument is an even integer valued real. */
15007 if (TREE_CODE (arg1) == REAL_CST)
15008 {
15009 REAL_VALUE_TYPE c;
15010 HOST_WIDE_INT n;
15011
15012 c = TREE_REAL_CST (arg1);
15013 n = real_to_integer (&c);
15014 if ((n & 1) == 0)
15015 {
15016 REAL_VALUE_TYPE cint;
15017 real_from_integer (&cint, VOIDmode, n, SIGNED);
15018 if (real_identical (&c, &cint))
15019 return true;
15020 }
15021 }
15022 return tree_expr_nonnegative_warnv_p (arg0,
15023 strict_overflow_p);
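/* Editorial illustration (not part of the original sources):
   pow (x, 2.0) is known non-negative for any x because 2.0 is an even
   integer valued real, whereas pow (x, 0.5) is non-negative only when
   x provably is.  */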
15024
15025 default:
15026 break;
15027 }
15028 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15029 type);
15030 }
15031
15032 /* Return true if T is known to be non-negative. If the return
15033 value is based on the assumption that signed overflow is undefined,
15034 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15035 *STRICT_OVERFLOW_P. */
15036
15037 static bool
15038 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15039 {
15040 enum tree_code code = TREE_CODE (t);
15041 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15042 return true;
15043
15044 switch (code)
15045 {
15046 case TARGET_EXPR:
15047 {
15048 tree temp = TARGET_EXPR_SLOT (t);
15049 t = TARGET_EXPR_INITIAL (t);
15050
15051 /* If the initializer is non-void, then it's a normal expression
15052 that will be assigned to the slot. */
15053 if (!VOID_TYPE_P (t))
15054 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15055
15056 /* Otherwise, the initializer sets the slot in some way. One common
15057 way is an assignment statement at the end of the initializer. */
15058 while (1)
15059 {
15060 if (TREE_CODE (t) == BIND_EXPR)
15061 t = expr_last (BIND_EXPR_BODY (t));
15062 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15063 || TREE_CODE (t) == TRY_CATCH_EXPR)
15064 t = expr_last (TREE_OPERAND (t, 0));
15065 else if (TREE_CODE (t) == STATEMENT_LIST)
15066 t = expr_last (t);
15067 else
15068 break;
15069 }
15070 if (TREE_CODE (t) == MODIFY_EXPR
15071 && TREE_OPERAND (t, 0) == temp)
15072 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15073 strict_overflow_p);
15074
15075 return false;
15076 }
15077
15078 case CALL_EXPR:
15079 {
15080 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15081 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15082
15083 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15084 get_callee_fndecl (t),
15085 arg0,
15086 arg1,
15087 strict_overflow_p);
15088 }
15089 case COMPOUND_EXPR:
15090 case MODIFY_EXPR:
15091 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15092 strict_overflow_p);
15093 case BIND_EXPR:
15094 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15095 strict_overflow_p);
15096 case SAVE_EXPR:
15097 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15098 strict_overflow_p);
15099
15100 default:
15101 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15102 TREE_TYPE (t));
15103 }
15104
15105 /* We don't know sign of `t', so be conservative and return false. */
15106 return false;
15107 }
15108
15109 /* Return true if T is known to be non-negative. If the return
15110 value is based on the assumption that signed overflow is undefined,
15111 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15112 *STRICT_OVERFLOW_P. */
15113
15114 bool
15115 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15116 {
15117 enum tree_code code;
15118 if (t == error_mark_node)
15119 return false;
15120
15121 code = TREE_CODE (t);
15122 switch (TREE_CODE_CLASS (code))
15123 {
15124 case tcc_binary:
15125 case tcc_comparison:
15126 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15127 TREE_TYPE (t),
15128 TREE_OPERAND (t, 0),
15129 TREE_OPERAND (t, 1),
15130 strict_overflow_p);
15131
15132 case tcc_unary:
15133 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15134 TREE_TYPE (t),
15135 TREE_OPERAND (t, 0),
15136 strict_overflow_p);
15137
15138 case tcc_constant:
15139 case tcc_declaration:
15140 case tcc_reference:
15141 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15142
15143 default:
15144 break;
15145 }
15146
15147 switch (code)
15148 {
15149 case TRUTH_AND_EXPR:
15150 case TRUTH_OR_EXPR:
15151 case TRUTH_XOR_EXPR:
15152 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15153 TREE_TYPE (t),
15154 TREE_OPERAND (t, 0),
15155 TREE_OPERAND (t, 1),
15156 strict_overflow_p);
15157 case TRUTH_NOT_EXPR:
15158 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15159 TREE_TYPE (t),
15160 TREE_OPERAND (t, 0),
15161 strict_overflow_p);
15162
15163 case COND_EXPR:
15164 case CONSTRUCTOR:
15165 case OBJ_TYPE_REF:
15166 case ASSERT_EXPR:
15167 case ADDR_EXPR:
15168 case WITH_SIZE_EXPR:
15169 case SSA_NAME:
15170 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15171
15172 default:
15173 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15174 }
15175 }
15176
15177 /* Return true if `t' is known to be non-negative. Handle warnings
15178 about undefined signed overflow. */
15179
15180 bool
15181 tree_expr_nonnegative_p (tree t)
15182 {
15183 bool ret, strict_overflow_p;
15184
15185 strict_overflow_p = false;
15186 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15187 if (strict_overflow_p)
15188 fold_overflow_warning (("assuming signed overflow does not occur when "
15189 "determining that expression is always "
15190 "non-negative"),
15191 WARN_STRICT_OVERFLOW_MISC);
15192 return ret;
15193 }
15194
15195
15196 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15197 For floating point we further ensure that T is not denormal.
15198 Similar logic is present in nonzero_address in rtlanal.c.
15199
15200 If the return value is based on the assumption that signed overflow
15201 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15202 change *STRICT_OVERFLOW_P. */
15203
15204 bool
15205 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15206 bool *strict_overflow_p)
15207 {
15208 switch (code)
15209 {
15210 case ABS_EXPR:
15211 return tree_expr_nonzero_warnv_p (op0,
15212 strict_overflow_p);
15213
15214 case NOP_EXPR:
15215 {
15216 tree inner_type = TREE_TYPE (op0);
15217 tree outer_type = type;
15218
15219 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15220 && tree_expr_nonzero_warnv_p (op0,
15221 strict_overflow_p));
15222 }
15223 break;
15224
15225 case NON_LVALUE_EXPR:
15226 return tree_expr_nonzero_warnv_p (op0,
15227 strict_overflow_p);
15228
15229 default:
15230 break;
15231 }
15232
15233 return false;
15234 }
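
/* A small sketch of the NOP_EXPR case above: a widening conversion
   preserves a nonzero value, while a narrowing one may truncate it away,
   which is why the precision check is required.

     short s = 256;
     long l = (long) s;    -- nonzero survives, precision grows
     char c = (char) s;    -- 256 truncates to 0, nonzero does NOT survive

   Hence only conversions where the outer precision is at least the
   inner precision let nonzero-ness propagate.  */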
15235
/* Return true when the binary expression (CODE OP0 OP1) is known to be
   nonzero.  For floating point we further ensure that the value is not
   denormal.  Similar logic is present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
15243
15244 bool
15245 tree_binary_nonzero_warnv_p (enum tree_code code,
15246 tree type,
15247 tree op0,
15248 tree op1, bool *strict_overflow_p)
15249 {
15250 bool sub_strict_overflow_p;
15251 switch (code)
15252 {
15253 case POINTER_PLUS_EXPR:
15254 case PLUS_EXPR:
15255 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15256 {
	  /* In the presence of negative values it is hard to say
	     anything definite.  */
15259 sub_strict_overflow_p = false;
15260 if (!tree_expr_nonnegative_warnv_p (op0,
15261 &sub_strict_overflow_p)
15262 || !tree_expr_nonnegative_warnv_p (op1,
15263 &sub_strict_overflow_p))
15264 return false;
	  /* One of the operands must be positive and the other non-negative.  */
	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
	     overflows, on a two's-complement machine the sum of two
	     nonnegative numbers can never be zero.  */
15269 return (tree_expr_nonzero_warnv_p (op0,
15270 strict_overflow_p)
15271 || tree_expr_nonzero_warnv_p (op1,
15272 strict_overflow_p));
15273 }
15274 break;
15275
15276 case MULT_EXPR:
15277 if (TYPE_OVERFLOW_UNDEFINED (type))
15278 {
15279 if (tree_expr_nonzero_warnv_p (op0,
15280 strict_overflow_p)
15281 && tree_expr_nonzero_warnv_p (op1,
15282 strict_overflow_p))
15283 {
15284 *strict_overflow_p = true;
15285 return true;
15286 }
15287 }
15288 break;
15289
15290 case MIN_EXPR:
15291 sub_strict_overflow_p = false;
15292 if (tree_expr_nonzero_warnv_p (op0,
15293 &sub_strict_overflow_p)
15294 && tree_expr_nonzero_warnv_p (op1,
15295 &sub_strict_overflow_p))
	{
	  if (sub_strict_overflow_p)
	    *strict_overflow_p = true;
	  /* MIN of two nonzero operands is one of them and hence nonzero.  */
	  return true;
	}
15300 break;
15301
15302 case MAX_EXPR:
15303 sub_strict_overflow_p = false;
15304 if (tree_expr_nonzero_warnv_p (op0,
15305 &sub_strict_overflow_p))
15306 {
15307 if (sub_strict_overflow_p)
15308 *strict_overflow_p = true;
15309
15310 /* When both operands are nonzero, then MAX must be too. */
15311 if (tree_expr_nonzero_warnv_p (op1,
15312 strict_overflow_p))
15313 return true;
15314
15315 /* MAX where operand 0 is positive is positive. */
15316 return tree_expr_nonnegative_warnv_p (op0,
15317 strict_overflow_p);
15318 }
15319 /* MAX where operand 1 is positive is positive. */
15320 else if (tree_expr_nonzero_warnv_p (op1,
15321 &sub_strict_overflow_p)
15322 && tree_expr_nonnegative_warnv_p (op1,
15323 &sub_strict_overflow_p))
15324 {
15325 if (sub_strict_overflow_p)
15326 *strict_overflow_p = true;
15327 return true;
15328 }
15329 break;
15330
15331 case BIT_IOR_EXPR:
15332 return (tree_expr_nonzero_warnv_p (op1,
15333 strict_overflow_p)
15334 || tree_expr_nonzero_warnv_p (op0,
15335 strict_overflow_p));
15336
15337 default:
15338 break;
15339 }
15340
15341 return false;
15342 }
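
/* Worked example for the PLUS_EXPR case above (an illustrative sketch,
   not part of the original code): if both operands are known
   non-negative and at least one is known nonzero, the sum cannot be
   zero.  E.g. in

     (unsigned char) x + 1

   operand 0 is non-negative and operand 1 is a nonzero constant, so the
   whole expression, evaluated in a signed type with undefined overflow,
   may be treated as nonzero.  */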
15343
/* Return true when the single expression T is known to be nonzero;
   T is typically a constant, an address or a COND_EXPR.  For floating
   point we further ensure that T is not denormal.  Similar logic is
   present in nonzero_address_p in rtlanal.c.

   If the return value is based on the assumption that signed overflow
   is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
   change *STRICT_OVERFLOW_P.  */
15351
15352 bool
15353 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15354 {
15355 bool sub_strict_overflow_p;
15356 switch (TREE_CODE (t))
15357 {
15358 case INTEGER_CST:
15359 return !integer_zerop (t);
15360
15361 case ADDR_EXPR:
15362 {
15363 tree base = TREE_OPERAND (t, 0);
15364
15365 if (!DECL_P (base))
15366 base = get_base_address (base);
15367
15368 if (!base)
15369 return false;
15370
	/* For objects in the symbol table, check whether we know they
	   are nonzero.  Don't do anything for variables and functions
	   before the symtab is built; it is quite possible that they
	   will be declared weak later.  */
15374 if (DECL_P (base) && decl_in_symtab_p (base))
15375 {
15376 struct symtab_node *symbol;
15377
15378 symbol = symtab_node::get_create (base);
15379 if (symbol)
15380 return symbol->nonzero_address ();
15381 else
15382 return false;
15383 }
15384
      /* Function-local objects are never NULL.  */
15386 if (DECL_P (base)
15387 && (DECL_CONTEXT (base)
15388 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15389 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15390 return true;
15391
15392 /* Constants are never weak. */
15393 if (CONSTANT_CLASS_P (base))
15394 return true;
15395
15396 return false;
15397 }
15398
15399 case COND_EXPR:
15400 sub_strict_overflow_p = false;
15401 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15402 &sub_strict_overflow_p)
15403 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15404 &sub_strict_overflow_p))
15405 {
15406 if (sub_strict_overflow_p)
15407 *strict_overflow_p = true;
15408 return true;
15409 }
15410 break;
15411
15412 default:
15413 break;
15414 }
15415 return false;
15416 }
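
/* Example of the ADDR_EXPR handling above (sketch): the address of a
   function-local auto variable can never be null, so

     int g (void)
     {
       int local;
       return &local != 0;   -- may fold to 1
     }

   folds, whereas the address of a file-scope variable is left alone
   until the symbol table can tell us it is not a weak symbol.  */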
15417
15418 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15419 attempt to fold the expression to a constant without modifying TYPE,
15420 OP0 or OP1.
15421
   If the expression can be simplified to a constant, return the
   constant; otherwise return NULL_TREE.  */
15425
15426 tree
15427 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15428 {
15429 tree tem = fold_binary (code, type, op0, op1);
15430 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15431 }
15432
15433 /* Given the components of a unary expression CODE, TYPE and OP0,
15434 attempt to fold the expression to a constant without modifying
15435 TYPE or OP0.
15436
   If the expression can be simplified to a constant, return the
   constant; otherwise return NULL_TREE.  */
15440
15441 tree
15442 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15443 {
15444 tree tem = fold_unary (code, type, op0);
15445 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15446 }
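
/* A minimal sketch of how these two helpers behave (hypothetical
   caller, not from the original sources; `some_var' stands for an
   arbitrary VAR_DECL):

     tree five = build_int_cst (integer_type_node, 5);
     tree three = build_int_cst (integer_type_node, 3);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					 five, three);
     -- sum is the INTEGER_CST 8
     tree var_sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
					     five, some_var);
     -- var_sum is NULL_TREE, since 5 + some_var is not a constant
*/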
15447
15448 /* If EXP represents referencing an element in a constant string
15449 (either via pointer arithmetic or array indexing), return the
15450 tree representing the value accessed, otherwise return NULL. */
15451
15452 tree
15453 fold_read_from_constant_string (tree exp)
15454 {
15455 if ((TREE_CODE (exp) == INDIRECT_REF
15456 || TREE_CODE (exp) == ARRAY_REF)
15457 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15458 {
15459 tree exp1 = TREE_OPERAND (exp, 0);
15460 tree index;
15461 tree string;
15462 location_t loc = EXPR_LOCATION (exp);
15463
15464 if (TREE_CODE (exp) == INDIRECT_REF)
15465 string = string_constant (exp1, &index);
15466 else
15467 {
15468 tree low_bound = array_ref_low_bound (exp);
15469 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15470
	  /* Optimize the special case of a zero lower bound.

	     We convert low_bound to sizetype to avoid some problems
	     with constant folding.  E.g. suppose the lower bound is 1
	     and its mode is QI.  Without the conversion
		(ARRAY + (INDEX - (unsigned char) 1))
	     becomes
		((ARRAY + (-(unsigned char) 1)) + INDEX)
	     which becomes
		(ARRAY + 255 + INDEX).
	     Oops!  */
15478 if (! integer_zerop (low_bound))
15479 index = size_diffop_loc (loc, index,
15480 fold_convert_loc (loc, sizetype, low_bound));
15481
15482 string = exp1;
15483 }
15484
15485 if (string
15486 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15487 && TREE_CODE (string) == STRING_CST
15488 && TREE_CODE (index) == INTEGER_CST
15489 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15490 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15491 == MODE_INT)
15492 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15493 return build_int_cst_type (TREE_TYPE (exp),
15494 (TREE_STRING_POINTER (string)
15495 [TREE_INT_CST_LOW (index)]));
15496 }
15497 return NULL;
15498 }
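
/* Illustrative example (sketch): this is what lets

     char c = "hello"[1];    -- folds to 'e'

   be evaluated at compile time.  The ARRAY_REF (or the *(p + 1) form)
   into the STRING_CST is replaced by the character constant, provided
   the index is a constant within the string bounds and the element has
   a one-byte integer mode.  */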
15499
15500 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15501 an integer constant, real, or fixed-point constant.
15502
15503 TYPE is the type of the result. */
15504
15505 static tree
15506 fold_negate_const (tree arg0, tree type)
15507 {
15508 tree t = NULL_TREE;
15509
15510 switch (TREE_CODE (arg0))
15511 {
15512 case INTEGER_CST:
15513 {
15514 bool overflow;
15515 wide_int val = wi::neg (arg0, &overflow);
15516 t = force_fit_type (type, val, 1,
15517 (overflow | TREE_OVERFLOW (arg0))
15518 && !TYPE_UNSIGNED (type));
15519 break;
15520 }
15521
15522 case REAL_CST:
15523 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15524 break;
15525
15526 case FIXED_CST:
15527 {
15528 FIXED_VALUE_TYPE f;
15529 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15530 &(TREE_FIXED_CST (arg0)), NULL,
15531 TYPE_SATURATING (type));
15532 t = build_fixed (type, f);
15533 /* Propagate overflow flags. */
15534 if (overflow_p | TREE_OVERFLOW (arg0))
15535 TREE_OVERFLOW (t) = 1;
15536 break;
15537 }
15538
15539 default:
15540 gcc_unreachable ();
15541 }
15542
15543 return t;
15544 }
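
/* Worked example (sketch): negating the most negative value of a signed
   type wraps back to itself, so the overflow flag must be propagated.
   For a 32-bit int, arg0 = -2147483648 makes wi::neg report overflow and
   the result is forced back into the type with TREE_OVERFLOW set, while
   arg0 = -5 simply yields the INTEGER_CST 5.  The fold_abs_const routine
   below faces the same wrinkle for abs (INT_MIN).  */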
15545
15546 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15547 an integer constant or real constant.
15548
15549 TYPE is the type of the result. */
15550
15551 tree
15552 fold_abs_const (tree arg0, tree type)
15553 {
15554 tree t = NULL_TREE;
15555
15556 switch (TREE_CODE (arg0))
15557 {
15558 case INTEGER_CST:
15559 {
15560 /* If the value is unsigned or non-negative, then the absolute value
15561 is the same as the ordinary value. */
15562 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15563 t = arg0;
15564
15565 /* If the value is negative, then the absolute value is
15566 its negation. */
15567 else
15568 {
15569 bool overflow;
15570 wide_int val = wi::neg (arg0, &overflow);
15571 t = force_fit_type (type, val, -1,
15572 overflow | TREE_OVERFLOW (arg0));
15573 }
15574 }
15575 break;
15576
15577 case REAL_CST:
15578 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15579 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15580 else
15581 t = arg0;
15582 break;
15583
15584 default:
15585 gcc_unreachable ();
15586 }
15587
15588 return t;
15589 }
15590
15591 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15592 constant. TYPE is the type of the result. */
15593
15594 static tree
15595 fold_not_const (const_tree arg0, tree type)
15596 {
15597 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15598
15599 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15600 }
15601
15602 /* Given CODE, a relational operator, the target type, TYPE and two
15603 constant operands OP0 and OP1, return the result of the
15604 relational operation. If the result is not a compile time
15605 constant, then return NULL_TREE. */
15606
15607 static tree
15608 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15609 {
15610 int result, invert;
15611
15612 /* From here on, the only cases we handle are when the result is
15613 known to be a constant. */
15614
15615 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15616 {
15617 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15618 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15619
15620 /* Handle the cases where either operand is a NaN. */
15621 if (real_isnan (c0) || real_isnan (c1))
15622 {
15623 switch (code)
15624 {
15625 case EQ_EXPR:
15626 case ORDERED_EXPR:
15627 result = 0;
15628 break;
15629
15630 case NE_EXPR:
15631 case UNORDERED_EXPR:
15632 case UNLT_EXPR:
15633 case UNLE_EXPR:
15634 case UNGT_EXPR:
15635 case UNGE_EXPR:
15636 case UNEQ_EXPR:
15637 result = 1;
15638 break;
15639
15640 case LT_EXPR:
15641 case LE_EXPR:
15642 case GT_EXPR:
15643 case GE_EXPR:
15644 case LTGT_EXPR:
15645 if (flag_trapping_math)
15646 return NULL_TREE;
15647 result = 0;
15648 break;
15649
15650 default:
15651 gcc_unreachable ();
15652 }
15653
15654 return constant_boolean_node (result, type);
15655 }
15656
15657 return constant_boolean_node (real_compare (code, c0, c1), type);
15658 }
15659
15660 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15661 {
15662 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15663 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15664 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15665 }
15666
15667 /* Handle equality/inequality of complex constants. */
15668 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15669 {
15670 tree rcond = fold_relational_const (code, type,
15671 TREE_REALPART (op0),
15672 TREE_REALPART (op1));
15673 tree icond = fold_relational_const (code, type,
15674 TREE_IMAGPART (op0),
15675 TREE_IMAGPART (op1));
15676 if (code == EQ_EXPR)
15677 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15678 else if (code == NE_EXPR)
15679 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15680 else
15681 return NULL_TREE;
15682 }
15683
15684 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15685 {
15686 unsigned count = VECTOR_CST_NELTS (op0);
15687 tree *elts = XALLOCAVEC (tree, count);
15688 gcc_assert (VECTOR_CST_NELTS (op1) == count
15689 && TYPE_VECTOR_SUBPARTS (type) == count);
15690
15691 for (unsigned i = 0; i < count; i++)
15692 {
15693 tree elem_type = TREE_TYPE (type);
15694 tree elem0 = VECTOR_CST_ELT (op0, i);
15695 tree elem1 = VECTOR_CST_ELT (op1, i);
15696
15697 tree tem = fold_relational_const (code, elem_type,
15698 elem0, elem1);
15699
15700 if (tem == NULL_TREE)
15701 return NULL_TREE;
15702
15703 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15704 }
15705
15706 return build_vector (type, elts);
15707 }
15708
15709 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15710
15711 To compute GT, swap the arguments and do LT.
15712 To compute GE, do LT and invert the result.
15713 To compute LE, swap the arguments, do LT and invert the result.
15714 To compute NE, do EQ and invert the result.
15715
15716 Therefore, the code below must handle only EQ and LT. */
15717
15718 if (code == LE_EXPR || code == GT_EXPR)
15719 {
15720 tree tem = op0;
15721 op0 = op1;
15722 op1 = tem;
15723 code = swap_tree_comparison (code);
15724 }
15725
  /* Note that it is safe to invert for real values here because we
     have already handled the one case where it matters.  */
15728
15729 invert = 0;
15730 if (code == NE_EXPR || code == GE_EXPR)
15731 {
15732 invert = 1;
15733 code = invert_tree_comparison (code, false);
15734 }
15735
  /* Compute a result for LT or EQ if the arguments permit;
     otherwise return NULL_TREE.  */
15738 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15739 {
15740 if (code == EQ_EXPR)
15741 result = tree_int_cst_equal (op0, op1);
15742 else
15743 result = tree_int_cst_lt (op0, op1);
15744 }
15745 else
15746 return NULL_TREE;
15747
15748 if (invert)
15749 result ^= 1;
15750 return constant_boolean_node (result, type);
15751 }
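
/* Illustrative results of the constant folding above (sketch), with
   c0 a NaN constant:

     c0 <  1.0    -- folds to false, unless -ftrapping-math keeps it,
                     since LT on a NaN raises an invalid-operand trap
     c0 != 1.0    -- folds to true
     __builtin_isunordered (c0, 1.0)   -- folds to true

   An integer comparison such as 2 < 3 ends, after the GT/GE/LE/NE
   canonicalization above, in a single tree_int_cst_lt call.  */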
15752
15753 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15754 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15755 itself. */
15756
15757 tree
15758 fold_build_cleanup_point_expr (tree type, tree expr)
15759 {
15760 /* If the expression does not have side effects then we don't have to wrap
15761 it with a cleanup point expression. */
15762 if (!TREE_SIDE_EFFECTS (expr))
15763 return expr;
15764
  /* If the expression is a RETURN_EXPR, check whether the expression
     inside the return, or the right-hand side of the MODIFY_EXPR inside
     the return, has side effects.  If either of them doesn't, we don't
     need to wrap the expression in a cleanup point expression.  Note we
     don't check the left-hand side of the modify because it should
     always be a return decl.  */
15770 if (TREE_CODE (expr) == RETURN_EXPR)
15771 {
15772 tree op = TREE_OPERAND (expr, 0);
15773 if (!op || !TREE_SIDE_EFFECTS (op))
15774 return expr;
15775 op = TREE_OPERAND (op, 1);
15776 if (!TREE_SIDE_EFFECTS (op))
15777 return expr;
15778 }
15779
15780 return build1 (CLEANUP_POINT_EXPR, type, expr);
15781 }
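
/* Example (sketch): a plain `return x;' needs no cleanup point, but
   `return f ();' does, because temporaries created while evaluating
   f () must be cleaned up before control actually leaves the
   function.  */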
15782
15783 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15784 of an indirection through OP0, or NULL_TREE if no simplification is
15785 possible. */
15786
15787 tree
15788 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15789 {
15790 tree sub = op0;
15791 tree subtype;
15792
15793 STRIP_NOPS (sub);
15794 subtype = TREE_TYPE (sub);
15795 if (!POINTER_TYPE_P (subtype))
15796 return NULL_TREE;
15797
15798 if (TREE_CODE (sub) == ADDR_EXPR)
15799 {
15800 tree op = TREE_OPERAND (sub, 0);
15801 tree optype = TREE_TYPE (op);
      /* *&CONST_DECL -> the value of the const decl.  */
15803 if (TREE_CODE (op) == CONST_DECL)
15804 return DECL_INITIAL (op);
15805 /* *&p => p; make sure to handle *&"str"[cst] here. */
15806 if (type == optype)
15807 {
15808 tree fop = fold_read_from_constant_string (op);
15809 if (fop)
15810 return fop;
15811 else
15812 return op;
15813 }
15814 /* *(foo *)&fooarray => fooarray[0] */
15815 else if (TREE_CODE (optype) == ARRAY_TYPE
15816 && type == TREE_TYPE (optype)
15817 && (!in_gimple_form
15818 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15819 {
15820 tree type_domain = TYPE_DOMAIN (optype);
15821 tree min_val = size_zero_node;
15822 if (type_domain && TYPE_MIN_VALUE (type_domain))
15823 min_val = TYPE_MIN_VALUE (type_domain);
15824 if (in_gimple_form
15825 && TREE_CODE (min_val) != INTEGER_CST)
15826 return NULL_TREE;
15827 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15828 NULL_TREE, NULL_TREE);
15829 }
15830 /* *(foo *)&complexfoo => __real__ complexfoo */
15831 else if (TREE_CODE (optype) == COMPLEX_TYPE
15832 && type == TREE_TYPE (optype))
15833 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15834 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15835 else if (TREE_CODE (optype) == VECTOR_TYPE
15836 && type == TREE_TYPE (optype))
15837 {
15838 tree part_width = TYPE_SIZE (type);
15839 tree index = bitsize_int (0);
15840 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15841 }
15842 }
15843
15844 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15845 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15846 {
15847 tree op00 = TREE_OPERAND (sub, 0);
15848 tree op01 = TREE_OPERAND (sub, 1);
15849
15850 STRIP_NOPS (op00);
15851 if (TREE_CODE (op00) == ADDR_EXPR)
15852 {
15853 tree op00type;
15854 op00 = TREE_OPERAND (op00, 0);
15855 op00type = TREE_TYPE (op00);
15856
15857 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15858 if (TREE_CODE (op00type) == VECTOR_TYPE
15859 && type == TREE_TYPE (op00type))
15860 {
15861 HOST_WIDE_INT offset = tree_to_shwi (op01);
15862 tree part_width = TYPE_SIZE (type);
15863 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15864 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15865 tree index = bitsize_int (indexi);
15866
15867 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15868 return fold_build3_loc (loc,
15869 BIT_FIELD_REF, type, op00,
15870 part_width, index);
15871
15872 }
15873 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15874 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15875 && type == TREE_TYPE (op00type))
15876 {
15877 tree size = TYPE_SIZE_UNIT (type);
15878 if (tree_int_cst_equal (size, op01))
15879 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15880 }
15881 /* ((foo *)&fooarray)[1] => fooarray[1] */
15882 else if (TREE_CODE (op00type) == ARRAY_TYPE
15883 && type == TREE_TYPE (op00type))
15884 {
15885 tree type_domain = TYPE_DOMAIN (op00type);
15886 tree min_val = size_zero_node;
15887 if (type_domain && TYPE_MIN_VALUE (type_domain))
15888 min_val = TYPE_MIN_VALUE (type_domain);
15889 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15890 TYPE_SIZE_UNIT (type));
15891 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15892 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15893 NULL_TREE, NULL_TREE);
15894 }
15895 }
15896 }
15897
15898 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15899 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15900 && type == TREE_TYPE (TREE_TYPE (subtype))
15901 && (!in_gimple_form
15902 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15903 {
15904 tree type_domain;
15905 tree min_val = size_zero_node;
15906 sub = build_fold_indirect_ref_loc (loc, sub);
15907 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15908 if (type_domain && TYPE_MIN_VALUE (type_domain))
15909 min_val = TYPE_MIN_VALUE (type_domain);
15910 if (in_gimple_form
15911 && TREE_CODE (min_val) != INTEGER_CST)
15912 return NULL_TREE;
15913 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15914 NULL_TREE);
15915 }
15916
15917 return NULL_TREE;
15918 }
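
/* Examples of the simplifications performed above (sketch):

     int a[4];
     *(int *) &a         =>  a[0]            -- array case
     _Complex double z;
     *(double *) &z      =>  __real__ z      -- complex case
     ((double *) &z)[1]  =>  __imag__ z      -- POINTER_PLUS_EXPR case,
                                                offset == TYPE_SIZE_UNIT

   each of which avoids materializing an actual memory indirection.  */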
15919
15920 /* Builds an expression for an indirection through T, simplifying some
15921 cases. */
15922
15923 tree
15924 build_fold_indirect_ref_loc (location_t loc, tree t)
15925 {
15926 tree type = TREE_TYPE (TREE_TYPE (t));
15927 tree sub = fold_indirect_ref_1 (loc, type, t);
15928
15929 if (sub)
15930 return sub;
15931
15932 return build1_loc (loc, INDIRECT_REF, type, t);
15933 }
15934
15935 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15936
15937 tree
15938 fold_indirect_ref_loc (location_t loc, tree t)
15939 {
15940 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15941
15942 if (sub)
15943 return sub;
15944 else
15945 return t;
15946 }
15947
15948 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15949 whose result is ignored. The type of the returned tree need not be
15950 the same as the original expression. */
15951
15952 tree
15953 fold_ignored_result (tree t)
15954 {
15955 if (!TREE_SIDE_EFFECTS (t))
15956 return integer_zero_node;
15957
15958 for (;;)
15959 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15960 {
15961 case tcc_unary:
15962 t = TREE_OPERAND (t, 0);
15963 break;
15964
15965 case tcc_binary:
15966 case tcc_comparison:
15967 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15968 t = TREE_OPERAND (t, 0);
15969 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15970 t = TREE_OPERAND (t, 1);
15971 else
15972 return t;
15973 break;
15974
15975 case tcc_expression:
15976 switch (TREE_CODE (t))
15977 {
15978 case COMPOUND_EXPR:
15979 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15980 return t;
15981 t = TREE_OPERAND (t, 0);
15982 break;
15983
15984 case COND_EXPR:
15985 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15986 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15987 return t;
15988 t = TREE_OPERAND (t, 0);
15989 break;
15990
15991 default:
15992 return t;
15993 }
15994 break;
15995
15996 default:
15997 return t;
15998 }
15999 }
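
/* Sketch of what gets stripped: if the value of

     x + f ()

   is ignored, the addition is dead and only the call has an effect, so
   the result is f ().  A wholly side-effect-free expression collapses
   to integer_zero_node.  */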
16000
16001 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16002
16003 tree
16004 round_up_loc (location_t loc, tree value, unsigned int divisor)
16005 {
16006 tree div = NULL_TREE;
16007
16008 if (divisor == 1)
16009 return value;
16010
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant the check is more expensive than simply
     performing the rounding.  */
16015 if (TREE_CODE (value) != INTEGER_CST)
16016 {
16017 div = build_int_cst (TREE_TYPE (value), divisor);
16018
16019 if (multiple_of_p (TREE_TYPE (value), value, div))
16020 return value;
16021 }
16022
16023 /* If divisor is a power of two, simplify this to bit manipulation. */
16024 if (divisor == (divisor & -divisor))
16025 {
16026 if (TREE_CODE (value) == INTEGER_CST)
16027 {
16028 wide_int val = value;
16029 bool overflow_p;
16030
16031 if ((val & (divisor - 1)) == 0)
16032 return value;
16033
16034 overflow_p = TREE_OVERFLOW (value);
16035 val += divisor - 1;
16036 val &= - (int) divisor;
16037 if (val == 0)
16038 overflow_p = true;
16039
16040 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16041 }
16042 else
16043 {
16044 tree t;
16045
16046 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16047 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16048 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16049 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16050 }
16051 }
16052 else
16053 {
16054 if (!div)
16055 div = build_int_cst (TREE_TYPE (value), divisor);
16056 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16057 value = size_binop_loc (loc, MULT_EXPR, value, div);
16058 }
16059
16060 return value;
16061 }
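
/* Worked example of the power-of-two path (sketch): for DIVISOR == 8
   the rounding reduces to (VALUE + 7) & -8, e.g.

     13 + 7 = 20,  20 & -8 = 16

   so rounding 13 up to a multiple of 8 yields 16; the overflow flag is
   set only if the addition wraps the value around to zero.  */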
16062
16063 /* Likewise, but round down. */
16064
16065 tree
16066 round_down_loc (location_t loc, tree value, int divisor)
16067 {
16068 tree div = NULL_TREE;
16069
16070 gcc_assert (divisor > 0);
16071 if (divisor == 1)
16072 return value;
16073
  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
     have to do anything.  Only do this when VALUE is not a constant,
     because for a constant the check is more expensive than simply
     performing the rounding.  */
16078 if (TREE_CODE (value) != INTEGER_CST)
16079 {
16080 div = build_int_cst (TREE_TYPE (value), divisor);
16081
16082 if (multiple_of_p (TREE_TYPE (value), value, div))
16083 return value;
16084 }
16085
16086 /* If divisor is a power of two, simplify this to bit manipulation. */
16087 if (divisor == (divisor & -divisor))
16088 {
16089 tree t;
16090
16091 t = build_int_cst (TREE_TYPE (value), -divisor);
16092 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16093 }
16094 else
16095 {
16096 if (!div)
16097 div = build_int_cst (TREE_TYPE (value), divisor);
16098 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16099 value = size_binop_loc (loc, MULT_EXPR, value, div);
16100 }
16101
16102 return value;
16103 }
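
/* Likewise for rounding down (sketch): for DIVISOR == 8 this is simply
   VALUE & -8, e.g. 13 & -8 == 8, while a non-power-of-two divisor falls
   back to FLOOR_DIV_EXPR followed by MULT_EXPR.  */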
16104
/* Returns a pointer to the base of the object addressed by EXP and
   extracts information about the offset of the access, storing it
   in *PBITPOS and *POFFSET.  */
16108
16109 static tree
16110 split_address_to_core_and_offset (tree exp,
16111 HOST_WIDE_INT *pbitpos, tree *poffset)
16112 {
16113 tree core;
16114 machine_mode mode;
16115 int unsignedp, volatilep;
16116 HOST_WIDE_INT bitsize;
16117 location_t loc = EXPR_LOCATION (exp);
16118
16119 if (TREE_CODE (exp) == ADDR_EXPR)
16120 {
16121 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16122 poffset, &mode, &unsignedp, &volatilep,
16123 false);
16124 core = build_fold_addr_expr_loc (loc, core);
16125 }
16126 else
16127 {
16128 core = exp;
16129 *pbitpos = 0;
16130 *poffset = NULL_TREE;
16131 }
16132
16133 return core;
16134 }
16135
16136 /* Returns true if addresses of E1 and E2 differ by a constant, false
16137 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16138
16139 bool
16140 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16141 {
16142 tree core1, core2;
16143 HOST_WIDE_INT bitpos1, bitpos2;
16144 tree toffset1, toffset2, tdiff, type;
16145
16146 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16147 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16148
16149 if (bitpos1 % BITS_PER_UNIT != 0
16150 || bitpos2 % BITS_PER_UNIT != 0
16151 || !operand_equal_p (core1, core2, 0))
16152 return false;
16153
16154 if (toffset1 && toffset2)
16155 {
16156 type = TREE_TYPE (toffset1);
16157 if (type != TREE_TYPE (toffset2))
16158 toffset2 = fold_convert (type, toffset2);
16159
16160 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16161 if (!cst_and_fits_in_hwi (tdiff))
16162 return false;
16163
16164 *diff = int_cst_value (tdiff);
16165 }
16166 else if (toffset1 || toffset2)
16167 {
16168 /* If only one of the offsets is non-constant, the difference cannot
16169 be a constant. */
16170 return false;
16171 }
16172 else
16173 *diff = 0;
16174
16175 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16176 return true;
16177 }
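
/* Example (sketch): for

     int a[10];
     e1 = &a[5], e2 = &a[2]

   both cores are &a, the offsets differ by 3 * sizeof (int) bytes, and
   *DIFF receives 12 on a target with 4-byte int.  Had one address used
   a variable index, the difference would not be constant and false
   would be returned.  */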
16178
16179 /* Simplify the floating point expression EXP when the sign of the
16180 result is not significant. Return NULL_TREE if no simplification
16181 is possible. */
16182
16183 tree
16184 fold_strip_sign_ops (tree exp)
16185 {
16186 tree arg0, arg1;
16187 location_t loc = EXPR_LOCATION (exp);
16188
16189 switch (TREE_CODE (exp))
16190 {
16191 case ABS_EXPR:
16192 case NEGATE_EXPR:
16193 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16194 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16195
16196 case MULT_EXPR:
16197 case RDIV_EXPR:
16198 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16199 return NULL_TREE;
16200 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16201 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16202 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16203 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16204 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16205 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16206 break;
16207
16208 case COMPOUND_EXPR:
16209 arg0 = TREE_OPERAND (exp, 0);
16210 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16211 if (arg1)
16212 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16213 break;
16214
16215 case COND_EXPR:
16216 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16217 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16218 if (arg0 || arg1)
16219 return fold_build3_loc (loc,
16220 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16221 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16222 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16223 break;
16224
16225 case CALL_EXPR:
16226 {
16227 const enum built_in_function fcode = builtin_mathfn_code (exp);
16228 switch (fcode)
16229 {
16230 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16231 /* Strip copysign function call, return the 1st argument. */
16232 arg0 = CALL_EXPR_ARG (exp, 0);
16233 arg1 = CALL_EXPR_ARG (exp, 1);
16234 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16235
16236 default:
16237 /* Strip sign ops from the argument of "odd" math functions. */
16238 if (negate_mathfn_p (fcode))
16239 {
16240 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16241 if (arg0)
16242 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16243 }
16244 break;
16245 }
16246 }
16247 break;
16248
16249 default:
16250 break;
16251 }
16252 return NULL_TREE;
16253 }
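
/* Illustrative uses (sketch), valid when only the magnitude of the
   result matters:

     -x * -y          can be treated as  x * y
     fabs (x) / y     can be treated as  x / y
     copysign (x, s)  reduces to x (keeping any side effects of s)
     sin (-x)         becomes sin (x), since sin is odd (negate_mathfn_p)

   Callers use this e.g. when the result immediately feeds fabs () or
   is squared.  */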
16254
16255 /* Return OFF converted to a pointer offset type suitable as offset for
16256 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16257 tree
16258 convert_to_ptrofftype_loc (location_t loc, tree off)
16259 {
16260 return fold_convert_loc (loc, sizetype, off);
16261 }
16262
16263 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16264 tree
16265 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16266 {
16267 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16268 ptr, convert_to_ptrofftype_loc (loc, off));
16269 }
16270
16271 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16272 tree
16273 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16274 {
16275 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16276 ptr, size_int (off));
16277 }