Use std::swap instead of explicit swaps
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision etc. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
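
/* Illustrative usage sketch, not from the original file (hypothetical array
   shape, for exposition only): size_int and size_binop fold a size
   computation down to a single sizetype INTEGER_CST.  */
#if 0
static tree
example_array_size (void)
{
  /* Bytes for a 10-element array of 4-byte elements plus an 8-byte header;
     every operand and the result are compile-time sizetype constants, so
     this folds to the INTEGER_CST 48.  */
  tree payload = size_binop (MULT_EXPR, size_int (4), size_int (10));
  return size_binop (PLUS_EXPR, payload, size_int (8));
}
#endif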
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "hash-set.h"
49 #include "machmode.h"
50 #include "vec.h"
51 #include "double-int.h"
52 #include "input.h"
53 #include "alias.h"
54 #include "symtab.h"
55 #include "wide-int.h"
56 #include "inchash.h"
57 #include "tree.h"
58 #include "fold-const.h"
59 #include "stor-layout.h"
60 #include "calls.h"
61 #include "tree-iterator.h"
62 #include "realmpfr.h"
63 #include "rtl.h"
64 #include "hashtab.h"
65 #include "hard-reg-set.h"
66 #include "function.h"
67 #include "statistics.h"
68 #include "real.h"
69 #include "fixed-value.h"
70 #include "insn-config.h"
71 #include "expmed.h"
72 #include "dojump.h"
73 #include "explow.h"
74 #include "emit-rtl.h"
75 #include "varasm.h"
76 #include "stmt.h"
77 #include "expr.h"
78 #include "tm_p.h"
79 #include "target.h"
80 #include "diagnostic-core.h"
81 #include "intl.h"
82 #include "langhooks.h"
83 #include "md5.h"
84 #include "predict.h"
85 #include "basic-block.h"
86 #include "tree-ssa-alias.h"
87 #include "internal-fn.h"
88 #include "tree-eh.h"
89 #include "gimple-expr.h"
90 #include "is-a.h"
91 #include "gimple.h"
92 #include "gimplify.h"
93 #include "tree-dfa.h"
94 #include "hash-table.h" /* Required for ENABLE_FOLD_CHECKING. */
95 #include "builtins.h"
96 #include "hash-map.h"
97 #include "plugin-api.h"
98 #include "ipa-ref.h"
99 #include "cgraph.h"
100 #include "generic-match.h"
101 #include "optabs.h"
102
103 /* Nonzero if we are folding constants inside an initializer; zero
104 otherwise. */
105 int folding_initializer = 0;
106
107 /* The following constants represent a bit based encoding of GCC's
108 comparison operators. This encoding simplifies transformations
109 on relational comparison operators, such as AND and OR. */
110 enum comparison_code {
111 COMPCODE_FALSE = 0,
112 COMPCODE_LT = 1,
113 COMPCODE_EQ = 2,
114 COMPCODE_LE = 3,
115 COMPCODE_GT = 4,
116 COMPCODE_LTGT = 5,
117 COMPCODE_GE = 6,
118 COMPCODE_ORD = 7,
119 COMPCODE_UNORD = 8,
120 COMPCODE_UNLT = 9,
121 COMPCODE_UNEQ = 10,
122 COMPCODE_UNLE = 11,
123 COMPCODE_UNGT = 12,
124 COMPCODE_NE = 13,
125 COMPCODE_UNGE = 14,
126 COMPCODE_TRUE = 15
127 };
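
/* Illustrative identities (for exposition): with this bit encoding, logical
   combinations of comparisons become bitwise operations on the codes, e.g.
     (COMPCODE_LT | COMPCODE_EQ) == COMPCODE_LE       (1 | 2 == 3)
     (COMPCODE_LT | COMPCODE_GT) == COMPCODE_LTGT     (1 | 4 == 5)
     (COMPCODE_GE & COMPCODE_LE) == COMPCODE_EQ       (6 & 3 == 2)
     (COMPCODE_UNORD | COMPCODE_LTGT) == COMPCODE_NE  (8 | 5 == 13).  */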
128
129 static bool negate_mathfn_p (enum built_in_function);
130 static bool negate_expr_p (tree);
131 static tree negate_expr (tree);
132 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
133 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
134 static enum comparison_code comparison_to_compcode (enum tree_code);
135 static enum tree_code compcode_to_comparison (enum comparison_code);
136 static int operand_equal_for_comparison_p (tree, tree, tree);
137 static int twoval_comparison_p (tree, tree *, tree *, int *);
138 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
139 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
140 static tree make_bit_field_ref (location_t, tree, tree,
141 HOST_WIDE_INT, HOST_WIDE_INT, int);
142 static tree optimize_bit_field_compare (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
145 HOST_WIDE_INT *,
146 machine_mode *, int *, int *,
147 tree *, tree *);
148 static int simple_operand_p (const_tree);
149 static bool simple_operand_p_2 (tree);
150 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
151 static tree range_predecessor (tree);
152 static tree range_successor (tree);
153 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
155 static tree unextend (tree, int, int, tree);
156 static tree optimize_minmax_comparison (location_t, enum tree_code,
157 tree, tree, tree);
158 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
159 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
160 static tree fold_binary_op_with_conditional_arg (location_t,
161 enum tree_code, tree,
162 tree, tree,
163 tree, tree, int);
164 static tree fold_mathfn_compare (location_t,
165 enum built_in_function, enum tree_code,
166 tree, tree, tree);
167 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
168 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
169 static bool reorder_operands_p (const_tree, const_tree);
170 static tree fold_negate_const (tree, tree);
171 static tree fold_not_const (const_tree, tree);
172 static tree fold_relational_const (enum tree_code, tree, tree, tree);
173 static tree fold_convert_const (enum tree_code, tree, tree);
174 static tree fold_view_convert_expr (tree, tree);
175 static bool vec_cst_ctor_to_array (tree, tree *);
176
177
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
180
181 static location_t
182 expr_location_or (tree t, location_t loc)
183 {
184 location_t tloc = EXPR_LOCATION (t);
185 return tloc == UNKNOWN_LOCATION ? loc : tloc;
186 }
187
188 /* Similar to protected_set_expr_location, but never modify x in place;
189 if the location can and needs to be set, unshare it. */
190
191 static inline tree
192 protected_set_expr_location_unshare (tree x, location_t loc)
193 {
194 if (CAN_HAVE_LOCATION_P (x)
195 && EXPR_LOCATION (x) != loc
196 && !(TREE_CODE (x) == SAVE_EXPR
197 || TREE_CODE (x) == TARGET_EXPR
198 || TREE_CODE (x) == BIND_EXPR))
199 {
200 x = copy_node (x);
201 SET_EXPR_LOCATION (x, loc);
202 }
203 return x;
204 }
205 \f
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
208 NULL_TREE. */
209
210 tree
211 div_if_zero_remainder (const_tree arg1, const_tree arg2)
212 {
213 widest_int quo;
214
215 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
216 SIGNED, &quo))
217 return wide_int_to_tree (TREE_TYPE (arg1), quo);
218
219 return NULL_TREE;
220 }
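
/* Illustrative sketch (hypothetical operands, for exposition): */
#if 0
  tree twelve = build_int_cst (sizetype, 12);
  tree q1 = div_if_zero_remainder (twelve, build_int_cst (sizetype, 4));
  /* Q1 is the INTEGER_CST 3; dividing by 5 instead would leave a remainder
     of 2, so the call below returns NULL_TREE.  */
  tree q2 = div_if_zero_remainder (twelve, build_int_cst (sizetype, 5));
#endif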
221 \f
222 /* This is nonzero if we should defer warnings about undefined
223 overflow. This facility exists because these warnings are a
224 special case. The code to estimate loop iterations does not want
225 to issue any warnings, since it works with expressions which do not
226 occur in user code. Various bits of cleanup code call fold(), but
227 only use the result if it has certain characteristics (e.g., is a
228 constant); that code only wants to issue a warning if the result is
229 used. */
230
231 static int fold_deferring_overflow_warnings;
232
233 /* If a warning about undefined overflow is deferred, this is the
234 warning. Note that this may cause us to turn two warnings into
235 one, but that is fine since it is sufficient to only give one
236 warning per expression. */
237
238 static const char* fold_deferred_overflow_warning;
239
240 /* If a warning about undefined overflow is deferred, this is the
241 level at which the warning should be emitted. */
242
243 static enum warn_strict_overflow_code fold_deferred_overflow_code;
244
245 /* Start deferring overflow warnings. We could use a stack here to
246 permit nested calls, but at present it is not necessary. */
247
248 void
249 fold_defer_overflow_warnings (void)
250 {
251 ++fold_deferring_overflow_warnings;
252 }
253
254 /* Stop deferring overflow warnings. If there is a pending warning,
255 and ISSUE is true, then issue the warning if appropriate. STMT is
256 the statement with which the warning should be associated (used for
257 location information); STMT may be NULL. CODE is the level of the
258 warning--a warn_strict_overflow_code value. This function will use
259 the smaller of CODE and the deferred code when deciding whether to
260 issue the warning. CODE may be zero to mean to always use the
261 deferred code. */
262
263 void
264 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
265 {
266 const char *warnmsg;
267 location_t locus;
268
269 gcc_assert (fold_deferring_overflow_warnings > 0);
270 --fold_deferring_overflow_warnings;
271 if (fold_deferring_overflow_warnings > 0)
272 {
273 if (fold_deferred_overflow_warning != NULL
274 && code != 0
275 && code < (int) fold_deferred_overflow_code)
276 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
277 return;
278 }
279
280 warnmsg = fold_deferred_overflow_warning;
281 fold_deferred_overflow_warning = NULL;
282
283 if (!issue || warnmsg == NULL)
284 return;
285
286 if (gimple_no_warning_p (stmt))
287 return;
288
289 /* Use the smallest code level when deciding to issue the
290 warning. */
291 if (code == 0 || code > (int) fold_deferred_overflow_code)
292 code = fold_deferred_overflow_code;
293
294 if (!issue_strict_overflow_warning (code))
295 return;
296
297 if (stmt == NULL)
298 locus = input_location;
299 else
300 locus = gimple_location (stmt);
301 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
302 }
303
304 /* Stop deferring overflow warnings, ignoring any deferred
305 warnings. */
306
307 void
308 fold_undefer_and_ignore_overflow_warnings (void)
309 {
310 fold_undefer_overflow_warnings (false, NULL, 0);
311 }
312
313 /* Whether we are deferring overflow warnings. */
314
315 bool
316 fold_deferring_overflow_warnings_p (void)
317 {
318 return fold_deferring_overflow_warnings > 0;
319 }
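
/* Illustrative sketch of the defer/undefer protocol above (hypothetical
   caller, for exposition): warnings raised while folding are emitted only
   if the folded result is actually used.  */
#if 0
  fold_defer_overflow_warnings ();
  tree res = fold_binary (PLUS_EXPR, type, op0, op1);
  bool keep = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
  /* Emit any deferred -Wstrict-overflow warning only when KEEP is true.  */
  fold_undefer_overflow_warnings (keep, NULL, 0);
#endif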
320
321 /* This is called when we fold something based on the fact that signed
322 overflow is undefined. */
323
324 static void
325 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
326 {
327 if (fold_deferring_overflow_warnings > 0)
328 {
329 if (fold_deferred_overflow_warning == NULL
330 || wc < fold_deferred_overflow_code)
331 {
332 fold_deferred_overflow_warning = gmsgid;
333 fold_deferred_overflow_code = wc;
334 }
335 }
336 else if (issue_strict_overflow_warning (wc))
337 warning (OPT_Wstrict_overflow, gmsgid);
338 }
339 \f
340 /* Return true if the built-in mathematical function specified by CODE
341 is odd, i.e. -f(x) == f(-x). */
342
343 static bool
344 negate_mathfn_p (enum built_in_function code)
345 {
346 switch (code)
347 {
348 CASE_FLT_FN (BUILT_IN_ASIN):
349 CASE_FLT_FN (BUILT_IN_ASINH):
350 CASE_FLT_FN (BUILT_IN_ATAN):
351 CASE_FLT_FN (BUILT_IN_ATANH):
352 CASE_FLT_FN (BUILT_IN_CASIN):
353 CASE_FLT_FN (BUILT_IN_CASINH):
354 CASE_FLT_FN (BUILT_IN_CATAN):
355 CASE_FLT_FN (BUILT_IN_CATANH):
356 CASE_FLT_FN (BUILT_IN_CBRT):
357 CASE_FLT_FN (BUILT_IN_CPROJ):
358 CASE_FLT_FN (BUILT_IN_CSIN):
359 CASE_FLT_FN (BUILT_IN_CSINH):
360 CASE_FLT_FN (BUILT_IN_CTAN):
361 CASE_FLT_FN (BUILT_IN_CTANH):
362 CASE_FLT_FN (BUILT_IN_ERF):
363 CASE_FLT_FN (BUILT_IN_LLROUND):
364 CASE_FLT_FN (BUILT_IN_LROUND):
365 CASE_FLT_FN (BUILT_IN_ROUND):
366 CASE_FLT_FN (BUILT_IN_SIN):
367 CASE_FLT_FN (BUILT_IN_SINH):
368 CASE_FLT_FN (BUILT_IN_TAN):
369 CASE_FLT_FN (BUILT_IN_TANH):
370 CASE_FLT_FN (BUILT_IN_TRUNC):
371 return true;
372
373 CASE_FLT_FN (BUILT_IN_LLRINT):
374 CASE_FLT_FN (BUILT_IN_LRINT):
375 CASE_FLT_FN (BUILT_IN_NEARBYINT):
376 CASE_FLT_FN (BUILT_IN_RINT):
377 return !flag_rounding_math;
378
379 default:
380 break;
381 }
382 return false;
383 }
384
385 /* Check whether we may negate an integer constant T without causing
386 overflow. */
387
388 bool
389 may_negate_without_overflow_p (const_tree t)
390 {
391 tree type;
392
393 gcc_assert (TREE_CODE (t) == INTEGER_CST);
394
395 type = TREE_TYPE (t);
396 if (TYPE_UNSIGNED (type))
397 return false;
398
399 return !wi::only_sign_bit_p (t);
400 }
401
402 /* Determine whether an expression T can be cheaply negated using
403 the function negate_expr without introducing undefined overflow. */
404
405 static bool
406 negate_expr_p (tree t)
407 {
408 tree type;
409
410 if (t == 0)
411 return false;
412
413 type = TREE_TYPE (t);
414
415 STRIP_SIGN_NOPS (t);
416 switch (TREE_CODE (t))
417 {
418 case INTEGER_CST:
419 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
420 return true;
421
422 /* Check that -CST will not overflow type. */
423 return may_negate_without_overflow_p (t);
424 case BIT_NOT_EXPR:
425 return (INTEGRAL_TYPE_P (type)
426 && TYPE_OVERFLOW_WRAPS (type));
427
428 case FIXED_CST:
429 return true;
430
431 case NEGATE_EXPR:
432 return !TYPE_OVERFLOW_SANITIZED (type);
433
434 case REAL_CST:
435 /* We want to canonicalize to positive real constants. Pretend
436 that only negative ones can be easily negated. */
437 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
438
439 case COMPLEX_CST:
440 return negate_expr_p (TREE_REALPART (t))
441 && negate_expr_p (TREE_IMAGPART (t));
442
443 case VECTOR_CST:
444 {
445 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
446 return true;
447
448 int count = TYPE_VECTOR_SUBPARTS (type), i;
449
450 for (i = 0; i < count; i++)
451 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
452 return false;
453
454 return true;
455 }
456
457 case COMPLEX_EXPR:
458 return negate_expr_p (TREE_OPERAND (t, 0))
459 && negate_expr_p (TREE_OPERAND (t, 1));
460
461 case CONJ_EXPR:
462 return negate_expr_p (TREE_OPERAND (t, 0));
463
464 case PLUS_EXPR:
465 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
466 || HONOR_SIGNED_ZEROS (element_mode (type)))
467 return false;
468 /* -(A + B) -> (-B) - A. */
469 if (negate_expr_p (TREE_OPERAND (t, 1))
470 && reorder_operands_p (TREE_OPERAND (t, 0),
471 TREE_OPERAND (t, 1)))
472 return true;
473 /* -(A + B) -> (-A) - B. */
474 return negate_expr_p (TREE_OPERAND (t, 0));
475
476 case MINUS_EXPR:
477 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
478 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
479 && !HONOR_SIGNED_ZEROS (element_mode (type))
480 && reorder_operands_p (TREE_OPERAND (t, 0),
481 TREE_OPERAND (t, 1));
482
483 case MULT_EXPR:
484 if (TYPE_UNSIGNED (TREE_TYPE (t)))
485 break;
486
487 /* Fall through. */
488
489 case RDIV_EXPR:
490 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
491 return negate_expr_p (TREE_OPERAND (t, 1))
492 || negate_expr_p (TREE_OPERAND (t, 0));
493 break;
494
495 case TRUNC_DIV_EXPR:
496 case ROUND_DIV_EXPR:
497 case EXACT_DIV_EXPR:
498 /* In general we can't negate A / B, because if A is INT_MIN and
499 B is 1, we may turn this into INT_MIN / -1 which is undefined
500 and actually traps on some architectures. But if overflow is
501 undefined, we can negate, because - (INT_MIN / 1) is an
502 overflow. */
503 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
504 {
505 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
506 break;
507 /* If overflow is undefined then we have to be careful because
508 we ask whether it's ok to associate the negate with the
509 division, which is not ok, for example, for
510 -((a - b) / c), where (-(a - b)) / c may invoke undefined
511 overflow because of negating INT_MIN. So do not use
512 negate_expr_p here but open-code the two important cases. */
513 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
514 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
515 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
516 return true;
517 }
518 else if (negate_expr_p (TREE_OPERAND (t, 0)))
519 return true;
520 return negate_expr_p (TREE_OPERAND (t, 1));
521
522 case NOP_EXPR:
523 /* Negate -((double)float) as (double)(-float). */
524 if (TREE_CODE (type) == REAL_TYPE)
525 {
526 tree tem = strip_float_extensions (t);
527 if (tem != t)
528 return negate_expr_p (tem);
529 }
530 break;
531
532 case CALL_EXPR:
533 /* Negate -f(x) as f(-x). */
534 if (negate_mathfn_p (builtin_mathfn_code (t)))
535 return negate_expr_p (CALL_EXPR_ARG (t, 0));
536 break;
537
538 case RSHIFT_EXPR:
539 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
540 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
541 {
542 tree op1 = TREE_OPERAND (t, 1);
543 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
544 return true;
545 }
546 break;
547
548 default:
549 break;
550 }
551 return false;
552 }
553
554 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
555 simplification is possible.
556 If negate_expr_p would return true for T, NULL_TREE will never be
557 returned. */
558
559 static tree
560 fold_negate_expr (location_t loc, tree t)
561 {
562 tree type = TREE_TYPE (t);
563 tree tem;
564
565 switch (TREE_CODE (t))
566 {
567 /* Convert - (~A) to A + 1. */
568 case BIT_NOT_EXPR:
569 if (INTEGRAL_TYPE_P (type))
570 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
571 build_one_cst (type));
572 break;
573
574 case INTEGER_CST:
575 tem = fold_negate_const (t, type);
576 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
577 || (ANY_INTEGRAL_TYPE_P (type)
578 && !TYPE_OVERFLOW_TRAPS (type)
579 && TYPE_OVERFLOW_WRAPS (type))
580 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
581 return tem;
582 break;
583
584 case REAL_CST:
585 tem = fold_negate_const (t, type);
586 return tem;
587
588 case FIXED_CST:
589 tem = fold_negate_const (t, type);
590 return tem;
591
592 case COMPLEX_CST:
593 {
594 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
595 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
596 if (rpart && ipart)
597 return build_complex (type, rpart, ipart);
598 }
599 break;
600
601 case VECTOR_CST:
602 {
603 int count = TYPE_VECTOR_SUBPARTS (type), i;
604 tree *elts = XALLOCAVEC (tree, count);
605
606 for (i = 0; i < count; i++)
607 {
608 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
609 if (elts[i] == NULL_TREE)
610 return NULL_TREE;
611 }
612
613 return build_vector (type, elts);
614 }
615
616 case COMPLEX_EXPR:
617 if (negate_expr_p (t))
618 return fold_build2_loc (loc, COMPLEX_EXPR, type,
619 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
620 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
621 break;
622
623 case CONJ_EXPR:
624 if (negate_expr_p (t))
625 return fold_build1_loc (loc, CONJ_EXPR, type,
626 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
627 break;
628
629 case NEGATE_EXPR:
630 if (!TYPE_OVERFLOW_SANITIZED (type))
631 return TREE_OPERAND (t, 0);
632 break;
633
634 case PLUS_EXPR:
635 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
636 && !HONOR_SIGNED_ZEROS (element_mode (type)))
637 {
638 /* -(A + B) -> (-B) - A. */
639 if (negate_expr_p (TREE_OPERAND (t, 1))
640 && reorder_operands_p (TREE_OPERAND (t, 0),
641 TREE_OPERAND (t, 1)))
642 {
643 tem = negate_expr (TREE_OPERAND (t, 1));
644 return fold_build2_loc (loc, MINUS_EXPR, type,
645 tem, TREE_OPERAND (t, 0));
646 }
647
648 /* -(A + B) -> (-A) - B. */
649 if (negate_expr_p (TREE_OPERAND (t, 0)))
650 {
651 tem = negate_expr (TREE_OPERAND (t, 0));
652 return fold_build2_loc (loc, MINUS_EXPR, type,
653 tem, TREE_OPERAND (t, 1));
654 }
655 }
656 break;
657
658 case MINUS_EXPR:
659 /* - (A - B) -> B - A */
660 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
661 && !HONOR_SIGNED_ZEROS (element_mode (type))
662 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
663 return fold_build2_loc (loc, MINUS_EXPR, type,
664 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
665 break;
666
667 case MULT_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670
671 /* Fall through. */
672
673 case RDIV_EXPR:
674 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
675 {
676 tem = TREE_OPERAND (t, 1);
677 if (negate_expr_p (tem))
678 return fold_build2_loc (loc, TREE_CODE (t), type,
679 TREE_OPERAND (t, 0), negate_expr (tem));
680 tem = TREE_OPERAND (t, 0);
681 if (negate_expr_p (tem))
682 return fold_build2_loc (loc, TREE_CODE (t), type,
683 negate_expr (tem), TREE_OPERAND (t, 1));
684 }
685 break;
686
687 case TRUNC_DIV_EXPR:
688 case ROUND_DIV_EXPR:
689 case EXACT_DIV_EXPR:
690 /* In general we can't negate A / B, because if A is INT_MIN and
691 B is 1, we may turn this into INT_MIN / -1 which is undefined
692 and actually traps on some architectures. But if overflow is
693 undefined, we can negate, because - (INT_MIN / 1) is an
694 overflow. */
695 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
696 {
697 const char * const warnmsg = G_("assuming signed overflow does not "
698 "occur when negating a division");
699 tem = TREE_OPERAND (t, 1);
700 if (negate_expr_p (tem))
701 {
702 if (INTEGRAL_TYPE_P (type)
703 && (TREE_CODE (tem) != INTEGER_CST
704 || integer_onep (tem)))
705 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
706 return fold_build2_loc (loc, TREE_CODE (t), type,
707 TREE_OPERAND (t, 0), negate_expr (tem));
708 }
709 /* If overflow is undefined then we have to be careful because
710 we ask whether it's ok to associate the negate with the
711 division, which is not ok, for example, for
712 -((a - b) / c), where (-(a - b)) / c may invoke undefined
713 overflow because of negating INT_MIN. So do not use
714 negate_expr_p here but open-code the two important cases. */
715 tem = TREE_OPERAND (t, 0);
716 if ((INTEGRAL_TYPE_P (type)
717 && (TREE_CODE (tem) == NEGATE_EXPR
718 || (TREE_CODE (tem) == INTEGER_CST
719 && may_negate_without_overflow_p (tem))))
720 || !INTEGRAL_TYPE_P (type))
721 return fold_build2_loc (loc, TREE_CODE (t), type,
722 negate_expr (tem), TREE_OPERAND (t, 1));
723 }
724 break;
725
726 case NOP_EXPR:
727 /* Convert -((double)float) into (double)(-float). */
728 if (TREE_CODE (type) == REAL_TYPE)
729 {
730 tem = strip_float_extensions (t);
731 if (tem != t && negate_expr_p (tem))
732 return fold_convert_loc (loc, type, negate_expr (tem));
733 }
734 break;
735
736 case CALL_EXPR:
737 /* Negate -f(x) as f(-x). */
738 if (negate_mathfn_p (builtin_mathfn_code (t))
739 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
740 {
741 tree fndecl, arg;
742
743 fndecl = get_callee_fndecl (t);
744 arg = negate_expr (CALL_EXPR_ARG (t, 0));
745 return build_call_expr_loc (loc, fndecl, 1, arg);
746 }
747 break;
748
749 case RSHIFT_EXPR:
750 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
751 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
752 {
753 tree op1 = TREE_OPERAND (t, 1);
754 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
755 {
756 tree ntype = TYPE_UNSIGNED (type)
757 ? signed_type_for (type)
758 : unsigned_type_for (type);
759 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
760 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
761 return fold_convert_loc (loc, type, temp);
762 }
763 }
764 break;
765
766 default:
767 break;
768 }
769
770 return NULL_TREE;
771 }
772
773 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
774 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
775 return NULL_TREE. */
776
777 static tree
778 negate_expr (tree t)
779 {
780 tree type, tem;
781 location_t loc;
782
783 if (t == NULL_TREE)
784 return NULL_TREE;
785
786 loc = EXPR_LOCATION (t);
787 type = TREE_TYPE (t);
788 STRIP_SIGN_NOPS (t);
789
790 tem = fold_negate_expr (loc, t);
791 if (!tem)
792 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
793 return fold_convert_loc (loc, type, tem);
794 }
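
/* Illustrative sketch (hypothetical operands, for exposition): callers in
   this file test negate_expr_p before committing to negate_expr, so that a
   fold such as -(A * B) -> (-A) * B is only done when -A simplifies.  */
#if 0
  if (negate_expr_p (op0))
    return fold_build2_loc (loc, MULT_EXPR, type, negate_expr (op0), op1);
#endif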
795 \f
796 /* Split a tree IN into constant, literal, and variable parts that could be
797 combined with CODE to make IN. "constant" means an expression with
798 TREE_CONSTANT but that isn't an actual constant. CODE must be a
799 commutative arithmetic operation. Store the constant part into *CONP,
800 the literal in *LITP and return the variable part. If a part isn't
801 present, set it to null. If the tree does not decompose in this way,
802 return the entire tree as the variable part and the other parts as null.
803
804 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
805 case, we negate an operand that was subtracted. The exception is a
806 literal, for which we use *MINUS_LITP instead.
807
808 If NEGATE_P is true, we are negating all of IN, again except a literal
809 for which we use *MINUS_LITP instead.
810
811 If IN is itself a literal or constant, return it as appropriate.
812
813 Note that we do not guarantee that any of the three values will be the
814 same type as IN, but they will have the same signedness and mode. */
815
816 static tree
817 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
818 tree *minus_litp, int negate_p)
819 {
820 tree var = 0;
821
822 *conp = 0;
823 *litp = 0;
824 *minus_litp = 0;
825
826 /* Strip any conversions that don't change the machine mode or signedness. */
827 STRIP_SIGN_NOPS (in);
828
829 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
830 || TREE_CODE (in) == FIXED_CST)
831 *litp = in;
832 else if (TREE_CODE (in) == code
833 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
834 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
835 /* We can associate addition and subtraction together (even
836 though the C standard doesn't say so) for integers because
837 the value is not affected. For reals, the value might be
838 affected, so we can't. */
839 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
840 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
841 {
842 tree op0 = TREE_OPERAND (in, 0);
843 tree op1 = TREE_OPERAND (in, 1);
844 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
845 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
846
847 /* First see if either of the operands is a literal, then a constant. */
848 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
849 || TREE_CODE (op0) == FIXED_CST)
850 *litp = op0, op0 = 0;
851 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
852 || TREE_CODE (op1) == FIXED_CST)
853 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
854
855 if (op0 != 0 && TREE_CONSTANT (op0))
856 *conp = op0, op0 = 0;
857 else if (op1 != 0 && TREE_CONSTANT (op1))
858 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
859
860 /* If we haven't dealt with either operand, this is not a case we can
861 decompose. Otherwise, VAR is either of the ones remaining, if any. */
862 if (op0 != 0 && op1 != 0)
863 var = in;
864 else if (op0 != 0)
865 var = op0;
866 else
867 var = op1, neg_var_p = neg1_p;
868
869 /* Now do any needed negations. */
870 if (neg_litp_p)
871 *minus_litp = *litp, *litp = 0;
872 if (neg_conp_p)
873 *conp = negate_expr (*conp);
874 if (neg_var_p)
875 var = negate_expr (var);
876 }
877 else if (TREE_CODE (in) == BIT_NOT_EXPR
878 && code == PLUS_EXPR)
879 {
880 /* -X - 1 is folded to ~X, undo that here. */
881 *minus_litp = build_one_cst (TREE_TYPE (in));
882 var = negate_expr (TREE_OPERAND (in, 0));
883 }
884 else if (TREE_CONSTANT (in))
885 *conp = in;
886 else
887 var = in;
888
889 if (negate_p)
890 {
891 if (*litp)
892 *minus_litp = *litp, *litp = 0;
893 else if (*minus_litp)
894 *litp = *minus_litp, *minus_litp = 0;
895 *conp = negate_expr (*conp);
896 var = negate_expr (var);
897 }
898
899 return var;
900 }
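
/* Worked example for split_tree above (hypothetical trees, for exposition):
   for IN = (v - 7) and CODE == PLUS_EXPR the MINUS_EXPR is accepted, the
   subtracted literal 7 is stored in *MINUS_LITP, *CONP stays null, and v is
   returned as the variable part.  For IN = ~v, *MINUS_LITP is set to 1 and
   -v is returned, undoing the earlier "-X - 1 -> ~X" fold.  */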
901
902 /* Re-associate trees split by the above function. T1 and T2 are
903 either expressions to associate or null. Return the new
904 expression, if any. LOC is the location of the new expression. If
905 we build an operation, do it in TYPE and with CODE. */
906
907 static tree
908 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
909 {
910 if (t1 == 0)
911 return t2;
912 else if (t2 == 0)
913 return t1;
914
915 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
916 try to fold this since we will have infinite recursion. But do
917 deal with any NEGATE_EXPRs. */
918 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
919 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
920 {
921 if (code == PLUS_EXPR)
922 {
923 if (TREE_CODE (t1) == NEGATE_EXPR)
924 return build2_loc (loc, MINUS_EXPR, type,
925 fold_convert_loc (loc, type, t2),
926 fold_convert_loc (loc, type,
927 TREE_OPERAND (t1, 0)));
928 else if (TREE_CODE (t2) == NEGATE_EXPR)
929 return build2_loc (loc, MINUS_EXPR, type,
930 fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type,
932 TREE_OPERAND (t2, 0)));
933 else if (integer_zerop (t2))
934 return fold_convert_loc (loc, type, t1);
935 }
936 else if (code == MINUS_EXPR)
937 {
938 if (integer_zerop (t2))
939 return fold_convert_loc (loc, type, t1);
940 }
941
942 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
943 fold_convert_loc (loc, type, t2));
944 }
945
946 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
947 fold_convert_loc (loc, type, t2));
948 }
949 \f
950 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
951 for use in int_const_binop, size_binop and size_diffop. */
952
953 static bool
954 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
955 {
956 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
957 return false;
958 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
959 return false;
960
961 switch (code)
962 {
963 case LSHIFT_EXPR:
964 case RSHIFT_EXPR:
965 case LROTATE_EXPR:
966 case RROTATE_EXPR:
967 return true;
968
969 default:
970 break;
971 }
972
973 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
974 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
975 && TYPE_MODE (type1) == TYPE_MODE (type2);
976 }
977
978
979 /* Combine two integer constants ARG1 and ARG2 under operation CODE
980 to produce a new constant. Return NULL_TREE if we don't know how
981 to evaluate CODE at compile-time. */
982
983 static tree
984 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
985 int overflowable)
986 {
987 wide_int res;
988 tree t;
989 tree type = TREE_TYPE (arg1);
990 signop sign = TYPE_SIGN (type);
991 bool overflow = false;
992
993 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
994 TYPE_SIGN (TREE_TYPE (parg2)));
995
996 switch (code)
997 {
998 case BIT_IOR_EXPR:
999 res = wi::bit_or (arg1, arg2);
1000 break;
1001
1002 case BIT_XOR_EXPR:
1003 res = wi::bit_xor (arg1, arg2);
1004 break;
1005
1006 case BIT_AND_EXPR:
1007 res = wi::bit_and (arg1, arg2);
1008 break;
1009
1010 case RSHIFT_EXPR:
1011 case LSHIFT_EXPR:
1012 if (wi::neg_p (arg2))
1013 {
1014 arg2 = -arg2;
1015 if (code == RSHIFT_EXPR)
1016 code = LSHIFT_EXPR;
1017 else
1018 code = RSHIFT_EXPR;
1019 }
1020
1021 if (code == RSHIFT_EXPR)
1022 /* It's unclear from the C standard whether shifts can overflow.
1023 The following code ignores overflow; perhaps a C standard
1024 interpretation ruling is needed. */
1025 res = wi::rshift (arg1, arg2, sign);
1026 else
1027 res = wi::lshift (arg1, arg2);
1028 break;
1029
1030 case RROTATE_EXPR:
1031 case LROTATE_EXPR:
1032 if (wi::neg_p (arg2))
1033 {
1034 arg2 = -arg2;
1035 if (code == RROTATE_EXPR)
1036 code = LROTATE_EXPR;
1037 else
1038 code = RROTATE_EXPR;
1039 }
1040
1041 if (code == RROTATE_EXPR)
1042 res = wi::rrotate (arg1, arg2);
1043 else
1044 res = wi::lrotate (arg1, arg2);
1045 break;
1046
1047 case PLUS_EXPR:
1048 res = wi::add (arg1, arg2, sign, &overflow);
1049 break;
1050
1051 case MINUS_EXPR:
1052 res = wi::sub (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case MULT_EXPR:
1056 res = wi::mul (arg1, arg2, sign, &overflow);
1057 break;
1058
1059 case MULT_HIGHPART_EXPR:
1060 res = wi::mul_high (arg1, arg2, sign);
1061 break;
1062
1063 case TRUNC_DIV_EXPR:
1064 case EXACT_DIV_EXPR:
1065 if (arg2 == 0)
1066 return NULL_TREE;
1067 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1068 break;
1069
1070 case FLOOR_DIV_EXPR:
1071 if (arg2 == 0)
1072 return NULL_TREE;
1073 res = wi::div_floor (arg1, arg2, sign, &overflow);
1074 break;
1075
1076 case CEIL_DIV_EXPR:
1077 if (arg2 == 0)
1078 return NULL_TREE;
1079 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1080 break;
1081
1082 case ROUND_DIV_EXPR:
1083 if (arg2 == 0)
1084 return NULL_TREE;
1085 res = wi::div_round (arg1, arg2, sign, &overflow);
1086 break;
1087
1088 case TRUNC_MOD_EXPR:
1089 if (arg2 == 0)
1090 return NULL_TREE;
1091 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1092 break;
1093
1094 case FLOOR_MOD_EXPR:
1095 if (arg2 == 0)
1096 return NULL_TREE;
1097 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1098 break;
1099
1100 case CEIL_MOD_EXPR:
1101 if (arg2 == 0)
1102 return NULL_TREE;
1103 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1104 break;
1105
1106 case ROUND_MOD_EXPR:
1107 if (arg2 == 0)
1108 return NULL_TREE;
1109 res = wi::mod_round (arg1, arg2, sign, &overflow);
1110 break;
1111
1112 case MIN_EXPR:
1113 res = wi::min (arg1, arg2, sign);
1114 break;
1115
1116 case MAX_EXPR:
1117 res = wi::max (arg1, arg2, sign);
1118 break;
1119
1120 default:
1121 return NULL_TREE;
1122 }
1123
1124 t = force_fit_type (type, res, overflowable,
1125 (((sign == SIGNED || overflowable == -1)
1126 && overflow)
1127 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1128
1129 return t;
1130 }
1131
1132 tree
1133 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1134 {
1135 return int_const_binop_1 (code, arg1, arg2, 1);
1136 }
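
/* Illustrative sketch (hypothetical constants, for exposition): */
#if 0
  tree six   = build_int_cst (integer_type_node, 6);
  tree seven = build_int_cst (integer_type_node, 7);
  /* SUM is the INTEGER_CST 13; a tree code the worker does not handle
     would make int_const_binop return NULL_TREE instead.  */
  tree sum = int_const_binop (PLUS_EXPR, six, seven);
#endif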
1137
1138 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1139 constant. We assume ARG1 and ARG2 have the same data type, or at least
1140 are the same kind of constant and the same machine mode. Return zero if
1141 combining the constants is not allowed in the current operating mode. */
1142
1143 static tree
1144 const_binop (enum tree_code code, tree arg1, tree arg2)
1145 {
1146 /* Sanity check for the recursive cases. */
1147 if (!arg1 || !arg2)
1148 return NULL_TREE;
1149
1150 STRIP_NOPS (arg1);
1151 STRIP_NOPS (arg2);
1152
1153 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1154 {
1155 if (code == POINTER_PLUS_EXPR)
1156 return int_const_binop (PLUS_EXPR,
1157 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1158
1159 return int_const_binop (code, arg1, arg2);
1160 }
1161
1162 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1163 {
1164 machine_mode mode;
1165 REAL_VALUE_TYPE d1;
1166 REAL_VALUE_TYPE d2;
1167 REAL_VALUE_TYPE value;
1168 REAL_VALUE_TYPE result;
1169 bool inexact;
1170 tree t, type;
1171
1172 /* The following codes are handled by real_arithmetic. */
1173 switch (code)
1174 {
1175 case PLUS_EXPR:
1176 case MINUS_EXPR:
1177 case MULT_EXPR:
1178 case RDIV_EXPR:
1179 case MIN_EXPR:
1180 case MAX_EXPR:
1181 break;
1182
1183 default:
1184 return NULL_TREE;
1185 }
1186
1187 d1 = TREE_REAL_CST (arg1);
1188 d2 = TREE_REAL_CST (arg2);
1189
1190 type = TREE_TYPE (arg1);
1191 mode = TYPE_MODE (type);
1192
1193 /* Don't perform operation if we honor signaling NaNs and
1194 either operand is a NaN. */
1195 if (HONOR_SNANS (mode)
1196 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1197 return NULL_TREE;
1198
1199 /* Don't perform operation if it would raise a division
1200 by zero exception. */
1201 if (code == RDIV_EXPR
1202 && REAL_VALUES_EQUAL (d2, dconst0)
1203 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1204 return NULL_TREE;
1205
1206 /* If either operand is a NaN, just return it. Otherwise, set up
1207 for floating-point trap; we return an overflow. */
1208 if (REAL_VALUE_ISNAN (d1))
1209 return arg1;
1210 else if (REAL_VALUE_ISNAN (d2))
1211 return arg2;
1212
1213 inexact = real_arithmetic (&value, code, &d1, &d2);
1214 real_convert (&result, mode, &value);
1215
1216 /* Don't constant fold this floating point operation if
1217 the result has overflowed and flag_trapping_math. */
1218 if (flag_trapping_math
1219 && MODE_HAS_INFINITIES (mode)
1220 && REAL_VALUE_ISINF (result)
1221 && !REAL_VALUE_ISINF (d1)
1222 && !REAL_VALUE_ISINF (d2))
1223 return NULL_TREE;
1224
1225 /* Don't constant fold this floating point operation if the
1226 result may depend upon the run-time rounding mode and
1227 flag_rounding_math is set, or if GCC's software emulation
1228 is unable to accurately represent the result. */
1229 if ((flag_rounding_math
1230 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1231 && (inexact || !real_identical (&result, &value)))
1232 return NULL_TREE;
1233
1234 t = build_real (type, result);
1235
1236 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1237 return t;
1238 }
1239
1240 if (TREE_CODE (arg1) == FIXED_CST)
1241 {
1242 FIXED_VALUE_TYPE f1;
1243 FIXED_VALUE_TYPE f2;
1244 FIXED_VALUE_TYPE result;
1245 tree t, type;
1246 int sat_p;
1247 bool overflow_p;
1248
1249 /* The following codes are handled by fixed_arithmetic. */
1250 switch (code)
1251 {
1252 case PLUS_EXPR:
1253 case MINUS_EXPR:
1254 case MULT_EXPR:
1255 case TRUNC_DIV_EXPR:
1256 if (TREE_CODE (arg2) != FIXED_CST)
1257 return NULL_TREE;
1258 f2 = TREE_FIXED_CST (arg2);
1259 break;
1260
1261 case LSHIFT_EXPR:
1262 case RSHIFT_EXPR:
1263 {
1264 if (TREE_CODE (arg2) != INTEGER_CST)
1265 return NULL_TREE;
1266 wide_int w2 = arg2;
1267 f2.data.high = w2.elt (1);
1268 f2.data.low = w2.elt (0);
1269 f2.mode = SImode;
1270 }
1271 break;
1272
1273 default:
1274 return NULL_TREE;
1275 }
1276
1277 f1 = TREE_FIXED_CST (arg1);
1278 type = TREE_TYPE (arg1);
1279 sat_p = TYPE_SATURATING (type);
1280 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1281 t = build_fixed (type, result);
1282 /* Propagate overflow flags. */
1283 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1284 TREE_OVERFLOW (t) = 1;
1285 return t;
1286 }
1287
1288 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1289 {
1290 tree type = TREE_TYPE (arg1);
1291 tree r1 = TREE_REALPART (arg1);
1292 tree i1 = TREE_IMAGPART (arg1);
1293 tree r2 = TREE_REALPART (arg2);
1294 tree i2 = TREE_IMAGPART (arg2);
1295 tree real, imag;
1296
1297 switch (code)
1298 {
1299 case PLUS_EXPR:
1300 case MINUS_EXPR:
1301 real = const_binop (code, r1, r2);
1302 imag = const_binop (code, i1, i2);
1303 break;
1304
1305 case MULT_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_mul);
1310
1311 real = const_binop (MINUS_EXPR,
1312 const_binop (MULT_EXPR, r1, r2),
1313 const_binop (MULT_EXPR, i1, i2));
1314 imag = const_binop (PLUS_EXPR,
1315 const_binop (MULT_EXPR, r1, i2),
1316 const_binop (MULT_EXPR, i1, r2));
1317 break;
1318
1319 case RDIV_EXPR:
1320 if (COMPLEX_FLOAT_TYPE_P (type))
1321 return do_mpc_arg2 (arg1, arg2, type,
1322 /* do_nonfinite= */ folding_initializer,
1323 mpc_div);
1324 /* Fallthru ... */
1325 case TRUNC_DIV_EXPR:
1326 case CEIL_DIV_EXPR:
1327 case FLOOR_DIV_EXPR:
1328 case ROUND_DIV_EXPR:
1329 if (flag_complex_method == 0)
1330 {
1331 /* Keep this algorithm in sync with
1332 tree-complex.c:expand_complex_div_straight().
1333
1334 Expand complex division to scalars, straightforward algorithm.
1335 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1336 t = br*br + bi*bi
1337 */
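
/* Worked example of the formula above (for exposition):
   (3 + 2i) / (1 + 1i): t = 1*1 + 1*1 = 2,
   real = (3*1 + 2*1) / 2 = 2.5, imag = (2*1 - 3*1) / 2 = -0.5,
   i.e. the quotient 2.5 - 0.5i.  */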
1338 tree magsquared
1339 = const_binop (PLUS_EXPR,
1340 const_binop (MULT_EXPR, r2, r2),
1341 const_binop (MULT_EXPR, i2, i2));
1342 tree t1
1343 = const_binop (PLUS_EXPR,
1344 const_binop (MULT_EXPR, r1, r2),
1345 const_binop (MULT_EXPR, i1, i2));
1346 tree t2
1347 = const_binop (MINUS_EXPR,
1348 const_binop (MULT_EXPR, i1, r2),
1349 const_binop (MULT_EXPR, r1, i2));
1350
1351 real = const_binop (code, t1, magsquared);
1352 imag = const_binop (code, t2, magsquared);
1353 }
1354 else
1355 {
1356 /* Keep this algorithm in sync with
1357 tree-complex.c:expand_complex_div_wide().
1358
1359 Expand complex division to scalars, modified algorithm to minimize
1360 overflow with wide input ranges. */
1361 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1362 fold_abs_const (r2, TREE_TYPE (type)),
1363 fold_abs_const (i2, TREE_TYPE (type)));
1364
1365 if (integer_nonzerop (compare))
1366 {
1367 /* In the TRUE branch, we compute
1368 ratio = br/bi;
1369 div = (br * ratio) + bi;
1370 tr = (ar * ratio) + ai;
1371 ti = (ai * ratio) - ar;
1372 tr = tr / div;
1373 ti = ti / div; */
1374 tree ratio = const_binop (code, r2, i2);
1375 tree div = const_binop (PLUS_EXPR, i2,
1376 const_binop (MULT_EXPR, r2, ratio));
1377 real = const_binop (MULT_EXPR, r1, ratio);
1378 real = const_binop (PLUS_EXPR, real, i1);
1379 real = const_binop (code, real, div);
1380
1381 imag = const_binop (MULT_EXPR, i1, ratio);
1382 imag = const_binop (MINUS_EXPR, imag, r1);
1383 imag = const_binop (code, imag, div);
1384 }
1385 else
1386 {
1387 /* In the FALSE branch, we compute
1388 ratio = bi/br;
1389 div = (bi * ratio) + br;
1390 tr = (ai * ratio) + ar;
1391 ti = ai - (ar * ratio);
1392 tr = tr / div;
1393 ti = ti / div; */
1394 tree ratio = const_binop (code, i2, r2);
1395 tree div = const_binop (PLUS_EXPR, r2,
1396 const_binop (MULT_EXPR, i2, ratio));
1397
1398 real = const_binop (MULT_EXPR, i1, ratio);
1399 real = const_binop (PLUS_EXPR, real, r1);
1400 real = const_binop (code, real, div);
1401
1402 imag = const_binop (MULT_EXPR, r1, ratio);
1403 imag = const_binop (MINUS_EXPR, i1, imag);
1404 imag = const_binop (code, imag, div);
1405 }
1406 }
1407 break;
1408
1409 default:
1410 return NULL_TREE;
1411 }
1412
1413 if (real && imag)
1414 return build_complex (type, real, imag);
1415 }
1416
1417 if (TREE_CODE (arg1) == VECTOR_CST
1418 && TREE_CODE (arg2) == VECTOR_CST)
1419 {
1420 tree type = TREE_TYPE (arg1);
1421 int count = TYPE_VECTOR_SUBPARTS (type), i;
1422 tree *elts = XALLOCAVEC (tree, count);
1423
1424 for (i = 0; i < count; i++)
1425 {
1426 tree elem1 = VECTOR_CST_ELT (arg1, i);
1427 tree elem2 = VECTOR_CST_ELT (arg2, i);
1428
1429 elts[i] = const_binop (code, elem1, elem2);
1430
1431 /* It is possible that const_binop cannot handle the given
1432 code and will return NULL_TREE. */
1433 if (elts[i] == NULL_TREE)
1434 return NULL_TREE;
1435 }
1436
1437 return build_vector (type, elts);
1438 }
1439
1440 /* Shifts allow a scalar offset for a vector. */
1441 if (TREE_CODE (arg1) == VECTOR_CST
1442 && TREE_CODE (arg2) == INTEGER_CST)
1443 {
1444 tree type = TREE_TYPE (arg1);
1445 int count = TYPE_VECTOR_SUBPARTS (type), i;
1446 tree *elts = XALLOCAVEC (tree, count);
1447
1448 for (i = 0; i < count; i++)
1449 {
1450 tree elem1 = VECTOR_CST_ELT (arg1, i);
1451
1452 elts[i] = const_binop (code, elem1, arg2);
1453
1454 /* It is possible that const_binop cannot handle the given
1455 code and will return NULL_TREE. */
1456 if (elts[i] == NULL_TREE)
1457 return NULL_TREE;
1458 }
1459
1460 return build_vector (type, elts);
1461 }
1462 return NULL_TREE;
1463 }
1464
1465 /* Overload that adds a TYPE parameter to be able to dispatch
1466 to fold_relational_const. */
1467
1468 tree
1469 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1470 {
1471 if (TREE_CODE_CLASS (code) == tcc_comparison)
1472 return fold_relational_const (code, type, arg1, arg2);
1473
1474 /* ??? Until we make the const_binop worker take the type of the
1475 result as argument put those cases that need it here. */
1476 switch (code)
1477 {
1478 case COMPLEX_EXPR:
1479 if ((TREE_CODE (arg1) == REAL_CST
1480 && TREE_CODE (arg2) == REAL_CST)
1481 || (TREE_CODE (arg1) == INTEGER_CST
1482 && TREE_CODE (arg2) == INTEGER_CST))
1483 return build_complex (type, arg1, arg2);
1484 return NULL_TREE;
1485
1486 case VEC_PACK_TRUNC_EXPR:
1487 case VEC_PACK_FIX_TRUNC_EXPR:
1488 {
1489 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1490 tree *elts;
1491
1492 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1493 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1494 if (TREE_CODE (arg1) != VECTOR_CST
1495 || TREE_CODE (arg2) != VECTOR_CST)
1496 return NULL_TREE;
1497
1498 elts = XALLOCAVEC (tree, nelts);
1499 if (!vec_cst_ctor_to_array (arg1, elts)
1500 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1501 return NULL_TREE;
1502
1503 for (i = 0; i < nelts; i++)
1504 {
1505 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1506 ? NOP_EXPR : FIX_TRUNC_EXPR,
1507 TREE_TYPE (type), elts[i]);
1508 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1509 return NULL_TREE;
1510 }
1511
1512 return build_vector (type, elts);
1513 }
1514
1515 case VEC_WIDEN_MULT_LO_EXPR:
1516 case VEC_WIDEN_MULT_HI_EXPR:
1517 case VEC_WIDEN_MULT_EVEN_EXPR:
1518 case VEC_WIDEN_MULT_ODD_EXPR:
1519 {
1520 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1521 unsigned int out, ofs, scale;
1522 tree *elts;
1523
1524 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1525 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1526 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1527 return NULL_TREE;
1528
1529 elts = XALLOCAVEC (tree, nelts * 4);
1530 if (!vec_cst_ctor_to_array (arg1, elts)
1531 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1532 return NULL_TREE;
1533
1534 if (code == VEC_WIDEN_MULT_LO_EXPR)
1535 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1536 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1537 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1538 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1539 scale = 1, ofs = 0;
1540 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1541 scale = 1, ofs = 1;
1542
1543 for (out = 0; out < nelts; out++)
1544 {
1545 unsigned int in1 = (out << scale) + ofs;
1546 unsigned int in2 = in1 + nelts * 2;
1547 tree t1, t2;
1548
1549 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1550 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1551
1552 if (t1 == NULL_TREE || t2 == NULL_TREE)
1553 return NULL_TREE;
1554 elts[out] = const_binop (MULT_EXPR, t1, t2);
1555 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1556 return NULL_TREE;
1557 }
1558
1559 return build_vector (type, elts);
1560 }
1561
1562 default:;
1563 }
1564
1565 if (TREE_CODE_CLASS (code) != tcc_binary)
1566 return NULL_TREE;
1567
1568 /* Make sure type and arg0 have the same saturating flag. */
1569 gcc_checking_assert (TYPE_SATURATING (type)
1570 == TYPE_SATURATING (TREE_TYPE (arg1)));
1571
1572 return const_binop (code, arg1, arg2);
1573 }
1574
1575 /* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
1576 Return zero if computing the constant is not possible. */
1577
1578 tree
1579 const_unop (enum tree_code code, tree type, tree arg0)
1580 {
1581 switch (code)
1582 {
1583 CASE_CONVERT:
1584 case FLOAT_EXPR:
1585 case FIX_TRUNC_EXPR:
1586 case FIXED_CONVERT_EXPR:
1587 return fold_convert_const (code, type, arg0);
1588
1589 case ADDR_SPACE_CONVERT_EXPR:
1590 if (integer_zerop (arg0))
1591 return fold_convert_const (code, type, arg0);
1592 break;
1593
1594 case VIEW_CONVERT_EXPR:
1595 return fold_view_convert_expr (type, arg0);
1596
1597 case NEGATE_EXPR:
1598 {
1599 /* Can't call fold_negate_const directly here as that doesn't
1600 handle all cases and we might not be able to negate some
1601 constants. */
1602 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1603 if (tem && CONSTANT_CLASS_P (tem))
1604 return tem;
1605 break;
1606 }
1607
1608 case ABS_EXPR:
1609 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1610 return fold_abs_const (arg0, type);
1611 break;
1612
1613 case CONJ_EXPR:
1614 if (TREE_CODE (arg0) == COMPLEX_CST)
1615 {
1616 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1617 TREE_TYPE (type));
1618 return build_complex (type, TREE_REALPART (arg0), ipart);
1619 }
1620 break;
1621
1622 case BIT_NOT_EXPR:
1623 if (TREE_CODE (arg0) == INTEGER_CST)
1624 return fold_not_const (arg0, type);
1625 /* Perform BIT_NOT_EXPR on each element individually. */
1626 else if (TREE_CODE (arg0) == VECTOR_CST)
1627 {
1628 tree *elements;
1629 tree elem;
1630 unsigned count = VECTOR_CST_NELTS (arg0), i;
1631
1632 elements = XALLOCAVEC (tree, count);
1633 for (i = 0; i < count; i++)
1634 {
1635 elem = VECTOR_CST_ELT (arg0, i);
1636 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1637 if (elem == NULL_TREE)
1638 break;
1639 elements[i] = elem;
1640 }
1641 if (i == count)
1642 return build_vector (type, elements);
1643 }
1644 break;
1645
1646 case TRUTH_NOT_EXPR:
1647 if (TREE_CODE (arg0) == INTEGER_CST)
1648 return constant_boolean_node (integer_zerop (arg0), type);
1649 break;
1650
1651 case REALPART_EXPR:
1652 if (TREE_CODE (arg0) == COMPLEX_CST)
1653 return fold_convert (type, TREE_REALPART (arg0));
1654 break;
1655
1656 case IMAGPART_EXPR:
1657 if (TREE_CODE (arg0) == COMPLEX_CST)
1658 return fold_convert (type, TREE_IMAGPART (arg0));
1659 break;
1660
1661 case VEC_UNPACK_LO_EXPR:
1662 case VEC_UNPACK_HI_EXPR:
1663 case VEC_UNPACK_FLOAT_LO_EXPR:
1664 case VEC_UNPACK_FLOAT_HI_EXPR:
1665 {
1666 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1667 tree *elts;
1668 enum tree_code subcode;
1669
1670 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1671 if (TREE_CODE (arg0) != VECTOR_CST)
1672 return NULL_TREE;
1673
1674 elts = XALLOCAVEC (tree, nelts * 2);
1675 if (!vec_cst_ctor_to_array (arg0, elts))
1676 return NULL_TREE;
1677
1678 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1679 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1680 elts += nelts;
1681
1682 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1683 subcode = NOP_EXPR;
1684 else
1685 subcode = FLOAT_EXPR;
1686
1687 for (i = 0; i < nelts; i++)
1688 {
1689 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1690 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1691 return NULL_TREE;
1692 }
1693
1694 return build_vector (type, elts);
1695 }
1696
1697 case REDUC_MIN_EXPR:
1698 case REDUC_MAX_EXPR:
1699 case REDUC_PLUS_EXPR:
1700 {
1701 unsigned int nelts, i;
1702 tree *elts;
1703 enum tree_code subcode;
1704
1705 if (TREE_CODE (arg0) != VECTOR_CST)
1706 return NULL_TREE;
1707 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1708
1709 elts = XALLOCAVEC (tree, nelts);
1710 if (!vec_cst_ctor_to_array (arg0, elts))
1711 return NULL_TREE;
1712
1713 switch (code)
1714 {
1715 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1716 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1717 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1718 default: gcc_unreachable ();
1719 }
1720
1721 for (i = 1; i < nelts; i++)
1722 {
1723 elts[0] = const_binop (subcode, elts[0], elts[i]);
1724 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1725 return NULL_TREE;
1726 }
1727
1728 return elts[0];
1729 }
1730
1731 default:
1732 break;
1733 }
1734
1735 return NULL_TREE;
1736 }
1737
1738 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1739 indicates which particular sizetype to create. */
1740
1741 tree
1742 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1743 {
1744 return build_int_cst (sizetype_tab[(int) kind], number);
1745 }
1746 \f
1747 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1748 is a tree code. The type of the result is taken from the operands.
1749 Both must be equivalent integer types, as in int_binop_types_match_p.
1750 If the operands are constant, so is the result. */
1751
1752 tree
1753 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1754 {
1755 tree type = TREE_TYPE (arg0);
1756
1757 if (arg0 == error_mark_node || arg1 == error_mark_node)
1758 return error_mark_node;
1759
1760 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1761 TREE_TYPE (arg1)));
1762
1763 /* Handle the special case of two integer constants faster. */
1764 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1765 {
1766 /* And some specific cases even faster than that. */
1767 if (code == PLUS_EXPR)
1768 {
1769 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1770 return arg1;
1771 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1772 return arg0;
1773 }
1774 else if (code == MINUS_EXPR)
1775 {
1776 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1777 return arg0;
1778 }
1779 else if (code == MULT_EXPR)
1780 {
1781 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1782 return arg1;
1783 }
1784
1785 /* Handle general case of two integer constants. For sizetype
1786 constant calculations we always want to know about overflow,
1787 even in the unsigned case. */
1788 return int_const_binop_1 (code, arg0, arg1, -1);
1789 }
1790
1791 return fold_build2_loc (loc, code, type, arg0, arg1);
1792 }
1793
1794 /* Given two values, either both of sizetype or both of bitsizetype,
1795 compute the difference between the two values. Return the value
1796 in signed type corresponding to the type of the operands. */
1797
1798 tree
1799 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1800 {
1801 tree type = TREE_TYPE (arg0);
1802 tree ctype;
1803
1804 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1805 TREE_TYPE (arg1)));
1806
1807 /* If the type is already signed, just do the simple thing. */
1808 if (!TYPE_UNSIGNED (type))
1809 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1810
1811 if (type == sizetype)
1812 ctype = ssizetype;
1813 else if (type == bitsizetype)
1814 ctype = sbitsizetype;
1815 else
1816 ctype = signed_type_for (type);
1817
1818 /* If either operand is not a constant, do the conversions to the signed
1819 type and subtract. The hardware will do the right thing with any
1820 overflow in the subtraction. */
1821 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1822 return size_binop_loc (loc, MINUS_EXPR,
1823 fold_convert_loc (loc, ctype, arg0),
1824 fold_convert_loc (loc, ctype, arg1));
1825
1826 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1827 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1828 overflow) and negate (which can't either). Special-case a result
1829 of zero while we're here. */
1830 if (tree_int_cst_equal (arg0, arg1))
1831 return build_int_cst (ctype, 0);
1832 else if (tree_int_cst_lt (arg1, arg0))
1833 return fold_convert_loc (loc, ctype,
1834 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1835 else
1836 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1837 fold_convert_loc (loc, ctype,
1838 size_binop_loc (loc,
1839 MINUS_EXPR,
1840 arg1, arg0)));
1841 }
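
/* Illustrative sketch (hypothetical operands, for exposition): sizetype is
   unsigned, so the difference comes back in the signed ssizetype.  */
#if 0
  /* Folds to the ssizetype INTEGER_CST -8: 12 - 4 is computed first and the
     result is then negated in ssizetype, avoiding unsigned wraparound.  */
  tree d = size_diffop_loc (UNKNOWN_LOCATION, size_int (4), size_int (12));
#endif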
1842 \f
1843 /* A subroutine of fold_convert_const handling conversions of an
1844 INTEGER_CST to another integer type. */
1845
1846 static tree
1847 fold_convert_const_int_from_int (tree type, const_tree arg1)
1848 {
1849 /* Given an integer constant, make new constant with new type,
1850 appropriately sign-extended or truncated. Use widest_int
1851 so that any extension is done according to ARG1's type. */
1852 return force_fit_type (type, wi::to_widest (arg1),
1853 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1854 TREE_OVERFLOW (arg1));
1855 }
1856
1857 /* A subroutine of fold_convert_const handling conversion of a REAL_CST
1858 to an integer type. */
1859
1860 static tree
1861 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1862 {
1863 bool overflow = false;
1864 tree t;
1865
1866 /* The following code implements the floating point to integer
1867 conversion rules required by the Java Language Specification,
1868 that IEEE NaNs are mapped to zero and values that overflow
1869 the target precision saturate, i.e. values greater than
1870 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1871 are mapped to INT_MIN. These semantics are allowed by the
1872 C and C++ standards that simply state that the behavior of
1873 FP-to-integer conversion is unspecified upon overflow. */
1874
1875 wide_int val;
1876 REAL_VALUE_TYPE r;
1877 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1878
1879 switch (code)
1880 {
1881 case FIX_TRUNC_EXPR:
1882 real_trunc (&r, VOIDmode, &x);
1883 break;
1884
1885 default:
1886 gcc_unreachable ();
1887 }
1888
1889 /* If R is NaN, return zero and show we have an overflow. */
1890 if (REAL_VALUE_ISNAN (r))
1891 {
1892 overflow = true;
1893 val = wi::zero (TYPE_PRECISION (type));
1894 }
1895
1896 /* See if R is less than the lower bound or greater than the
1897 upper bound. */
1898
1899 if (! overflow)
1900 {
1901 tree lt = TYPE_MIN_VALUE (type);
1902 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1903 if (REAL_VALUES_LESS (r, l))
1904 {
1905 overflow = true;
1906 val = lt;
1907 }
1908 }
1909
1910 if (! overflow)
1911 {
1912 tree ut = TYPE_MAX_VALUE (type);
1913 if (ut)
1914 {
1915 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1916 if (REAL_VALUES_LESS (u, r))
1917 {
1918 overflow = true;
1919 val = ut;
1920 }
1921 }
1922 }
1923
1924 if (! overflow)
1925 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1926
1927 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1928 return t;
1929 }
1930
1931 /* A subroutine of fold_convert_const handling conversions of a
1932 FIXED_CST to an integer type. */
1933
1934 static tree
1935 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1936 {
1937 tree t;
1938 double_int temp, temp_trunc;
1939 unsigned int mode;
1940
1941 /* Right shift FIXED_CST to temp by fbit. */
1942 temp = TREE_FIXED_CST (arg1).data;
1943 mode = TREE_FIXED_CST (arg1).mode;
1944 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1945 {
1946 temp = temp.rshift (GET_MODE_FBIT (mode),
1947 HOST_BITS_PER_DOUBLE_INT,
1948 SIGNED_FIXED_POINT_MODE_P (mode));
1949
1950 /* Left shift temp to temp_trunc by fbit. */
1951 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1952 HOST_BITS_PER_DOUBLE_INT,
1953 SIGNED_FIXED_POINT_MODE_P (mode));
1954 }
1955 else
1956 {
1957 temp = double_int_zero;
1958 temp_trunc = double_int_zero;
1959 }
1960
1961 /* If FIXED_CST is negative, we need to round the value toward 0.
1962 	 If any fractional bits are nonzero, add 1 to TEMP to round toward zero.  */
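  /* For example, with 4 fractional bits the value -2.25 is represented
     as the integer -36.  The arithmetic right shift gives -36 >> 4 == -3,
     and shifting back gives -3 << 4 == -48 != -36, so the fractional
     bits were nonzero and we add 1, yielding -2, the value rounded
     toward zero.  */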
1963 if (SIGNED_FIXED_POINT_MODE_P (mode)
1964 && temp_trunc.is_negative ()
1965 && TREE_FIXED_CST (arg1).data != temp_trunc)
1966 temp += double_int_one;
1967
1968 /* Given a fixed-point constant, make new constant with new type,
1969 appropriately sign-extended or truncated. */
1970 t = force_fit_type (type, temp, -1,
1971 (temp.is_negative ()
1972 && (TYPE_UNSIGNED (type)
1973 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1974 | TREE_OVERFLOW (arg1));
1975
1976 return t;
1977 }
1978
1979 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1980 to another floating point type. */
1981
1982 static tree
1983 fold_convert_const_real_from_real (tree type, const_tree arg1)
1984 {
1985 REAL_VALUE_TYPE value;
1986 tree t;
1987
1988 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1989 t = build_real (type, value);
1990
1991 /* If converting an infinity or NAN to a representation that doesn't
1992 have one, set the overflow bit so that we can produce some kind of
1993 error message at the appropriate point if necessary. It's not the
1994 most user-friendly message, but it's better than nothing. */
1995 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1996 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1997 TREE_OVERFLOW (t) = 1;
1998 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1999 && !MODE_HAS_NANS (TYPE_MODE (type)))
2000 TREE_OVERFLOW (t) = 1;
2001   /* Regular overflow: the conversion produced an infinity in a mode
2002      that cannot represent infinities.  */
2003 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2004 && REAL_VALUE_ISINF (value)
2005 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2006 TREE_OVERFLOW (t) = 1;
2007 else
2008 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2009 return t;
2010 }
2011
2012 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2013 to a floating point type. */
2014
2015 static tree
2016 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2017 {
2018 REAL_VALUE_TYPE value;
2019 tree t;
2020
2021 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2022 t = build_real (type, value);
2023
2024 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 return t;
2026 }
2027
2028 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2029 to another fixed-point type. */
2030
2031 static tree
2032 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2033 {
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037
2038 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2039 TYPE_SATURATING (type));
2040 t = build_fixed (type, value);
2041
2042 /* Propagate overflow flags. */
2043 if (overflow_p | TREE_OVERFLOW (arg1))
2044 TREE_OVERFLOW (t) = 1;
2045 return t;
2046 }
2047
2048 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2049 to a fixed-point type. */
2050
2051 static tree
2052 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2053 {
2054 FIXED_VALUE_TYPE value;
2055 tree t;
2056 bool overflow_p;
2057 double_int di;
2058
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2060
2061 di.low = TREE_INT_CST_ELT (arg1, 0);
2062 if (TREE_INT_CST_NUNITS (arg1) == 1)
2063 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2064 else
2065 di.high = TREE_INT_CST_ELT (arg1, 1);
2066
2067 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2071
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2076 }
2077
2078 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2079 to a fixed-point type. */
2080
2081 static tree
2082 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2083 {
2084 FIXED_VALUE_TYPE value;
2085 tree t;
2086 bool overflow_p;
2087
2088 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2089 &TREE_REAL_CST (arg1),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2092
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2097 }
2098
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2101
2102 static tree
2103 fold_convert_const (enum tree_code code, tree type, tree arg1)
2104 {
2105 if (TREE_TYPE (arg1) == type)
2106 return arg1;
2107
2108 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2109 || TREE_CODE (type) == OFFSET_TYPE)
2110 {
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_int_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_int_from_real (code, type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_int_from_fixed (type, arg1);
2117 }
2118 else if (TREE_CODE (type) == REAL_TYPE)
2119 {
2120 if (TREE_CODE (arg1) == INTEGER_CST)
2121 return build_real_from_int_cst (type, arg1);
2122 else if (TREE_CODE (arg1) == REAL_CST)
2123 return fold_convert_const_real_from_real (type, arg1);
2124 else if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_real_from_fixed (type, arg1);
2126 }
2127 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2128 {
2129 if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_fixed_from_fixed (type, arg1);
2131 else if (TREE_CODE (arg1) == INTEGER_CST)
2132 return fold_convert_const_fixed_from_int (type, arg1);
2133 else if (TREE_CODE (arg1) == REAL_CST)
2134 return fold_convert_const_fixed_from_real (type, arg1);
2135 }
2136 return NULL_TREE;
2137 }
2138
2139 /* Construct a vector of zero elements of vector type TYPE. */
2140
2141 static tree
2142 build_zero_vector (tree type)
2143 {
2144 tree t;
2145
2146 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2147 return build_vector_from_val (type, t);
2148 }
2149
2150 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2151
2152 bool
2153 fold_convertible_p (const_tree type, const_tree arg)
2154 {
2155 tree orig = TREE_TYPE (arg);
2156
2157 if (type == orig)
2158 return true;
2159
2160 if (TREE_CODE (arg) == ERROR_MARK
2161 || TREE_CODE (type) == ERROR_MARK
2162 || TREE_CODE (orig) == ERROR_MARK)
2163 return false;
2164
2165 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2166 return true;
2167
2168 switch (TREE_CODE (type))
2169 {
2170 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2171 case POINTER_TYPE: case REFERENCE_TYPE:
2172 case OFFSET_TYPE:
2173 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == OFFSET_TYPE)
2175 return true;
2176 return (TREE_CODE (orig) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2178
2179 case REAL_TYPE:
2180 case FIXED_POINT_TYPE:
2181 case COMPLEX_TYPE:
2182 case VECTOR_TYPE:
2183 case VOID_TYPE:
2184 return TREE_CODE (type) == TREE_CODE (orig);
2185
2186 default:
2187 return false;
2188 }
2189 }
2190
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2193
2194 tree
2195 fold_convert_loc (location_t loc, tree type, tree arg)
2196 {
2197 tree orig = TREE_TYPE (arg);
2198 tree tem;
2199
2200 if (type == orig)
2201 return arg;
2202
2203 if (TREE_CODE (arg) == ERROR_MARK
2204 || TREE_CODE (type) == ERROR_MARK
2205 || TREE_CODE (orig) == ERROR_MARK)
2206 return error_mark_node;
2207
2208 switch (TREE_CODE (type))
2209 {
2210 case POINTER_TYPE:
2211 case REFERENCE_TYPE:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2216 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2217 /* fall through */
2218
2219 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2220 case OFFSET_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2222 {
2223 tem = fold_convert_const (NOP_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2226 }
2227 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2228 || TREE_CODE (orig) == OFFSET_TYPE)
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 if (TREE_CODE (orig) == COMPLEX_TYPE)
2231 return fold_convert_loc (loc, type,
2232 fold_build1_loc (loc, REALPART_EXPR,
2233 TREE_TYPE (orig), arg));
2234 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2236 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2237
2238 case REAL_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2240 {
2241 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2244 }
2245 else if (TREE_CODE (arg) == REAL_CST)
2246 {
2247 tem = fold_convert_const (NOP_EXPR, type, arg);
2248 if (tem != NULL_TREE)
2249 return tem;
2250 }
2251 else if (TREE_CODE (arg) == FIXED_CST)
2252 {
2253 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257
2258 switch (TREE_CODE (orig))
2259 {
2260 case INTEGER_TYPE:
2261 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2262 case POINTER_TYPE: case REFERENCE_TYPE:
2263 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2264
2265 case REAL_TYPE:
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2267
2268 case FIXED_POINT_TYPE:
2269 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2270
2271 case COMPLEX_TYPE:
2272 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2273 return fold_convert_loc (loc, type, tem);
2274
2275 default:
2276 gcc_unreachable ();
2277 }
2278
2279 case FIXED_POINT_TYPE:
2280 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2281 || TREE_CODE (arg) == REAL_CST)
2282 {
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 goto fold_convert_exit;
2286 }
2287
2288 switch (TREE_CODE (orig))
2289 {
2290 case FIXED_POINT_TYPE:
2291 case INTEGER_TYPE:
2292 case ENUMERAL_TYPE:
2293 case BOOLEAN_TYPE:
2294 case REAL_TYPE:
2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2296
2297 case COMPLEX_TYPE:
2298 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2299 return fold_convert_loc (loc, type, tem);
2300
2301 default:
2302 gcc_unreachable ();
2303 }
2304
2305 case COMPLEX_TYPE:
2306 switch (TREE_CODE (orig))
2307 {
2308 case INTEGER_TYPE:
2309 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2310 case POINTER_TYPE: case REFERENCE_TYPE:
2311 case REAL_TYPE:
2312 case FIXED_POINT_TYPE:
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2314 fold_convert_loc (loc, TREE_TYPE (type), arg),
2315 fold_convert_loc (loc, TREE_TYPE (type),
2316 integer_zero_node));
2317 case COMPLEX_TYPE:
2318 {
2319 tree rpart, ipart;
2320
2321 if (TREE_CODE (arg) == COMPLEX_EXPR)
2322 {
2323 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2324 TREE_OPERAND (arg, 0));
2325 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2326 TREE_OPERAND (arg, 1));
2327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2328 }
2329
2330 arg = save_expr (arg);
2331 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2332 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2334 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2336 }
2337
2338 default:
2339 gcc_unreachable ();
2340 }
2341
2342 case VECTOR_TYPE:
2343 if (integer_zerop (arg))
2344 return build_zero_vector (type);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2347 || TREE_CODE (orig) == VECTOR_TYPE);
2348 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2349
2350 case VOID_TYPE:
2351 tem = fold_ignored_result (arg);
2352 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2353
2354 default:
2355 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2356 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2357 gcc_unreachable ();
2358 }
2359 fold_convert_exit:
2360 protected_set_expr_location_unshare (tem, loc);
2361 return tem;
2362 }
2363 \f
2364 /* Return false if expr can be assumed not to be an lvalue, true
2365 otherwise. */
2366
2367 static bool
2368 maybe_lvalue_p (const_tree x)
2369 {
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x))
2372 {
2373 case VAR_DECL:
2374 case PARM_DECL:
2375 case RESULT_DECL:
2376 case LABEL_DECL:
2377 case FUNCTION_DECL:
2378 case SSA_NAME:
2379
2380 case COMPONENT_REF:
2381 case MEM_REF:
2382 case INDIRECT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case BIT_FIELD_REF:
2386 case OBJ_TYPE_REF:
2387
2388 case REALPART_EXPR:
2389 case IMAGPART_EXPR:
2390 case PREINCREMENT_EXPR:
2391 case PREDECREMENT_EXPR:
2392 case SAVE_EXPR:
2393 case TRY_CATCH_EXPR:
2394 case WITH_CLEANUP_EXPR:
2395 case COMPOUND_EXPR:
2396 case MODIFY_EXPR:
2397 case TARGET_EXPR:
2398 case COND_EXPR:
2399 case BIND_EXPR:
2400 break;
2401
2402 default:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2405 break;
2406 return false;
2407 }
2408
2409 return true;
2410 }
2411
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2413
2414 tree
2415 non_lvalue_loc (location_t loc, tree x)
2416 {
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2418 us. */
2419 if (in_gimple_form)
2420 return x;
2421
2422 if (! maybe_lvalue_p (x))
2423 return x;
2424 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2425 }
2426
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2429
2430 static tree
2431 pedantic_non_lvalue_loc (location_t loc, tree x)
2432 {
2433 return protected_set_expr_location_unshare (x, loc);
2434 }
2435 \f
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
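/* For example, the inverse of GT_EXPR is LE_EXPR when NaNs need not be
   honored, but UNLE_EXPR when they must be, so that the inverted
   comparison is true exactly when the original one is false, even for
   unordered operands.  */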
2440
2441 enum tree_code
2442 invert_tree_comparison (enum tree_code code, bool honor_nans)
2443 {
2444 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2445 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2446 return ERROR_MARK;
2447
2448 switch (code)
2449 {
2450 case EQ_EXPR:
2451 return NE_EXPR;
2452 case NE_EXPR:
2453 return EQ_EXPR;
2454 case GT_EXPR:
2455 return honor_nans ? UNLE_EXPR : LE_EXPR;
2456 case GE_EXPR:
2457 return honor_nans ? UNLT_EXPR : LT_EXPR;
2458 case LT_EXPR:
2459 return honor_nans ? UNGE_EXPR : GE_EXPR;
2460 case LE_EXPR:
2461 return honor_nans ? UNGT_EXPR : GT_EXPR;
2462 case LTGT_EXPR:
2463 return UNEQ_EXPR;
2464 case UNEQ_EXPR:
2465 return LTGT_EXPR;
2466 case UNGT_EXPR:
2467 return LE_EXPR;
2468 case UNGE_EXPR:
2469 return LT_EXPR;
2470 case UNLT_EXPR:
2471 return GE_EXPR;
2472 case UNLE_EXPR:
2473 return GT_EXPR;
2474 case ORDERED_EXPR:
2475 return UNORDERED_EXPR;
2476 case UNORDERED_EXPR:
2477 return ORDERED_EXPR;
2478 default:
2479 gcc_unreachable ();
2480 }
2481 }
2482
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
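/* For example, a < b is the same test as b > a, so LT_EXPR maps to
   GT_EXPR; symmetric codes such as EQ_EXPR and UNORDERED_EXPR are
   returned unchanged.  */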
2485
2486 enum tree_code
2487 swap_tree_comparison (enum tree_code code)
2488 {
2489 switch (code)
2490 {
2491 case EQ_EXPR:
2492 case NE_EXPR:
2493 case ORDERED_EXPR:
2494 case UNORDERED_EXPR:
2495 case LTGT_EXPR:
2496 case UNEQ_EXPR:
2497 return code;
2498 case GT_EXPR:
2499 return LT_EXPR;
2500 case GE_EXPR:
2501 return LE_EXPR;
2502 case LT_EXPR:
2503 return GT_EXPR;
2504 case LE_EXPR:
2505 return GE_EXPR;
2506 case UNGT_EXPR:
2507 return UNLT_EXPR;
2508 case UNGE_EXPR:
2509 return UNLE_EXPR;
2510 case UNLT_EXPR:
2511 return UNGT_EXPR;
2512 case UNLE_EXPR:
2513 return UNGE_EXPR;
2514 default:
2515 gcc_unreachable ();
2516 }
2517 }
2518
2519
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2523
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code)
2526 {
2527 switch (code)
2528 {
2529 case LT_EXPR:
2530 return COMPCODE_LT;
2531 case EQ_EXPR:
2532 return COMPCODE_EQ;
2533 case LE_EXPR:
2534 return COMPCODE_LE;
2535 case GT_EXPR:
2536 return COMPCODE_GT;
2537 case NE_EXPR:
2538 return COMPCODE_NE;
2539 case GE_EXPR:
2540 return COMPCODE_GE;
2541 case ORDERED_EXPR:
2542 return COMPCODE_ORD;
2543 case UNORDERED_EXPR:
2544 return COMPCODE_UNORD;
2545 case UNLT_EXPR:
2546 return COMPCODE_UNLT;
2547 case UNEQ_EXPR:
2548 return COMPCODE_UNEQ;
2549 case UNLE_EXPR:
2550 return COMPCODE_UNLE;
2551 case UNGT_EXPR:
2552 return COMPCODE_UNGT;
2553 case LTGT_EXPR:
2554 return COMPCODE_LTGT;
2555 case UNGE_EXPR:
2556 return COMPCODE_UNGE;
2557 default:
2558 gcc_unreachable ();
2559 }
2560 }
2561
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2565
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code)
2568 {
2569 switch (code)
2570 {
2571 case COMPCODE_LT:
2572 return LT_EXPR;
2573 case COMPCODE_EQ:
2574 return EQ_EXPR;
2575 case COMPCODE_LE:
2576 return LE_EXPR;
2577 case COMPCODE_GT:
2578 return GT_EXPR;
2579 case COMPCODE_NE:
2580 return NE_EXPR;
2581 case COMPCODE_GE:
2582 return GE_EXPR;
2583 case COMPCODE_ORD:
2584 return ORDERED_EXPR;
2585 case COMPCODE_UNORD:
2586 return UNORDERED_EXPR;
2587 case COMPCODE_UNLT:
2588 return UNLT_EXPR;
2589 case COMPCODE_UNEQ:
2590 return UNEQ_EXPR;
2591 case COMPCODE_UNLE:
2592 return UNLE_EXPR;
2593 case COMPCODE_UNGT:
2594 return UNGT_EXPR;
2595 case COMPCODE_LTGT:
2596 return LTGT_EXPR;
2597 case COMPCODE_UNGE:
2598 return UNGE_EXPR;
2599 default:
2600 gcc_unreachable ();
2601 }
2602 }
2603
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
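/* For example, for operands where NaNs need not be honored,
   (a < b) || (a == b) combines to a <= b, because ORing the bit
   encodings of LT and EQ yields the encoding of LE; likewise
   (a < b) && (a > b) collapses to constant false.  */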
2609
2610 tree
2611 combine_comparisons (location_t loc,
2612 enum tree_code code, enum tree_code lcode,
2613 enum tree_code rcode, tree truth_type,
2614 tree ll_arg, tree lr_arg)
2615 {
2616 bool honor_nans = HONOR_NANS (ll_arg);
2617 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2618 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2619 int compcode;
2620
2621 switch (code)
2622 {
2623 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2624 compcode = lcompcode & rcompcode;
2625 break;
2626
2627 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2628 compcode = lcompcode | rcompcode;
2629 break;
2630
2631 default:
2632 return NULL_TREE;
2633 }
2634
2635 if (!honor_nans)
2636 {
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode &= ~COMPCODE_UNORD;
2640 if (compcode == COMPCODE_LTGT)
2641 compcode = COMPCODE_NE;
2642 else if (compcode == COMPCODE_ORD)
2643 compcode = COMPCODE_TRUE;
2644 }
2645 else if (flag_trapping_math)
2646 {
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2650 && (lcompcode != COMPCODE_EQ)
2651 && (lcompcode != COMPCODE_ORD);
2652 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2653 && (rcompcode != COMPCODE_EQ)
2654 && (rcompcode != COMPCODE_ORD);
2655 bool trap = (compcode & COMPCODE_UNORD) == 0
2656 && (compcode != COMPCODE_EQ)
2657 && (compcode != COMPCODE_ORD);
2658
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2666 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2667 rtrap = false;
2668
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2671 if (rtrap && !ltrap
2672 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2673 return NULL_TREE;
2674
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap || rtrap) != trap)
2677 return NULL_TREE;
2678 }
2679
2680 if (compcode == COMPCODE_TRUE)
2681 return constant_boolean_node (true, truth_type);
2682 else if (compcode == COMPCODE_FALSE)
2683 return constant_boolean_node (false, truth_type);
2684 else
2685 {
2686 enum tree_code tcode;
2687
2688 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2689 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2690 }
2691 }
2692 \f
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2696
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2703
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2714
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2718
2719 int
2720 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2721 {
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2724 || TREE_TYPE (arg0) == error_mark_node
2725 || TREE_TYPE (arg1) == error_mark_node)
2726 return 0;
2727
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2731 return 0;
2732
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2736 return tree_int_cst_equal (arg0, arg1);
2737
2738   /* If the two types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2746
2747 /* We cannot consider pointers to different address space equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2751 return 0;
2752
2753   /* If the two types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2758
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2761
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2766 && COMPARISON_CLASS_P (arg0)
2767 && COMPARISON_CLASS_P (arg1))
2768 {
2769 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2770
2771 if (TREE_CODE (arg0) == swap_code)
2772 return operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags);
2776 }
2777
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2781 return 0;
2782
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2788 return 0;
2789
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 return 1;
2804
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2809 {
2810 case INTEGER_CST:
2811 return tree_int_cst_equal (arg0, arg1);
2812
2813 case FIXED_CST:
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2816
2817 case REAL_CST:
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2819 TREE_REAL_CST (arg1)))
2820 return 1;
2821
2822
2823 if (!HONOR_SIGNED_ZEROS (arg0))
2824 {
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0) && real_zerop (arg1))
2828 return 1;
2829 }
2830 return 0;
2831
2832 case VECTOR_CST:
2833 {
2834 unsigned i;
2835
2836 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2837 return 0;
2838
2839 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2840 {
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2842 VECTOR_CST_ELT (arg1, i), flags))
2843 return 0;
2844 }
2845 return 1;
2846 }
2847
2848 case COMPLEX_CST:
2849 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 flags)
2851 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2852 flags));
2853
2854 case STRING_CST:
2855 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2856 && ! memcmp (TREE_STRING_POINTER (arg0),
2857 TREE_STRING_POINTER (arg1),
2858 TREE_STRING_LENGTH (arg0)));
2859
2860 case ADDR_EXPR:
2861 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2862 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2863 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2864 default:
2865 break;
2866 }
2867
2868 if (flags & OEP_ONLY_CONST)
2869 return 0;
2870
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873    non-null value.  In the latter case, if either is null, then both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2877
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2881
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2883 {
2884 case tcc_unary:
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0))
2887 {
2888 CASE_CONVERT:
2889 case FIX_TRUNC_EXPR:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2892 return 0;
2893 break;
2894 default:
2895 break;
2896 }
2897
2898 return OP_SAME (0);
2899
2900
2901 case tcc_comparison:
2902 case tcc_binary:
2903 if (OP_SAME (0) && OP_SAME (1))
2904 return 1;
2905
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0))
2908 && operand_equal_p (TREE_OPERAND (arg0, 0),
2909 TREE_OPERAND (arg1, 1), flags)
2910 && operand_equal_p (TREE_OPERAND (arg0, 1),
2911 TREE_OPERAND (arg1, 0), flags));
2912
2913 case tcc_reference:
2914 /* If either of the pointer (or reference) expressions we are
2915 	 dereferencing contains a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2918 && (TREE_SIDE_EFFECTS (arg0)
2919 || TREE_SIDE_EFFECTS (arg1)))
2920 return 0;
2921
2922 switch (TREE_CODE (arg0))
2923 {
2924 case INDIRECT_REF:
2925 if (!(flags & OEP_ADDRESS_OF)
2926 && (TYPE_ALIGN (TREE_TYPE (arg0))
2927 != TYPE_ALIGN (TREE_TYPE (arg1))))
2928 return 0;
2929 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2930 return OP_SAME (0);
2931
2932 case REALPART_EXPR:
2933 case IMAGPART_EXPR:
2934 return OP_SAME (0);
2935
2936 case TARGET_MEM_REF:
2937 case MEM_REF:
2938 /* Require equal access sizes, and similar pointer types.
2939 We can have incomplete types for array references of
2940 variable-sized arrays from the Fortran frontend
2941 though. Also verify the types are compatible. */
2942 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2943 || (TYPE_SIZE (TREE_TYPE (arg0))
2944 && TYPE_SIZE (TREE_TYPE (arg1))
2945 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2946 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2947 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2948 && ((flags & OEP_ADDRESS_OF)
2949 || (alias_ptr_types_compatible_p
2950 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2951 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2952 && (MR_DEPENDENCE_CLIQUE (arg0)
2953 == MR_DEPENDENCE_CLIQUE (arg1))
2954 && (MR_DEPENDENCE_BASE (arg0)
2955 == MR_DEPENDENCE_BASE (arg1))
2956 && (TYPE_ALIGN (TREE_TYPE (arg0))
2957 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2958 return 0;
2959 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2960 return (OP_SAME (0) && OP_SAME (1)
2961 /* TARGET_MEM_REF require equal extra operands. */
2962 && (TREE_CODE (arg0) != TARGET_MEM_REF
2963 || (OP_SAME_WITH_NULL (2)
2964 && OP_SAME_WITH_NULL (3)
2965 && OP_SAME_WITH_NULL (4))));
2966
2967 case ARRAY_REF:
2968 case ARRAY_RANGE_REF:
2969 /* Operands 2 and 3 may be null.
2970 	 Compare the array index by value first if it is constant, as we
2971 	 may have different types but the same value here.  */
2972 if (!OP_SAME (0))
2973 return 0;
2974 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2975 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2976 TREE_OPERAND (arg1, 1))
2977 || OP_SAME (1))
2978 && OP_SAME_WITH_NULL (2)
2979 && OP_SAME_WITH_NULL (3));
2980
2981 case COMPONENT_REF:
2982 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2983 may be NULL when we're called to compare MEM_EXPRs. */
2984 if (!OP_SAME_WITH_NULL (0)
2985 || !OP_SAME (1))
2986 return 0;
2987 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2988 return OP_SAME_WITH_NULL (2);
2989
2990 case BIT_FIELD_REF:
2991 if (!OP_SAME (0))
2992 return 0;
2993 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2994 return OP_SAME (1) && OP_SAME (2);
2995
2996 default:
2997 return 0;
2998 }
2999
3000 case tcc_expression:
3001 switch (TREE_CODE (arg0))
3002 {
3003 case ADDR_EXPR:
3004 return operand_equal_p (TREE_OPERAND (arg0, 0),
3005 TREE_OPERAND (arg1, 0),
3006 flags | OEP_ADDRESS_OF);
3007
3008 case TRUTH_NOT_EXPR:
3009 return OP_SAME (0);
3010
3011 case TRUTH_ANDIF_EXPR:
3012 case TRUTH_ORIF_EXPR:
3013 return OP_SAME (0) && OP_SAME (1);
3014
3015 case FMA_EXPR:
3016 case WIDEN_MULT_PLUS_EXPR:
3017 case WIDEN_MULT_MINUS_EXPR:
3018 if (!OP_SAME (2))
3019 return 0;
3020 	  /* The multiplication operands are commutative.  */
3021 /* FALLTHRU */
3022
3023 case TRUTH_AND_EXPR:
3024 case TRUTH_OR_EXPR:
3025 case TRUTH_XOR_EXPR:
3026 if (OP_SAME (0) && OP_SAME (1))
3027 return 1;
3028
3029 /* Otherwise take into account this is a commutative operation. */
3030 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3031 TREE_OPERAND (arg1, 1), flags)
3032 && operand_equal_p (TREE_OPERAND (arg0, 1),
3033 TREE_OPERAND (arg1, 0), flags));
3034
3035 case COND_EXPR:
3036 case VEC_COND_EXPR:
3037 case DOT_PROD_EXPR:
3038 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3039
3040 default:
3041 return 0;
3042 }
3043
3044 case tcc_vl_exp:
3045 switch (TREE_CODE (arg0))
3046 {
3047 case CALL_EXPR:
3048 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3049 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3050 	    /* If one CALL_EXPR is an internal function call and the other is
3051 	       a normal function call, then they are not equal.  */
3052 return 0;
3053 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3054 {
3055 /* If the CALL_EXPRs call different internal functions, then they
3056 are not equal. */
3057 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3058 return 0;
3059 }
3060 else
3061 {
3062 /* If the CALL_EXPRs call different functions, then they are not
3063 equal. */
3064 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3065 flags))
3066 return 0;
3067 }
3068
3069 {
3070 unsigned int cef = call_expr_flags (arg0);
3071 if (flags & OEP_PURE_SAME)
3072 cef &= ECF_CONST | ECF_PURE;
3073 else
3074 cef &= ECF_CONST;
3075 if (!cef)
3076 return 0;
3077 }
3078
3079 /* Now see if all the arguments are the same. */
3080 {
3081 const_call_expr_arg_iterator iter0, iter1;
3082 const_tree a0, a1;
3083 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3084 a1 = first_const_call_expr_arg (arg1, &iter1);
3085 a0 && a1;
3086 a0 = next_const_call_expr_arg (&iter0),
3087 a1 = next_const_call_expr_arg (&iter1))
3088 if (! operand_equal_p (a0, a1, flags))
3089 return 0;
3090
3091 /* If we get here and both argument lists are exhausted
3092 then the CALL_EXPRs are equal. */
3093 return ! (a0 || a1);
3094 }
3095 default:
3096 return 0;
3097 }
3098
3099 case tcc_declaration:
3100 /* Consider __builtin_sqrt equal to sqrt. */
3101 return (TREE_CODE (arg0) == FUNCTION_DECL
3102 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3103 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3104 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3105
3106 default:
3107 return 0;
3108 }
3109
3110 #undef OP_SAME
3111 #undef OP_SAME_WITH_NULL
3112 }
3113 \f
3114 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3115 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3116
3117 When in doubt, return 0. */
3118
3119 static int
3120 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3121 {
3122 int unsignedp1, unsignedpo;
3123 tree primarg0, primarg1, primother;
3124 unsigned int correct_width;
3125
3126 if (operand_equal_p (arg0, arg1, 0))
3127 return 1;
3128
3129 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3130 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3131 return 0;
3132
3133 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3134 and see if the inner values are the same. This removes any
3135 signedness comparison, which doesn't matter here. */
3136 primarg0 = arg0, primarg1 = arg1;
3137 STRIP_NOPS (primarg0);
3138 STRIP_NOPS (primarg1);
3139 if (operand_equal_p (primarg0, primarg1, 0))
3140 return 1;
3141
3142 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3143 actual comparison operand, ARG0.
3144
3145 First throw away any conversions to wider types
3146 already present in the operands. */
3147
3148 primarg1 = get_narrower (arg1, &unsignedp1);
3149 primother = get_narrower (other, &unsignedpo);
3150
3151 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3152 if (unsignedp1 == unsignedpo
3153 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3154 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3155 {
3156 tree type = TREE_TYPE (arg0);
3157
3158 /* Make sure shorter operand is extended the right way
3159 to match the longer operand. */
3160 primarg1 = fold_convert (signed_or_unsigned_type_for
3161 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3162
3163 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3164 return 1;
3165 }
3166
3167 return 0;
3168 }
3169 \f
3170 /* See if ARG is an expression that is either a comparison or is performing
3171 arithmetic on comparisons. The comparisons must only be comparing
3172 two different values, which will be stored in *CVAL1 and *CVAL2; if
3173 they are nonzero it means that some operands have already been found.
3174 No variables may be used anywhere else in the expression except in the
3175 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3176 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3177
3178 If this is true, return 1. Otherwise, return zero. */
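/* For example, for (a < b) || (a == b) this returns 1 with *CVAL1 == A
   and *CVAL2 == B, whereas (a < b) || (c == d) involves more than two
   values and so returns 0.  */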
3179
3180 static int
3181 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3182 {
3183 enum tree_code code = TREE_CODE (arg);
3184 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3185
3186 /* We can handle some of the tcc_expression cases here. */
3187 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3188 tclass = tcc_unary;
3189 else if (tclass == tcc_expression
3190 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3191 || code == COMPOUND_EXPR))
3192 tclass = tcc_binary;
3193
3194 else if (tclass == tcc_expression && code == SAVE_EXPR
3195 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3196 {
3197 /* If we've already found a CVAL1 or CVAL2, this expression is
3198 	 too complex to handle.  */
3199 if (*cval1 || *cval2)
3200 return 0;
3201
3202 tclass = tcc_unary;
3203 *save_p = 1;
3204 }
3205
3206 switch (tclass)
3207 {
3208 case tcc_unary:
3209 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3210
3211 case tcc_binary:
3212 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3213 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3214 cval1, cval2, save_p));
3215
3216 case tcc_constant:
3217 return 1;
3218
3219 case tcc_expression:
3220 if (code == COND_EXPR)
3221 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3222 cval1, cval2, save_p)
3223 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3224 cval1, cval2, save_p)
3225 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3226 cval1, cval2, save_p));
3227 return 0;
3228
3229 case tcc_comparison:
3230 /* First see if we can handle the first operand, then the second. For
3231 the second operand, we know *CVAL1 can't be zero. It must be that
3232 one side of the comparison is each of the values; test for the
3233 case where this isn't true by failing if the two operands
3234 are the same. */
3235
3236 if (operand_equal_p (TREE_OPERAND (arg, 0),
3237 TREE_OPERAND (arg, 1), 0))
3238 return 0;
3239
3240 if (*cval1 == 0)
3241 *cval1 = TREE_OPERAND (arg, 0);
3242 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3243 ;
3244 else if (*cval2 == 0)
3245 *cval2 = TREE_OPERAND (arg, 0);
3246 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3247 ;
3248 else
3249 return 0;
3250
3251 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3252 ;
3253 else if (*cval2 == 0)
3254 *cval2 = TREE_OPERAND (arg, 1);
3255 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3256 ;
3257 else
3258 return 0;
3259
3260 return 1;
3261
3262 default:
3263 return 0;
3264 }
3265 }
3266 \f
3267 /* ARG is a tree that is known to contain just arithmetic operations and
3268 comparisons. Evaluate the operations in the tree substituting NEW0 for
3269 any occurrence of OLD0 as an operand of a comparison and likewise for
3270 NEW1 and OLD1. */
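/* For example, with OLD0 == A, NEW0 == 0, OLD1 == B and NEW1 == 1,
   the tree (A < B) && (B != A) is rewritten as (0 < 1) && (1 != 0).  */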
3271
3272 static tree
3273 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3274 tree old1, tree new1)
3275 {
3276 tree type = TREE_TYPE (arg);
3277 enum tree_code code = TREE_CODE (arg);
3278 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3279
3280 /* We can handle some of the tcc_expression cases here. */
3281 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3282 tclass = tcc_unary;
3283 else if (tclass == tcc_expression
3284 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3285 tclass = tcc_binary;
3286
3287 switch (tclass)
3288 {
3289 case tcc_unary:
3290 return fold_build1_loc (loc, code, type,
3291 eval_subst (loc, TREE_OPERAND (arg, 0),
3292 old0, new0, old1, new1));
3293
3294 case tcc_binary:
3295 return fold_build2_loc (loc, code, type,
3296 eval_subst (loc, TREE_OPERAND (arg, 0),
3297 old0, new0, old1, new1),
3298 eval_subst (loc, TREE_OPERAND (arg, 1),
3299 old0, new0, old1, new1));
3300
3301 case tcc_expression:
3302 switch (code)
3303 {
3304 case SAVE_EXPR:
3305 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3306 old1, new1);
3307
3308 case COMPOUND_EXPR:
3309 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3310 old1, new1);
3311
3312 case COND_EXPR:
3313 return fold_build3_loc (loc, code, type,
3314 eval_subst (loc, TREE_OPERAND (arg, 0),
3315 old0, new0, old1, new1),
3316 eval_subst (loc, TREE_OPERAND (arg, 1),
3317 old0, new0, old1, new1),
3318 eval_subst (loc, TREE_OPERAND (arg, 2),
3319 old0, new0, old1, new1));
3320 default:
3321 break;
3322 }
3323 /* Fall through - ??? */
3324
3325 case tcc_comparison:
3326 {
3327 tree arg0 = TREE_OPERAND (arg, 0);
3328 tree arg1 = TREE_OPERAND (arg, 1);
3329
3330 /* We need to check both for exact equality and tree equality. The
3331 former will be true if the operand has a side-effect. In that
3332 case, we know the operand occurred exactly once. */
3333
3334 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3335 arg0 = new0;
3336 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3337 arg0 = new1;
3338
3339 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3340 arg1 = new0;
3341 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3342 arg1 = new1;
3343
3344 return fold_build2_loc (loc, code, type, arg0, arg1);
3345 }
3346
3347 default:
3348 return arg;
3349 }
3350 }
3351 \f
3352 /* Return a tree for the case when the result of an expression is RESULT
3353 converted to TYPE and OMITTED was previously an operand of the expression
3354 but is now not needed (e.g., we folded OMITTED * 0).
3355
3356 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3357 the conversion of RESULT to TYPE. */
3358
3359 tree
3360 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3361 {
3362 tree t = fold_convert_loc (loc, type, result);
3363
3364 /* If the resulting operand is an empty statement, just return the omitted
3365      statement cast to void.  */
3366 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3367 return build1_loc (loc, NOP_EXPR, void_type_node,
3368 fold_ignored_result (omitted));
3369
3370 if (TREE_SIDE_EFFECTS (omitted))
3371 return build2_loc (loc, COMPOUND_EXPR, type,
3372 fold_ignored_result (omitted), t);
3373
3374 return non_lvalue_loc (loc, t);
3375 }
3376
3377 /* Return a tree for the case when the result of an expression is RESULT
3378 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3379 of the expression but are now not needed.
3380
3381 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3382 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3383 evaluated before OMITTED2. Otherwise, if neither has side effects,
3384 just do the conversion of RESULT to TYPE. */
3385
3386 tree
3387 omit_two_operands_loc (location_t loc, tree type, tree result,
3388 tree omitted1, tree omitted2)
3389 {
3390 tree t = fold_convert_loc (loc, type, result);
3391
3392 if (TREE_SIDE_EFFECTS (omitted2))
3393 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3394 if (TREE_SIDE_EFFECTS (omitted1))
3395 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3396
3397 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3398 }
3399
3400 \f
3401 /* Return a simplified tree node for the truth-negation of ARG. This
3402 never alters ARG itself. We assume that ARG is an operation that
3403 returns a truth value (0 or 1).
3404
3405 FIXME: one would think we would fold the result, but it causes
3406 problems with the dominator optimizer. */
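/* For example, !(a && b) becomes !a || !b, !(a < b) becomes a >= b
   (or a UNGE b when NaNs must be honored), and !(x & 1) becomes
   (x & 1) == 0.  */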
3407
3408 static tree
3409 fold_truth_not_expr (location_t loc, tree arg)
3410 {
3411 tree type = TREE_TYPE (arg);
3412 enum tree_code code = TREE_CODE (arg);
3413 location_t loc1, loc2;
3414
3415 /* If this is a comparison, we can simply invert it, except for
3416 floating-point non-equality comparisons, in which case we just
3417 enclose a TRUTH_NOT_EXPR around what we have. */
3418
3419 if (TREE_CODE_CLASS (code) == tcc_comparison)
3420 {
3421 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3422 if (FLOAT_TYPE_P (op_type)
3423 && flag_trapping_math
3424 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3425 && code != NE_EXPR && code != EQ_EXPR)
3426 return NULL_TREE;
3427
3428 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3429 if (code == ERROR_MARK)
3430 return NULL_TREE;
3431
3432 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3433 TREE_OPERAND (arg, 1));
3434 }
3435
3436 switch (code)
3437 {
3438 case INTEGER_CST:
3439 return constant_boolean_node (integer_zerop (arg), type);
3440
3441 case TRUTH_AND_EXPR:
3442 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3443 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3444 return build2_loc (loc, TRUTH_OR_EXPR, type,
3445 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3446 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3447
3448 case TRUTH_OR_EXPR:
3449 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3450 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3451 return build2_loc (loc, TRUTH_AND_EXPR, type,
3452 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3453 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3454
3455 case TRUTH_XOR_EXPR:
3456 /* Here we can invert either operand. We invert the first operand
3457 unless the second operand is a TRUTH_NOT_EXPR in which case our
3458 result is the XOR of the first operand with the inside of the
3459 negation of the second operand. */
3460
3461 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3462 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3463 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3464 else
3465 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3466 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3467 TREE_OPERAND (arg, 1));
3468
3469 case TRUTH_ANDIF_EXPR:
3470 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3471 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3472 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3473 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3474 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3475
3476 case TRUTH_ORIF_EXPR:
3477 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3478 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3479 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3480 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3481 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3482
3483 case TRUTH_NOT_EXPR:
3484 return TREE_OPERAND (arg, 0);
3485
3486 case COND_EXPR:
3487 {
3488 tree arg1 = TREE_OPERAND (arg, 1);
3489 tree arg2 = TREE_OPERAND (arg, 2);
3490
3491 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3492 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3493
3494 /* A COND_EXPR may have a throw as one operand, which
3495 then has void type. Just leave void operands
3496 as they are. */
3497 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3498 VOID_TYPE_P (TREE_TYPE (arg1))
3499 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3500 VOID_TYPE_P (TREE_TYPE (arg2))
3501 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3502 }
3503
3504 case COMPOUND_EXPR:
3505 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3506 return build2_loc (loc, COMPOUND_EXPR, type,
3507 TREE_OPERAND (arg, 0),
3508 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3509
3510 case NON_LVALUE_EXPR:
3511 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3512 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3513
3514 CASE_CONVERT:
3515 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3516 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3517
3518 /* ... fall through ... */
3519
3520 case FLOAT_EXPR:
3521 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3522 return build1_loc (loc, TREE_CODE (arg), type,
3523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3524
3525 case BIT_AND_EXPR:
3526 if (!integer_onep (TREE_OPERAND (arg, 1)))
3527 return NULL_TREE;
3528 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3529
3530 case SAVE_EXPR:
3531 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3532
3533 case CLEANUP_POINT_EXPR:
3534 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3535 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3536 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3537
3538 default:
3539 return NULL_TREE;
3540 }
3541 }
3542
3543 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3544 assume that ARG is an operation that returns a truth value (0 or 1
3545 for scalars, 0 or -1 for vectors). Return the folded expression if
3546 folding is successful. Otherwise, return NULL_TREE. */
3547
3548 static tree
3549 fold_invert_truthvalue (location_t loc, tree arg)
3550 {
3551 tree type = TREE_TYPE (arg);
3552 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3553 ? BIT_NOT_EXPR
3554 : TRUTH_NOT_EXPR,
3555 type, arg);
3556 }
3557
3558 /* Return a simplified tree node for the truth-negation of ARG. This
3559 never alters ARG itself. We assume that ARG is an operation that
3560 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3561
3562 tree
3563 invert_truthvalue_loc (location_t loc, tree arg)
3564 {
3565 if (TREE_CODE (arg) == ERROR_MARK)
3566 return arg;
3567
3568 tree type = TREE_TYPE (arg);
3569 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3570 ? BIT_NOT_EXPR
3571 : TRUTH_NOT_EXPR,
3572 type, arg);
3573 }
3574
3575 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3576 operands are another bit-wise operation with a common input. If so,
3577 distribute the bit operations to save an operation and possibly two if
3578 constants are involved. For example, convert
3579 (A | B) & (A | C) into A | (B & C)
3580 Further simplification will occur if B and C are constants.
3581
3582 If this optimization cannot be done, 0 will be returned. */
3583
3584 static tree
3585 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3586 tree arg0, tree arg1)
3587 {
3588 tree common;
3589 tree left, right;
3590
3591 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3592 || TREE_CODE (arg0) == code
3593 || (TREE_CODE (arg0) != BIT_AND_EXPR
3594 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3595 return 0;
3596
3597 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3598 {
3599 common = TREE_OPERAND (arg0, 0);
3600 left = TREE_OPERAND (arg0, 1);
3601 right = TREE_OPERAND (arg1, 1);
3602 }
3603 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3604 {
3605 common = TREE_OPERAND (arg0, 0);
3606 left = TREE_OPERAND (arg0, 1);
3607 right = TREE_OPERAND (arg1, 0);
3608 }
3609 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3610 {
3611 common = TREE_OPERAND (arg0, 1);
3612 left = TREE_OPERAND (arg0, 0);
3613 right = TREE_OPERAND (arg1, 1);
3614 }
3615 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3616 {
3617 common = TREE_OPERAND (arg0, 1);
3618 left = TREE_OPERAND (arg0, 0);
3619 right = TREE_OPERAND (arg1, 0);
3620 }
3621 else
3622 return 0;
3623
3624 common = fold_convert_loc (loc, type, common);
3625 left = fold_convert_loc (loc, type, left);
3626 right = fold_convert_loc (loc, type, right);
3627 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3628 fold_build2_loc (loc, code, type, left, right));
3629 }
3630
3631 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3632 with code CODE. This optimization is unsafe. */
3633 static tree
3634 distribute_real_division (location_t loc, enum tree_code code, tree type,
3635 tree arg0, tree arg1)
3636 {
3637 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3638 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3639
3640 /* (A / C) +- (B / C) -> (A +- B) / C. */
3641 if (mul0 == mul1
3642 && operand_equal_p (TREE_OPERAND (arg0, 1),
3643 TREE_OPERAND (arg1, 1), 0))
3644 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3645 fold_build2_loc (loc, code, type,
3646 TREE_OPERAND (arg0, 0),
3647 TREE_OPERAND (arg1, 0)),
3648 TREE_OPERAND (arg0, 1));
3649
3650 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3651 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3652 TREE_OPERAND (arg1, 0), 0)
3653 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3654 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3655 {
3656 REAL_VALUE_TYPE r0, r1;
3657 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3658 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3659 if (!mul0)
3660 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3661 if (!mul1)
3662 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3663 real_arithmetic (&r0, code, &r0, &r1);
3664 return fold_build2_loc (loc, MULT_EXPR, type,
3665 TREE_OPERAND (arg0, 0),
3666 build_real (type, r0));
3667 }
3668
3669 return NULL_TREE;
3670 }
3671 \f
3672 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3673 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3674
3675 static tree
3676 make_bit_field_ref (location_t loc, tree inner, tree type,
3677 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3678 {
3679 tree result, bftype;
3680
3681 if (bitpos == 0)
3682 {
3683 tree size = TYPE_SIZE (TREE_TYPE (inner));
3684 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3685 || POINTER_TYPE_P (TREE_TYPE (inner)))
3686 && tree_fits_shwi_p (size)
3687 && tree_to_shwi (size) == bitsize)
3688 return fold_convert_loc (loc, type, inner);
3689 }
3690
3691 bftype = type;
3692 if (TYPE_PRECISION (bftype) != bitsize
3693 || TYPE_UNSIGNED (bftype) == !unsignedp)
3694 bftype = build_nonstandard_integer_type (bitsize, 0);
3695
3696 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3697 size_int (bitsize), bitsize_int (bitpos));
3698
3699 if (bftype != type)
3700 result = fold_convert_loc (loc, type, result);
3701
3702 return result;
3703 }
3704
3705 /* Optimize a bit-field compare.
3706
3707 There are two cases: First is a compare against a constant and the
3708 second is a comparison of two items where the fields are at the same
3709 bit position relative to the start of a chunk (byte, halfword, word)
3710 large enough to contain it. In these cases we can avoid the shift
3711 implicit in bitfield extractions.
3712
3713 For constants, we emit a compare of the shifted constant with the
3714 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3715 compared. For two fields at the same position, we do the ANDs with the
3716 similar mask and compare the result of the ANDs.
3717
3718 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3719 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3720 are the left and right operands of the comparison, respectively.
3721
3722 If the optimization described above can be done, we return the resulting
3723 tree. Otherwise we return zero. */
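/* As a source-level illustration (field layout assumed purely for this
   example), a bit-field comparison such as

     struct s { unsigned int f : 3; unsigned int g : 5; };
     int test (struct s *p) { return p->f == 5; }

   can be folded into a mask-and-compare on the containing chunk, roughly
   (word & mask) == (5 << shift), avoiding the shift implicit in a
   bit-field extraction.  */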
3724
3725 static tree
3726 optimize_bit_field_compare (location_t loc, enum tree_code code,
3727 tree compare_type, tree lhs, tree rhs)
3728 {
3729 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3730 tree type = TREE_TYPE (lhs);
3731 tree unsigned_type;
3732 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3733 machine_mode lmode, rmode, nmode;
3734 int lunsignedp, runsignedp;
3735 int lvolatilep = 0, rvolatilep = 0;
3736 tree linner, rinner = NULL_TREE;
3737 tree mask;
3738 tree offset;
3739
3740 /* Get all the information about the extractions being done. If the bit size
3741 is the same as the size of the underlying object, we aren't doing an
3742 extraction at all and so can do nothing. We also don't want to
3743 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3744 then will no longer be able to replace it. */
3745 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3746 &lunsignedp, &lvolatilep, false);
3747 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3748 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3749 return 0;
3750
3751 if (!const_p)
3752 {
3753 /* If this is not a constant, we can only do something if bit positions,
3754 sizes, and signedness are the same. */
3755 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3756 &runsignedp, &rvolatilep, false);
3757
3758 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3759 || lunsignedp != runsignedp || offset != 0
3760 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3761 return 0;
3762 }
3763
3764 /* See if we can find a mode to refer to this field. We should be able to,
3765 but fail if we can't. */
3766 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3767 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3768 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3769 TYPE_ALIGN (TREE_TYPE (rinner))),
3770 word_mode, false);
3771 if (nmode == VOIDmode)
3772 return 0;
3773
3774 /* Set signed and unsigned types of the precision of this mode for the
3775 shifts below. */
3776 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3777
3778 /* Compute the bit position and size for the new reference and our offset
3779 within it. If the new reference is the same size as the original, we
3780 won't optimize anything, so return zero. */
3781 nbitsize = GET_MODE_BITSIZE (nmode);
3782 nbitpos = lbitpos & ~ (nbitsize - 1);
3783 lbitpos -= nbitpos;
3784 if (nbitsize == lbitsize)
3785 return 0;
3786
3787 if (BYTES_BIG_ENDIAN)
3788 lbitpos = nbitsize - lbitsize - lbitpos;
3789
3790 /* Make the mask to be used against the extracted field. */
3791 mask = build_int_cst_type (unsigned_type, -1);
3792 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3793 mask = const_binop (RSHIFT_EXPR, mask,
3794 size_int (nbitsize - lbitsize - lbitpos));
3795
3796 if (! const_p)
3797 /* If not comparing with constant, just rework the comparison
3798 and return. */
3799 return fold_build2_loc (loc, code, compare_type,
3800 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3801 make_bit_field_ref (loc, linner,
3802 unsigned_type,
3803 nbitsize, nbitpos,
3804 1),
3805 mask),
3806 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3807 make_bit_field_ref (loc, rinner,
3808 unsigned_type,
3809 nbitsize, nbitpos,
3810 1),
3811 mask));
3812
3813 /* Otherwise, we are handling the constant case. See if the constant is too
3814 big for the field. Warn and return a tree for 0 (false) if so. We do
3815 this not only for its own sake, but to avoid having to test for this
3816 error case below. If we didn't, we might generate wrong code.
3817
3818 For unsigned fields, the constant shifted right by the field length should
3819 be all zero. For signed fields, the high-order bits should agree with
3820 the sign bit. */
3821
3822 if (lunsignedp)
3823 {
3824 if (wi::lrshift (rhs, lbitsize) != 0)
3825 {
3826 warning (0, "comparison is always %d due to width of bit-field",
3827 code == NE_EXPR);
3828 return constant_boolean_node (code == NE_EXPR, compare_type);
3829 }
3830 }
3831 else
3832 {
3833 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3834 if (tem != 0 && tem != -1)
3835 {
3836 warning (0, "comparison is always %d due to width of bit-field",
3837 code == NE_EXPR);
3838 return constant_boolean_node (code == NE_EXPR, compare_type);
3839 }
3840 }
3841
3842 /* Single-bit compares should always be against zero. */
3843 if (lbitsize == 1 && ! integer_zerop (rhs))
3844 {
3845 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3846 rhs = build_int_cst (type, 0);
3847 }
3848
3849 /* Make a new bitfield reference, shift the constant over the
3850 appropriate number of bits and mask it with the computed mask
3851 (in case this was a signed field). If we changed it, make a new one. */
3852 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3853
3854 rhs = const_binop (BIT_AND_EXPR,
3855 const_binop (LSHIFT_EXPR,
3856 fold_convert_loc (loc, unsigned_type, rhs),
3857 size_int (lbitpos)),
3858 mask);
3859
3860 lhs = build2_loc (loc, code, compare_type,
3861 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3862 return lhs;
3863 }
3864 \f
3865 /* Subroutine for fold_truth_andor_1: decode a field reference.
3866
3867 If EXP is a comparison reference, we return the innermost reference.
3868
3869 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3870 set to the starting bit number.
3871
3872 If the innermost field can be completely contained in a mode-sized
3873 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3874
3875 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3876 otherwise it is not changed.
3877
3878 *PUNSIGNEDP is set to the signedness of the field.
3879
3880 *PMASK is set to the mask used. This is either contained in a
3881 BIT_AND_EXPR or derived from the width of the field.
3882
3883 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3884
3885 Return 0 if this is not a component reference or is one that we can't
3886 do anything with. */
3887
3888 static tree
3889 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3890 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3891 int *punsignedp, int *pvolatilep,
3892 tree *pmask, tree *pand_mask)
3893 {
3894 tree outer_type = 0;
3895 tree and_mask = 0;
3896 tree mask, inner, offset;
3897 tree unsigned_type;
3898 unsigned int precision;
3899
3900 /* All the optimizations using this function assume integer fields.
3901 There are problems with FP fields since the type_for_size call
3902 below can fail for, e.g., XFmode. */
3903 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3904 return 0;
3905
3906 /* We are interested in the bare arrangement of bits, so strip everything
3907 that doesn't affect the machine mode. However, record the type of the
3908 outermost expression if it may matter below. */
3909 if (CONVERT_EXPR_P (exp)
3910 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3911 outer_type = TREE_TYPE (exp);
3912 STRIP_NOPS (exp);
3913
3914 if (TREE_CODE (exp) == BIT_AND_EXPR)
3915 {
3916 and_mask = TREE_OPERAND (exp, 1);
3917 exp = TREE_OPERAND (exp, 0);
3918 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3919 if (TREE_CODE (and_mask) != INTEGER_CST)
3920 return 0;
3921 }
3922
3923 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3924 punsignedp, pvolatilep, false);
3925 if ((inner == exp && and_mask == 0)
3926 || *pbitsize < 0 || offset != 0
3927 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3928 return 0;
3929
3930 /* If the number of bits in the reference is the same as the bitsize of
3931 the outer type, then the outer type gives the signedness. Otherwise
3932 (in case of a small bitfield) the signedness is unchanged. */
3933 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3934 *punsignedp = TYPE_UNSIGNED (outer_type);
3935
3936 /* Compute the mask to access the bitfield. */
3937 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3938 precision = TYPE_PRECISION (unsigned_type);
3939
3940 mask = build_int_cst_type (unsigned_type, -1);
3941
3942 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3943 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3944
3945 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3946 if (and_mask != 0)
3947 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3948 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3949
3950 *pmask = mask;
3951 *pand_mask = and_mask;
3952 return inner;
3953 }
3954
3955 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3956 bit positions and the type of MASK is signed. */
3957
3958 static int
3959 all_ones_mask_p (const_tree mask, unsigned int size)
3960 {
3961 tree type = TREE_TYPE (mask);
3962 unsigned int precision = TYPE_PRECISION (type);
3963
3964 /* If this function returns true when the type of the mask is
3965 UNSIGNED, then there will be errors. In particular see
3966 gcc.c-torture/execute/990326-1.c. There does not appear to be
3967 any documentation paper trail as to why this is so. But the pre
3968 wide-int worked with that restriction and it has been preserved
3969 here. */
3970 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3971 return false;
3972
3973 return wi::mask (size, false, precision) == mask;
3974 }
3975
3976 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3977 represents the sign bit of EXP's type. If EXP represents a sign
3978 or zero extension, also test VAL against the unextended type.
3979 The return value is the (sub)expression whose sign bit is VAL,
3980 or NULL_TREE otherwise. */
3981
3982 tree
3983 sign_bit_p (tree exp, const_tree val)
3984 {
3985 int width;
3986 tree t;
3987
3988 /* Tree EXP must have an integral type. */
3989 t = TREE_TYPE (exp);
3990 if (! INTEGRAL_TYPE_P (t))
3991 return NULL_TREE;
3992
3993 /* Tree VAL must be an integer constant. */
3994 if (TREE_CODE (val) != INTEGER_CST
3995 || TREE_OVERFLOW (val))
3996 return NULL_TREE;
3997
3998 width = TYPE_PRECISION (t);
3999 if (wi::only_sign_bit_p (val, width))
4000 return exp;
4001
4002 /* Handle extension from a narrower type. */
4003 if (TREE_CODE (exp) == NOP_EXPR
4004 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4005 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4006
4007 return NULL_TREE;
4008 }
4009
4010 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4011 to be evaluated unconditionally. */
4012
4013 static int
4014 simple_operand_p (const_tree exp)
4015 {
4016 /* Strip any conversions that don't change the machine mode. */
4017 STRIP_NOPS (exp);
4018
4019 return (CONSTANT_CLASS_P (exp)
4020 || TREE_CODE (exp) == SSA_NAME
4021 || (DECL_P (exp)
4022 && ! TREE_ADDRESSABLE (exp)
4023 && ! TREE_THIS_VOLATILE (exp)
4024 && ! DECL_NONLOCAL (exp)
4025 /* Don't regard global variables as simple. They may be
4026 allocated in ways unknown to the compiler (shared memory,
4027 #pragma weak, etc). */
4028 && ! TREE_PUBLIC (exp)
4029 && ! DECL_EXTERNAL (exp)
4030 /* Weakrefs are not safe to be read, since they can be NULL.
4031 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4032 have DECL_WEAK flag set. */
4033 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4034 /* Loading a static variable is unduly expensive, but global
4035 registers aren't expensive. */
4036 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4037 }
4038
4039 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4040 to be evaluated unconditionally.
4041 In addition to simple_operand_p, we assume that comparisons, conversions,
4042 and logic-not operations are simple, if their operands are simple, too. */
4043
4044 static bool
4045 simple_operand_p_2 (tree exp)
4046 {
4047 enum tree_code code;
4048
4049 if (TREE_SIDE_EFFECTS (exp)
4050 || tree_could_trap_p (exp))
4051 return false;
4052
4053 while (CONVERT_EXPR_P (exp))
4054 exp = TREE_OPERAND (exp, 0);
4055
4056 code = TREE_CODE (exp);
4057
4058 if (TREE_CODE_CLASS (code) == tcc_comparison)
4059 return (simple_operand_p (TREE_OPERAND (exp, 0))
4060 && simple_operand_p (TREE_OPERAND (exp, 1)));
4061
4062 if (code == TRUTH_NOT_EXPR)
4063 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4064
4065 return simple_operand_p (exp);
4066 }
4067
4068 \f
4069 /* The following functions are subroutines to fold_range_test and allow it to
4070 try to change a logical combination of comparisons into a range test.
4071
4072 For example, both
4073 X == 2 || X == 3 || X == 4 || X == 5
4074 and
4075 X >= 2 && X <= 5
4076 are converted to
4077 (unsigned) (X - 2) <= 3
4078
4079 We describe each set of comparisons as being either inside or outside
4080 a range, using a variable named like IN_P, and then describe the
4081 range with a lower and upper bound. If one of the bounds is omitted,
4082 it represents either the highest or lowest value of the type.
4083
4084 In the comments below, we represent a range by two numbers in brackets
4085 preceded by a "+" to designate being inside that range, or a "-" to
4086 designate being outside that range, so the condition can be inverted by
4087 flipping the prefix. An omitted bound is represented by a "-". For
4088 example, "- [-, 10]" means being outside the range starting at the lowest
4089 possible value and ending at 10, in other words, being greater than 10.
4090 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4091 always false.
4092
4093 We set up things so that the missing bounds are handled in a consistent
4094 manner so neither a missing bound nor "true" and "false" need to be
4095 handled using a special case. */
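/* Self-contained illustration (hypothetical functions, not GCC code):
   both forms below compute the same truth value, but the second needs a
   single comparison.

     int in_range_verbose (int x)
     { return x == 2 || x == 3 || x == 4 || x == 5; }

     int in_range_folded (int x)
     { return (unsigned int) x - 2U <= 3U; }
*/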
4096
4097 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4098 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4099 and UPPER1_P are nonzero if the respective argument is an upper bound
4100 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4101 must be specified for a comparison. ARG1 will be converted to ARG0's
4102 type if both are specified. */
4103
4104 static tree
4105 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4106 tree arg1, int upper1_p)
4107 {
4108 tree tem;
4109 int result;
4110 int sgn0, sgn1;
4111
4112 /* If neither arg represents infinity, do the normal operation.
4113 Else, if not a comparison, return infinity. Else handle the special
4114 comparison rules. Note that most of the cases below won't occur, but
4115 are handled for consistency. */
4116
4117 if (arg0 != 0 && arg1 != 0)
4118 {
4119 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4120 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4121 STRIP_NOPS (tem);
4122 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4123 }
4124
4125 if (TREE_CODE_CLASS (code) != tcc_comparison)
4126 return 0;
4127
4128 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4129 for neither. In real maths, we cannot assume open ended ranges are
4130 the same. But, this is computer arithmetic, where numbers are finite.
4131 We can therefore make the transformation of any unbounded range with
4132 the value Z, Z being greater than any representable number. This permits
4133 us to treat unbounded ranges as equal. */
4134 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4135 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4136 switch (code)
4137 {
4138 case EQ_EXPR:
4139 result = sgn0 == sgn1;
4140 break;
4141 case NE_EXPR:
4142 result = sgn0 != sgn1;
4143 break;
4144 case LT_EXPR:
4145 result = sgn0 < sgn1;
4146 break;
4147 case LE_EXPR:
4148 result = sgn0 <= sgn1;
4149 break;
4150 case GT_EXPR:
4151 result = sgn0 > sgn1;
4152 break;
4153 case GE_EXPR:
4154 result = sgn0 >= sgn1;
4155 break;
4156 default:
4157 gcc_unreachable ();
4158 }
4159
4160 return constant_boolean_node (result, type);
4161 }
4162 \f
4163 /* Helper routine for make_range. Perform one step for it, return
4164 new expression if the loop should continue or NULL_TREE if it should
4165 stop. */
4166
4167 tree
4168 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4169 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4170 bool *strict_overflow_p)
4171 {
4172 tree arg0_type = TREE_TYPE (arg0);
4173 tree n_low, n_high, low = *p_low, high = *p_high;
4174 int in_p = *p_in_p, n_in_p;
4175
4176 switch (code)
4177 {
4178 case TRUTH_NOT_EXPR:
4179 /* We can only do something if the range is testing for zero. */
4180 if (low == NULL_TREE || high == NULL_TREE
4181 || ! integer_zerop (low) || ! integer_zerop (high))
4182 return NULL_TREE;
4183 *p_in_p = ! in_p;
4184 return arg0;
4185
4186 case EQ_EXPR: case NE_EXPR:
4187 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4188 /* We can only do something if the range is testing for zero
4189 and if the second operand is an integer constant. Note that
4190 saying something is "in" the range we make is done by
4191 complementing IN_P since it will set in the initial case of
4192 being not equal to zero; "out" is leaving it alone. */
4193 if (low == NULL_TREE || high == NULL_TREE
4194 || ! integer_zerop (low) || ! integer_zerop (high)
4195 || TREE_CODE (arg1) != INTEGER_CST)
4196 return NULL_TREE;
4197
4198 switch (code)
4199 {
4200 case NE_EXPR: /* - [c, c] */
4201 low = high = arg1;
4202 break;
4203 case EQ_EXPR: /* + [c, c] */
4204 in_p = ! in_p, low = high = arg1;
4205 break;
4206 case GT_EXPR: /* - [-, c] */
4207 low = 0, high = arg1;
4208 break;
4209 case GE_EXPR: /* + [c, -] */
4210 in_p = ! in_p, low = arg1, high = 0;
4211 break;
4212 case LT_EXPR: /* - [c, -] */
4213 low = arg1, high = 0;
4214 break;
4215 case LE_EXPR: /* + [-, c] */
4216 in_p = ! in_p, low = 0, high = arg1;
4217 break;
4218 default:
4219 gcc_unreachable ();
4220 }
4221
4222 /* If this is an unsigned comparison, we also know that EXP is
4223 greater than or equal to zero. We base the range tests we make
4224 on that fact, so we record it here so we can parse existing
4225 range tests. We test arg0_type since often the return type
4226 of, e.g. EQ_EXPR, is boolean. */
4227 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4228 {
4229 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4230 in_p, low, high, 1,
4231 build_int_cst (arg0_type, 0),
4232 NULL_TREE))
4233 return NULL_TREE;
4234
4235 in_p = n_in_p, low = n_low, high = n_high;
4236
4237 /* If the high bound is missing, but we have a nonzero low
4238 bound, reverse the range so it goes from zero to the low bound
4239 minus 1. */
4240 if (high == 0 && low && ! integer_zerop (low))
4241 {
4242 in_p = ! in_p;
4243 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4244 build_int_cst (TREE_TYPE (low), 1), 0);
4245 low = build_int_cst (arg0_type, 0);
4246 }
4247 }
4248
4249 *p_low = low;
4250 *p_high = high;
4251 *p_in_p = in_p;
4252 return arg0;
4253
4254 case NEGATE_EXPR:
4255 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4256 low and high are non-NULL, then normalize will DTRT. */
4257 if (!TYPE_UNSIGNED (arg0_type)
4258 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4259 {
4260 if (low == NULL_TREE)
4261 low = TYPE_MIN_VALUE (arg0_type);
4262 if (high == NULL_TREE)
4263 high = TYPE_MAX_VALUE (arg0_type);
4264 }
4265
4266 /* (-x) IN [a,b] -> x in [-b, -a] */
4267 n_low = range_binop (MINUS_EXPR, exp_type,
4268 build_int_cst (exp_type, 0),
4269 0, high, 1);
4270 n_high = range_binop (MINUS_EXPR, exp_type,
4271 build_int_cst (exp_type, 0),
4272 0, low, 0);
4273 if (n_high != 0 && TREE_OVERFLOW (n_high))
4274 return NULL_TREE;
4275 goto normalize;
4276
4277 case BIT_NOT_EXPR:
4278 /* ~ X -> -X - 1 */
4279 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4280 build_int_cst (exp_type, 1));
4281
4282 case PLUS_EXPR:
4283 case MINUS_EXPR:
4284 if (TREE_CODE (arg1) != INTEGER_CST)
4285 return NULL_TREE;
4286
4287 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4288 move a constant to the other side. */
4289 if (!TYPE_UNSIGNED (arg0_type)
4290 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4291 return NULL_TREE;
4292
4293 /* If EXP is signed, any overflow in the computation is undefined,
4294 so we don't worry about it so long as our computations on
4295 the bounds don't overflow. For unsigned, overflow is defined
4296 and this is exactly the right thing. */
4297 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4298 arg0_type, low, 0, arg1, 0);
4299 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4300 arg0_type, high, 1, arg1, 0);
4301 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4302 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4303 return NULL_TREE;
4304
4305 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4306 *strict_overflow_p = true;
4307
4308 normalize:
4309 /* Check for an unsigned range which has wrapped around the maximum
4310 value thus making n_high < n_low, and normalize it. */
4311 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4312 {
4313 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4314 build_int_cst (TREE_TYPE (n_high), 1), 0);
4315 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4316 build_int_cst (TREE_TYPE (n_low), 1), 0);
4317
4318 /* If the range is of the form +/- [ x+1, x ], we won't
4319 be able to normalize it. But then, it represents the
4320 whole range or the empty set, so make it
4321 +/- [ -, - ]. */
4322 if (tree_int_cst_equal (n_low, low)
4323 && tree_int_cst_equal (n_high, high))
4324 low = high = 0;
4325 else
4326 in_p = ! in_p;
4327 }
4328 else
4329 low = n_low, high = n_high;
4330
4331 *p_low = low;
4332 *p_high = high;
4333 *p_in_p = in_p;
4334 return arg0;
4335
4336 CASE_CONVERT:
4337 case NON_LVALUE_EXPR:
4338 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4339 return NULL_TREE;
4340
4341 if (! INTEGRAL_TYPE_P (arg0_type)
4342 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4343 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4344 return NULL_TREE;
4345
4346 n_low = low, n_high = high;
4347
4348 if (n_low != 0)
4349 n_low = fold_convert_loc (loc, arg0_type, n_low);
4350
4351 if (n_high != 0)
4352 n_high = fold_convert_loc (loc, arg0_type, n_high);
4353
4354 /* If we're converting arg0 from an unsigned type to exp's
4355 signed type, we will be doing the comparison as unsigned.
4356 The tests above have already verified that LOW and HIGH
4357 are both positive.
4358
4359 So we have to ensure that we will handle large unsigned
4360 values the same way that the current signed bounds treat
4361 negative values. */
4362
4363 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4364 {
4365 tree high_positive;
4366 tree equiv_type;
4367 /* For fixed-point modes, we need to pass the saturating flag
4368 as the 2nd parameter. */
4369 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4370 equiv_type
4371 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4372 TYPE_SATURATING (arg0_type));
4373 else
4374 equiv_type
4375 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4376
4377 /* A range without an upper bound is, naturally, unbounded.
4378 Since convert would have cropped a very large value, use
4379 the max value for the destination type. */
4380 high_positive
4381 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4382 : TYPE_MAX_VALUE (arg0_type);
4383
4384 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4385 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4386 fold_convert_loc (loc, arg0_type,
4387 high_positive),
4388 build_int_cst (arg0_type, 1));
4389
4390 /* If the low bound is specified, "and" the range with the
4391 range for which the original unsigned value will be
4392 positive. */
4393 if (low != 0)
4394 {
4395 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4396 1, fold_convert_loc (loc, arg0_type,
4397 integer_zero_node),
4398 high_positive))
4399 return NULL_TREE;
4400
4401 in_p = (n_in_p == in_p);
4402 }
4403 else
4404 {
4405 /* Otherwise, "or" the range with the range of the input
4406 that will be interpreted as negative. */
4407 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4408 1, fold_convert_loc (loc, arg0_type,
4409 integer_zero_node),
4410 high_positive))
4411 return NULL_TREE;
4412
4413 in_p = (in_p != n_in_p);
4414 }
4415 }
4416
4417 *p_low = n_low;
4418 *p_high = n_high;
4419 *p_in_p = in_p;
4420 return arg0;
4421
4422 default:
4423 return NULL_TREE;
4424 }
4425 }
4426
4427 /* Given EXP, a logical expression, set the range it is testing into
4428 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4429 actually being tested. *PLOW and *PHIGH will be made of the same
4430 type as the returned expression. If EXP is not a comparison, we
4431 will most likely not be returning a useful value and range. Set
4432 *STRICT_OVERFLOW_P to true if the return value is only valid
4433 because signed overflow is undefined; otherwise, do not change
4434 *STRICT_OVERFLOW_P. */
4435
4436 tree
4437 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4438 bool *strict_overflow_p)
4439 {
4440 enum tree_code code;
4441 tree arg0, arg1 = NULL_TREE;
4442 tree exp_type, nexp;
4443 int in_p;
4444 tree low, high;
4445 location_t loc = EXPR_LOCATION (exp);
4446
4447 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4448 and see if we can refine the range. Some of the cases below may not
4449 happen, but it doesn't seem worth worrying about this. We "continue"
4450 the outer loop when we've changed something; otherwise we "break"
4451 the switch, which will "break" the while. */
4452
4453 in_p = 0;
4454 low = high = build_int_cst (TREE_TYPE (exp), 0);
4455
4456 while (1)
4457 {
4458 code = TREE_CODE (exp);
4459 exp_type = TREE_TYPE (exp);
4460 arg0 = NULL_TREE;
4461
4462 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4463 {
4464 if (TREE_OPERAND_LENGTH (exp) > 0)
4465 arg0 = TREE_OPERAND (exp, 0);
4466 if (TREE_CODE_CLASS (code) == tcc_binary
4467 || TREE_CODE_CLASS (code) == tcc_comparison
4468 || (TREE_CODE_CLASS (code) == tcc_expression
4469 && TREE_OPERAND_LENGTH (exp) > 1))
4470 arg1 = TREE_OPERAND (exp, 1);
4471 }
4472 if (arg0 == NULL_TREE)
4473 break;
4474
4475 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4476 &high, &in_p, strict_overflow_p);
4477 if (nexp == NULL_TREE)
4478 break;
4479 exp = nexp;
4480 }
4481
4482 /* If EXP is a constant, we can evaluate whether this is true or false. */
4483 if (TREE_CODE (exp) == INTEGER_CST)
4484 {
4485 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4486 exp, 0, low, 0))
4487 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4488 exp, 1, high, 1)));
4489 low = high = 0;
4490 exp = 0;
4491 }
4492
4493 *pin_p = in_p, *plow = low, *phigh = high;
4494 return exp;
4495 }
4496 \f
4497 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4498 type, TYPE, return an expression to test if EXP is in (or out of, depending
4499 on IN_P) the range. Return 0 if the test couldn't be created. */
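/* A hypothetical source-level example of the check this builds: testing
   C against the inclusive range ['a', 'z'] can be emitted as one unsigned
   comparison after subtracting the low bound.

     int is_lower (int c)
     { return (unsigned int) c - 'a' <= (unsigned int) ('z' - 'a'); }
*/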
4500
4501 tree
4502 build_range_check (location_t loc, tree type, tree exp, int in_p,
4503 tree low, tree high)
4504 {
4505 tree etype = TREE_TYPE (exp), value;
4506
4507 #ifdef HAVE_canonicalize_funcptr_for_compare
4508 /* Disable this optimization for function pointer expressions
4509 on targets that require function pointer canonicalization. */
4510 if (HAVE_canonicalize_funcptr_for_compare
4511 && TREE_CODE (etype) == POINTER_TYPE
4512 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4513 return NULL_TREE;
4514 #endif
4515
4516 if (! in_p)
4517 {
4518 value = build_range_check (loc, type, exp, 1, low, high);
4519 if (value != 0)
4520 return invert_truthvalue_loc (loc, value);
4521
4522 return 0;
4523 }
4524
4525 if (low == 0 && high == 0)
4526 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4527
4528 if (low == 0)
4529 return fold_build2_loc (loc, LE_EXPR, type, exp,
4530 fold_convert_loc (loc, etype, high));
4531
4532 if (high == 0)
4533 return fold_build2_loc (loc, GE_EXPR, type, exp,
4534 fold_convert_loc (loc, etype, low));
4535
4536 if (operand_equal_p (low, high, 0))
4537 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4538 fold_convert_loc (loc, etype, low));
4539
4540 if (integer_zerop (low))
4541 {
4542 if (! TYPE_UNSIGNED (etype))
4543 {
4544 etype = unsigned_type_for (etype);
4545 high = fold_convert_loc (loc, etype, high);
4546 exp = fold_convert_loc (loc, etype, exp);
4547 }
4548 return build_range_check (loc, type, exp, 1, 0, high);
4549 }
4550
4551 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4552 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4553 {
4554 int prec = TYPE_PRECISION (etype);
4555
4556 if (wi::mask (prec - 1, false, prec) == high)
4557 {
4558 if (TYPE_UNSIGNED (etype))
4559 {
4560 tree signed_etype = signed_type_for (etype);
4561 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4562 etype
4563 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4564 else
4565 etype = signed_etype;
4566 exp = fold_convert_loc (loc, etype, exp);
4567 }
4568 return fold_build2_loc (loc, GT_EXPR, type, exp,
4569 build_int_cst (etype, 0));
4570 }
4571 }
4572
4573 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4574 This requires wrap-around arithmetic for the type of the expression.
4575 First make sure that arithmetic in this type is valid, then make sure
4576 that it wraps around. */
4577 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4578 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4579 TYPE_UNSIGNED (etype));
4580
4581 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4582 {
4583 tree utype, minv, maxv;
4584
4585 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4586 for the type in question, as we rely on this here. */
4587 utype = unsigned_type_for (etype);
4588 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4589 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4590 build_int_cst (TREE_TYPE (maxv), 1), 1);
4591 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4592
4593 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4594 minv, 1, maxv, 1)))
4595 etype = utype;
4596 else
4597 return 0;
4598 }
4599
4600 high = fold_convert_loc (loc, etype, high);
4601 low = fold_convert_loc (loc, etype, low);
4602 exp = fold_convert_loc (loc, etype, exp);
4603
4604 value = const_binop (MINUS_EXPR, high, low);
4605
4607 if (POINTER_TYPE_P (etype))
4608 {
4609 if (value != 0 && !TREE_OVERFLOW (value))
4610 {
4611 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4612 return build_range_check (loc, type,
4613 fold_build_pointer_plus_loc (loc, exp, low),
4614 1, build_int_cst (etype, 0), value);
4615 }
4616 return 0;
4617 }
4618
4619 if (value != 0 && !TREE_OVERFLOW (value))
4620 return build_range_check (loc, type,
4621 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4622 1, build_int_cst (etype, 0), value);
4623
4624 return 0;
4625 }
4626 \f
4627 /* Return the predecessor of VAL in its type, handling the infinite case. */
4628
4629 static tree
4630 range_predecessor (tree val)
4631 {
4632 tree type = TREE_TYPE (val);
4633
4634 if (INTEGRAL_TYPE_P (type)
4635 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4636 return 0;
4637 else
4638 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4639 build_int_cst (TREE_TYPE (val), 1), 0);
4640 }
4641
4642 /* Return the successor of VAL in its type, handling the infinite case. */
4643
4644 static tree
4645 range_successor (tree val)
4646 {
4647 tree type = TREE_TYPE (val);
4648
4649 if (INTEGRAL_TYPE_P (type)
4650 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4651 return 0;
4652 else
4653 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4654 build_int_cst (TREE_TYPE (val), 1), 0);
4655 }
4656
4657 /* Given two ranges, see if we can merge them into one. Return 1 if we
4658 can, 0 if we can't. Set the output range into the specified parameters. */
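/* For example, "and"-ing the in-ranges + [2, 9] and + [5, 12] yields
   + [5, 9], while "and"-ing the out-ranges - [2, 9] and - [5, 12] yields
   - [2, 12]; these are the in0_p/in1_p combinations handled case by case
   below.  */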
4659
4660 bool
4661 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4662 tree high0, int in1_p, tree low1, tree high1)
4663 {
4664 int no_overlap;
4665 int subset;
4666 int temp;
4667 tree tem;
4668 int in_p;
4669 tree low, high;
4670 int lowequal = ((low0 == 0 && low1 == 0)
4671 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4672 low0, 0, low1, 0)));
4673 int highequal = ((high0 == 0 && high1 == 0)
4674 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4675 high0, 1, high1, 1)));
4676
4677 /* Make range 0 be the range that starts first, or ends last if they
4678 start at the same value. Swap them if it isn't. */
4679 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4680 low0, 0, low1, 0))
4681 || (lowequal
4682 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4683 high1, 1, high0, 1))))
4684 {
4685 temp = in0_p, in0_p = in1_p, in1_p = temp;
4686 tem = low0, low0 = low1, low1 = tem;
4687 tem = high0, high0 = high1, high1 = tem;
4688 }
4689
4690 /* Now flag two cases, whether the ranges are disjoint or whether the
4691 second range is totally subsumed in the first. Note that the tests
4692 below are simplified by the ones above. */
4693 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4694 high0, 1, low1, 0));
4695 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4696 high1, 1, high0, 1));
4697
4698 /* We now have four cases, depending on whether we are including or
4699 excluding the two ranges. */
4700 if (in0_p && in1_p)
4701 {
4702 /* If they don't overlap, the result is false. If the second range
4703 is a subset it is the result. Otherwise, the range is from the start
4704 of the second to the end of the first. */
4705 if (no_overlap)
4706 in_p = 0, low = high = 0;
4707 else if (subset)
4708 in_p = 1, low = low1, high = high1;
4709 else
4710 in_p = 1, low = low1, high = high0;
4711 }
4712
4713 else if (in0_p && ! in1_p)
4714 {
4715 /* If they don't overlap, the result is the first range. If they are
4716 equal, the result is false. If the second range is a subset of the
4717 first, and the ranges begin at the same place, we go from just after
4718 the end of the second range to the end of the first. If the second
4719 range is not a subset of the first, or if it is a subset and both
4720 ranges end at the same place, the range starts at the start of the
4721 first range and ends just before the second range.
4722 Otherwise, we can't describe this as a single range. */
4723 if (no_overlap)
4724 in_p = 1, low = low0, high = high0;
4725 else if (lowequal && highequal)
4726 in_p = 0, low = high = 0;
4727 else if (subset && lowequal)
4728 {
4729 low = range_successor (high1);
4730 high = high0;
4731 in_p = 1;
4732 if (low == 0)
4733 {
4734 /* We are in the weird situation where high0 > high1 but
4735 high1 has no successor. Punt. */
4736 return 0;
4737 }
4738 }
4739 else if (! subset || highequal)
4740 {
4741 low = low0;
4742 high = range_predecessor (low1);
4743 in_p = 1;
4744 if (high == 0)
4745 {
4746 /* low0 < low1 but low1 has no predecessor. Punt. */
4747 return 0;
4748 }
4749 }
4750 else
4751 return 0;
4752 }
4753
4754 else if (! in0_p && in1_p)
4755 {
4756 /* If they don't overlap, the result is the second range. If the second
4757 is a subset of the first, the result is false. Otherwise,
4758 the range starts just after the first range and ends at the
4759 end of the second. */
4760 if (no_overlap)
4761 in_p = 1, low = low1, high = high1;
4762 else if (subset || highequal)
4763 in_p = 0, low = high = 0;
4764 else
4765 {
4766 low = range_successor (high0);
4767 high = high1;
4768 in_p = 1;
4769 if (low == 0)
4770 {
4771 /* high1 > high0 but high0 has no successor. Punt. */
4772 return 0;
4773 }
4774 }
4775 }
4776
4777 else
4778 {
4779 /* The case where we are excluding both ranges. Here the complex case
4780 is if they don't overlap. In that case, the only time we have a
4781 range is if they are adjacent. If the second is a subset of the
4782 first, the result is the first. Otherwise, the range to exclude
4783 starts at the beginning of the first range and ends at the end of the
4784 second. */
4785 if (no_overlap)
4786 {
4787 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4788 range_successor (high0),
4789 1, low1, 0)))
4790 in_p = 0, low = low0, high = high1;
4791 else
4792 {
4793 /* Canonicalize - [min, x] into - [-, x]. */
4794 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4795 switch (TREE_CODE (TREE_TYPE (low0)))
4796 {
4797 case ENUMERAL_TYPE:
4798 if (TYPE_PRECISION (TREE_TYPE (low0))
4799 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4800 break;
4801 /* FALLTHROUGH */
4802 case INTEGER_TYPE:
4803 if (tree_int_cst_equal (low0,
4804 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4805 low0 = 0;
4806 break;
4807 case POINTER_TYPE:
4808 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4809 && integer_zerop (low0))
4810 low0 = 0;
4811 break;
4812 default:
4813 break;
4814 }
4815
4816 /* Canonicalize - [x, max] into - [x, -]. */
4817 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4818 switch (TREE_CODE (TREE_TYPE (high1)))
4819 {
4820 case ENUMERAL_TYPE:
4821 if (TYPE_PRECISION (TREE_TYPE (high1))
4822 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4823 break;
4824 /* FALLTHROUGH */
4825 case INTEGER_TYPE:
4826 if (tree_int_cst_equal (high1,
4827 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4828 high1 = 0;
4829 break;
4830 case POINTER_TYPE:
4831 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4832 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4833 high1, 1,
4834 build_int_cst (TREE_TYPE (high1), 1),
4835 1)))
4836 high1 = 0;
4837 break;
4838 default:
4839 break;
4840 }
4841
4842 /* The ranges might be also adjacent between the maximum and
4843 minimum values of the given type. For
4844 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4845 return + [x + 1, y - 1]. */
4846 if (low0 == 0 && high1 == 0)
4847 {
4848 low = range_successor (high0);
4849 high = range_predecessor (low1);
4850 if (low == 0 || high == 0)
4851 return 0;
4852
4853 in_p = 1;
4854 }
4855 else
4856 return 0;
4857 }
4858 }
4859 else if (subset)
4860 in_p = 0, low = low0, high = high0;
4861 else
4862 in_p = 0, low = low0, high = high1;
4863 }
4864
4865 *pin_p = in_p, *plow = low, *phigh = high;
4866 return 1;
4867 }
4868 \f
4869
4870 /* Subroutine of fold, looking inside expressions of the form
4871 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4872 of the COND_EXPR. This function is being used also to optimize
4873 A op B ? C : A, by reversing the comparison first.
4874
4875 Return a folded expression whose code is not a COND_EXPR
4876 anymore, or NULL_TREE if no folding opportunity is found. */
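/* Typical source-level shapes this handles, written as hypothetical C for
   illustration (the transformations below apply only when signed zeros
   and NaNs permit):

     double f1 (double a)     { return a >= 0 ? a : -a; }
     int    f2 (int a, int b) { return a <= b ? a : b; }

   f1 folds to an ABS_EXPR of a, and f2 folds to a MIN_EXPR of a and b.  */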
4877
4878 static tree
4879 fold_cond_expr_with_comparison (location_t loc, tree type,
4880 tree arg0, tree arg1, tree arg2)
4881 {
4882 enum tree_code comp_code = TREE_CODE (arg0);
4883 tree arg00 = TREE_OPERAND (arg0, 0);
4884 tree arg01 = TREE_OPERAND (arg0, 1);
4885 tree arg1_type = TREE_TYPE (arg1);
4886 tree tem;
4887
4888 STRIP_NOPS (arg1);
4889 STRIP_NOPS (arg2);
4890
4891 /* If we have A op 0 ? A : -A, consider applying the following
4892 transformations:
4893
4894 A == 0? A : -A same as -A
4895 A != 0? A : -A same as A
4896 A >= 0? A : -A same as abs (A)
4897 A > 0? A : -A same as abs (A)
4898 A <= 0? A : -A same as -abs (A)
4899 A < 0? A : -A same as -abs (A)
4900
4901 None of these transformations work for modes with signed
4902 zeros. If A is +/-0, the first two transformations will
4903 change the sign of the result (from +0 to -0, or vice
4904 versa). The last four will fix the sign of the result,
4905 even though the original expressions could be positive or
4906 negative, depending on the sign of A.
4907
4908 Note that all these transformations are correct if A is
4909 NaN, since the two alternatives (A and -A) are also NaNs. */
4910 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4911 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4912 ? real_zerop (arg01)
4913 : integer_zerop (arg01))
4914 && ((TREE_CODE (arg2) == NEGATE_EXPR
4915 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4916 /* In the case that A is of the form X-Y, '-A' (arg2) may
4917 have already been folded to Y-X, check for that. */
4918 || (TREE_CODE (arg1) == MINUS_EXPR
4919 && TREE_CODE (arg2) == MINUS_EXPR
4920 && operand_equal_p (TREE_OPERAND (arg1, 0),
4921 TREE_OPERAND (arg2, 1), 0)
4922 && operand_equal_p (TREE_OPERAND (arg1, 1),
4923 TREE_OPERAND (arg2, 0), 0))))
4924 switch (comp_code)
4925 {
4926 case EQ_EXPR:
4927 case UNEQ_EXPR:
4928 tem = fold_convert_loc (loc, arg1_type, arg1);
4929 return pedantic_non_lvalue_loc (loc,
4930 fold_convert_loc (loc, type,
4931 negate_expr (tem)));
4932 case NE_EXPR:
4933 case LTGT_EXPR:
4934 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4935 case UNGE_EXPR:
4936 case UNGT_EXPR:
4937 if (flag_trapping_math)
4938 break;
4939 /* Fall through. */
4940 case GE_EXPR:
4941 case GT_EXPR:
4942 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4943 arg1 = fold_convert_loc (loc, signed_type_for
4944 (TREE_TYPE (arg1)), arg1);
4945 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4946 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4947 case UNLE_EXPR:
4948 case UNLT_EXPR:
4949 if (flag_trapping_math)
4950 break;
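/* Fall through.  */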
4951 case LE_EXPR:
4952 case LT_EXPR:
4953 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4954 arg1 = fold_convert_loc (loc, signed_type_for
4955 (TREE_TYPE (arg1)), arg1);
4956 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4957 return negate_expr (fold_convert_loc (loc, type, tem));
4958 default:
4959 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4960 break;
4961 }
4962
4963 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4964 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4965 both transformations are correct when A is NaN: A != 0
4966 is then true, and A == 0 is false. */
4967
4968 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4969 && integer_zerop (arg01) && integer_zerop (arg2))
4970 {
4971 if (comp_code == NE_EXPR)
4972 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4973 else if (comp_code == EQ_EXPR)
4974 return build_zero_cst (type);
4975 }
4976
4977 /* Try some transformations of A op B ? A : B.
4978
4979 A == B? A : B same as B
4980 A != B? A : B same as A
4981 A >= B? A : B same as max (A, B)
4982 A > B? A : B same as max (B, A)
4983 A <= B? A : B same as min (A, B)
4984 A < B? A : B same as min (B, A)
4985
4986 As above, these transformations don't work in the presence
4987 of signed zeros. For example, if A and B are zeros of
4988 opposite sign, the first two transformations will change
4989 the sign of the result. In the last four, the original
4990 expressions give different results for (A=+0, B=-0) and
4991 (A=-0, B=+0), but the transformed expressions do not.
4992
4993 The first two transformations are correct if either A or B
4994 is a NaN. In the first transformation, the condition will
4995 be false, and B will indeed be chosen. In the case of the
4996 second transformation, the condition A != B will be true,
4997 and A will be chosen.
4998
4999 The conversions to max() and min() are not correct if B is
5000 a number and A is not. The conditions in the original
5001 expressions will be false, so all four give B. The min()
5002 and max() versions would give a NaN instead. */
5003 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5004 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5005 /* Avoid these transformations if the COND_EXPR may be used
5006 as an lvalue in the C++ front-end. PR c++/19199. */
5007 && (in_gimple_form
5008 || VECTOR_TYPE_P (type)
5009 || (! lang_GNU_CXX ()
5010 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5011 || ! maybe_lvalue_p (arg1)
5012 || ! maybe_lvalue_p (arg2)))
5013 {
5014 tree comp_op0 = arg00;
5015 tree comp_op1 = arg01;
5016 tree comp_type = TREE_TYPE (comp_op0);
5017
5018 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5019 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5020 {
5021 comp_type = type;
5022 comp_op0 = arg1;
5023 comp_op1 = arg2;
5024 }
5025
5026 switch (comp_code)
5027 {
5028 case EQ_EXPR:
5029 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5030 case NE_EXPR:
5031 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5032 case LE_EXPR:
5033 case LT_EXPR:
5034 case UNLE_EXPR:
5035 case UNLT_EXPR:
5036 /* In C++ a ?: expression can be an lvalue, so put the
5037 operand which will be used if they are equal first
5038 so that we can convert this back to the
5039 corresponding COND_EXPR. */
5040 if (!HONOR_NANS (arg1))
5041 {
5042 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5043 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5044 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5045 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5046 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5047 comp_op1, comp_op0);
5048 return pedantic_non_lvalue_loc (loc,
5049 fold_convert_loc (loc, type, tem));
5050 }
5051 break;
5052 case GE_EXPR:
5053 case GT_EXPR:
5054 case UNGE_EXPR:
5055 case UNGT_EXPR:
5056 if (!HONOR_NANS (arg1))
5057 {
5058 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5059 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5060 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5061 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5062 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5063 comp_op1, comp_op0);
5064 return pedantic_non_lvalue_loc (loc,
5065 fold_convert_loc (loc, type, tem));
5066 }
5067 break;
5068 case UNEQ_EXPR:
5069 if (!HONOR_NANS (arg1))
5070 return pedantic_non_lvalue_loc (loc,
5071 fold_convert_loc (loc, type, arg2));
5072 break;
5073 case LTGT_EXPR:
5074 if (!HONOR_NANS (arg1))
5075 return pedantic_non_lvalue_loc (loc,
5076 fold_convert_loc (loc, type, arg1));
5077 break;
5078 default:
5079 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5080 break;
5081 }
5082 }
5083
5084 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5085 we might still be able to simplify this. For example,
5086 if C1 is one less or one more than C2, this might have started
5087 out as a MIN or MAX and been transformed by this function.
5088 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5089
5090 if (INTEGRAL_TYPE_P (type)
5091 && TREE_CODE (arg01) == INTEGER_CST
5092 && TREE_CODE (arg2) == INTEGER_CST)
5093 switch (comp_code)
5094 {
5095 case EQ_EXPR:
5096 if (TREE_CODE (arg1) == INTEGER_CST)
5097 break;
5098 /* We can replace A with C1 in this case. */
5099 arg1 = fold_convert_loc (loc, type, arg01);
5100 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5101
5102 case LT_EXPR:
5103 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5104 MIN_EXPR, to preserve the signedness of the comparison. */
5105 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5106 OEP_ONLY_CONST)
5107 && operand_equal_p (arg01,
5108 const_binop (PLUS_EXPR, arg2,
5109 build_int_cst (type, 1)),
5110 OEP_ONLY_CONST))
5111 {
5112 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5113 fold_convert_loc (loc, TREE_TYPE (arg00),
5114 arg2));
5115 return pedantic_non_lvalue_loc (loc,
5116 fold_convert_loc (loc, type, tem));
5117 }
5118 break;
5119
5120 case LE_EXPR:
5121 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5122 as above. */
5123 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5124 OEP_ONLY_CONST)
5125 && operand_equal_p (arg01,
5126 const_binop (MINUS_EXPR, arg2,
5127 build_int_cst (type, 1)),
5128 OEP_ONLY_CONST))
5129 {
5130 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5131 fold_convert_loc (loc, TREE_TYPE (arg00),
5132 arg2));
5133 return pedantic_non_lvalue_loc (loc,
5134 fold_convert_loc (loc, type, tem));
5135 }
5136 break;
5137
5138 case GT_EXPR:
5139 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5140 MAX_EXPR, to preserve the signedness of the comparison. */
5141 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5142 OEP_ONLY_CONST)
5143 && operand_equal_p (arg01,
5144 const_binop (MINUS_EXPR, arg2,
5145 build_int_cst (type, 1)),
5146 OEP_ONLY_CONST))
5147 {
5148 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5149 fold_convert_loc (loc, TREE_TYPE (arg00),
5150 arg2));
5151 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5152 }
5153 break;
5154
5155 case GE_EXPR:
5156 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5157 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5158 OEP_ONLY_CONST)
5159 && operand_equal_p (arg01,
5160 const_binop (PLUS_EXPR, arg2,
5161 build_int_cst (type, 1)),
5162 OEP_ONLY_CONST))
5163 {
5164 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5165 fold_convert_loc (loc, TREE_TYPE (arg00),
5166 arg2));
5167 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5168 }
5169 break;
5170 case NE_EXPR:
5171 break;
5172 default:
5173 gcc_unreachable ();
5174 }
5175
5176 return NULL_TREE;
5177 }
5178
5179
5180 \f
5181 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5182 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5183 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5184 false) >= 2)
5185 #endif
5186
5187 /* EXP is some logical combination of boolean tests. See if we can
5188 merge it into some range test. Return the new tree if so. */
5189
5190 static tree
5191 fold_range_test (location_t loc, enum tree_code code, tree type,
5192 tree op0, tree op1)
5193 {
5194 int or_op = (code == TRUTH_ORIF_EXPR
5195 || code == TRUTH_OR_EXPR);
5196 int in0_p, in1_p, in_p;
5197 tree low0, low1, low, high0, high1, high;
5198 bool strict_overflow_p = false;
5199 tree tem, lhs, rhs;
5200 const char * const warnmsg = G_("assuming signed overflow does not occur "
5201 "when simplifying range test");
5202
5203 if (!INTEGRAL_TYPE_P (type))
5204 return 0;
5205
5206 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5207 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5208
5209 /* If this is an OR operation, invert both sides; we will invert
5210 again at the end. */
5211 if (or_op)
5212 in0_p = ! in0_p, in1_p = ! in1_p;
5213
5214 /* If both expressions are the same, if we can merge the ranges, and we
5215 can build the range test, return it or it inverted. If one of the
5216 ranges is always true or always false, consider it to be the same
5217 expression as the other. */
5218 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5219 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5220 in1_p, low1, high1)
5221 && 0 != (tem = (build_range_check (loc, type,
5222 lhs != 0 ? lhs
5223 : rhs != 0 ? rhs : integer_zero_node,
5224 in_p, low, high))))
5225 {
5226 if (strict_overflow_p)
5227 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5228 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5229 }
5230
5231 /* On machines where the branch cost is expensive, if this is a
5232 short-circuited branch and the underlying object on both sides
5233 is the same, make a non-short-circuit operation. */
5234 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5235 && lhs != 0 && rhs != 0
5236 && (code == TRUTH_ANDIF_EXPR
5237 || code == TRUTH_ORIF_EXPR)
5238 && operand_equal_p (lhs, rhs, 0))
5239 {
5240 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5241 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5242 which cases we can't do this. */
5243 if (simple_operand_p (lhs))
5244 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5245 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5246 type, op0, op1);
5247
5248 else if (!lang_hooks.decls.global_bindings_p ()
5249 && !CONTAINS_PLACEHOLDER_P (lhs))
5250 {
5251 tree common = save_expr (lhs);
5252
5253 if (0 != (lhs = build_range_check (loc, type, common,
5254 or_op ? ! in0_p : in0_p,
5255 low0, high0))
5256 && (0 != (rhs = build_range_check (loc, type, common,
5257 or_op ? ! in1_p : in1_p,
5258 low1, high1))))
5259 {
5260 if (strict_overflow_p)
5261 fold_overflow_warning (warnmsg,
5262 WARN_STRICT_OVERFLOW_COMPARISON);
5263 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5264 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5265 type, lhs, rhs);
5266 }
5267 }
5268 }
5269
5270 return 0;
5271 }
5272 \f
5273 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5274 bit value. Arrange things so the extra bits will be set to zero if and
5275 only if C is sign-extended to its full width. If MASK is nonzero,
5276 it is an INTEGER_CST that should be AND'ed with the extra bits. */
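/* The sign-bit trick used below, as a hypothetical standalone sketch on a
   64-bit value (valid for 0 < P < 64; names are illustrative only):

     unsigned long long
     unextend_example (unsigned long long c, int p)
     {
       unsigned long long sign = (c >> (p - 1)) & 1;
       unsigned long long ext = (0ULL - sign) << p;
       return c ^ ext;
     }

   EXT is all ones above bit P when the P-bit value is negative and zero
   otherwise, so the XOR clears the extra high bits exactly when C was
   already sign-extended.  The routine below does the same with tree
   constants by shifting the sign bit to the top of the mode and
   arithmetic-shifting it back down.  */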
5277
5278 static tree
5279 unextend (tree c, int p, int unsignedp, tree mask)
5280 {
5281 tree type = TREE_TYPE (c);
5282 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5283 tree temp;
5284
5285 if (p == modesize || unsignedp)
5286 return c;
5287
5288 /* We work by getting just the sign bit into the low-order bit, then
5289 into the high-order bit, then sign-extend. We then XOR that value
5290 with C. */
5291 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5292
5293 /* We must use a signed type in order to get an arithmetic right shift.
5294 However, we must also avoid introducing accidental overflows, so that
5295 a subsequent call to integer_zerop will work. Hence we must
5296 do the type conversion here. At this point, the constant is either
5297 zero or one, and the conversion to a signed type can never overflow.
5298 We could get an overflow if this conversion is done anywhere else. */
5299 if (TYPE_UNSIGNED (type))
5300 temp = fold_convert (signed_type_for (type), temp);
5301
5302 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5303 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5304 if (mask != 0)
5305 temp = const_binop (BIT_AND_EXPR, temp,
5306 fold_convert (TREE_TYPE (c), mask));
5307 /* If necessary, convert the type back to match the type of C. */
5308 if (TYPE_UNSIGNED (type))
5309 temp = fold_convert (type, temp);
5310
5311 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5312 }
5313 \f
5314 /* For an expression that has the form
5315 (A && B) || ~B
5316 or
5317 (A || B) && ~B,
5318 we can drop one of the inner expressions and simplify to
5319 A || ~B
5320 or
5321 A && ~B
5322 LOC is the location of the resulting expression. OP is the inner
5323 logical operation; the left-hand side in the examples above, while CMPOP
5324 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5325 removing a condition that guards another, as in
5326 (A != NULL && A->...) || A == NULL
5327 which we must not transform. If RHS_ONLY is true, only eliminate the
5328 right-most operand of the inner logical operation. */
5329
5330 static tree
5331 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5332 bool rhs_only)
5333 {
5334 tree type = TREE_TYPE (cmpop);
5335 enum tree_code code = TREE_CODE (cmpop);
5336 enum tree_code truthop_code = TREE_CODE (op);
5337 tree lhs = TREE_OPERAND (op, 0);
5338 tree rhs = TREE_OPERAND (op, 1);
5339 tree orig_lhs = lhs, orig_rhs = rhs;
5340 enum tree_code rhs_code = TREE_CODE (rhs);
5341 enum tree_code lhs_code = TREE_CODE (lhs);
5342 enum tree_code inv_code;
5343
5344 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5345 return NULL_TREE;
5346
5347 if (TREE_CODE_CLASS (code) != tcc_comparison)
5348 return NULL_TREE;
5349
5350 if (rhs_code == truthop_code)
5351 {
5352 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5353 if (newrhs != NULL_TREE)
5354 {
5355 rhs = newrhs;
5356 rhs_code = TREE_CODE (rhs);
5357 }
5358 }
5359 if (lhs_code == truthop_code && !rhs_only)
5360 {
5361 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5362 if (newlhs != NULL_TREE)
5363 {
5364 lhs = newlhs;
5365 lhs_code = TREE_CODE (lhs);
5366 }
5367 }
5368
5369 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5370 if (inv_code == rhs_code
5371 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5372 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5373 return lhs;
5374 if (!rhs_only && inv_code == lhs_code
5375 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5376 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5377 return rhs;
5378 if (rhs != orig_rhs || lhs != orig_lhs)
5379 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5380 lhs, rhs);
5381 return NULL_TREE;
5382 }
5383
5384 /* Find ways of folding logical expressions of LHS and RHS:
5385 Try to merge two comparisons to the same innermost item.
5386 Look for range tests like "ch >= '0' && ch <= '9'".
5387 Look for combinations of simple terms on machines with expensive branches
5388 and evaluate the RHS unconditionally.
5389
5390 For example, if we have p->a == 2 && p->b == 4 and we can make an
5391 object large enough to span both A and B, we can do this with a comparison
5392 against the object ANDed with a mask.
5393
5394 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5395 operations to do this with one comparison.
5396
5397 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5398 function and the one above.
5399
5400 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5401 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5402
5403 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5404 two operands.
5405
5406 We return the simplified tree or 0 if no optimization is possible. */
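/* As an illustrative sketch of the constant case handled below: given
     struct s { unsigned char a, b; } *p;
   the test "p->a == 2 && p->b == 4" can, when a mode spanning both
   bytes is available, become a single two-byte load of *p compared
   against the merged constant (0x0402 on a little-endian target,
   0x0204 on a big-endian one), masked first if the fields do not fill
   the loaded word.  */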
5407
5408 static tree
5409 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5410 tree lhs, tree rhs)
5411 {
5412 /* If this is the "or" of two comparisons, we can do something if
5413 the comparisons are NE_EXPR. If this is the "and", we can do something
5414 if the comparisons are EQ_EXPR. I.e.,
5415 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5416
5417 WANTED_CODE is this operation code. For single bit fields, we can
5418 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5419 comparison for one-bit fields. */
5420
5421 enum tree_code wanted_code;
5422 enum tree_code lcode, rcode;
5423 tree ll_arg, lr_arg, rl_arg, rr_arg;
5424 tree ll_inner, lr_inner, rl_inner, rr_inner;
5425 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5426 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5427 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5428 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5429 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5430 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5431 machine_mode lnmode, rnmode;
5432 tree ll_mask, lr_mask, rl_mask, rr_mask;
5433 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5434 tree l_const, r_const;
5435 tree lntype, rntype, result;
5436 HOST_WIDE_INT first_bit, end_bit;
5437 int volatilep;
5438
5439 /* Start by getting the comparison codes. Fail if anything is volatile.
5440 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5441 it were surrounded with a NE_EXPR. */
5442
5443 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5444 return 0;
5445
5446 lcode = TREE_CODE (lhs);
5447 rcode = TREE_CODE (rhs);
5448
5449 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5450 {
5451 lhs = build2 (NE_EXPR, truth_type, lhs,
5452 build_int_cst (TREE_TYPE (lhs), 0));
5453 lcode = NE_EXPR;
5454 }
5455
5456 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5457 {
5458 rhs = build2 (NE_EXPR, truth_type, rhs,
5459 build_int_cst (TREE_TYPE (rhs), 0));
5460 rcode = NE_EXPR;
5461 }
5462
5463 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5464 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5465 return 0;
5466
5467 ll_arg = TREE_OPERAND (lhs, 0);
5468 lr_arg = TREE_OPERAND (lhs, 1);
5469 rl_arg = TREE_OPERAND (rhs, 0);
5470 rr_arg = TREE_OPERAND (rhs, 1);
5471
5472 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5473 if (simple_operand_p (ll_arg)
5474 && simple_operand_p (lr_arg))
5475 {
5476 if (operand_equal_p (ll_arg, rl_arg, 0)
5477 && operand_equal_p (lr_arg, rr_arg, 0))
5478 {
5479 result = combine_comparisons (loc, code, lcode, rcode,
5480 truth_type, ll_arg, lr_arg);
5481 if (result)
5482 return result;
5483 }
5484 else if (operand_equal_p (ll_arg, rr_arg, 0)
5485 && operand_equal_p (lr_arg, rl_arg, 0))
5486 {
5487 result = combine_comparisons (loc, code, lcode,
5488 swap_tree_comparison (rcode),
5489 truth_type, ll_arg, lr_arg);
5490 if (result)
5491 return result;
5492 }
5493 }
5494
5495 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5496 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5497
5498 /* If the RHS can be evaluated unconditionally and its operands are
5499 simple, it wins to evaluate the RHS unconditionally on machines
5500 with expensive branches. In this case, this isn't a comparison
5501 that can be merged. */
5502
5503 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5504 false) >= 2
5505 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5506 && simple_operand_p (rl_arg)
5507 && simple_operand_p (rr_arg))
5508 {
5509 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5510 if (code == TRUTH_OR_EXPR
5511 && lcode == NE_EXPR && integer_zerop (lr_arg)
5512 && rcode == NE_EXPR && integer_zerop (rr_arg)
5513 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5514 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5515 return build2_loc (loc, NE_EXPR, truth_type,
5516 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5517 ll_arg, rl_arg),
5518 build_int_cst (TREE_TYPE (ll_arg), 0));
5519
5520 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5521 if (code == TRUTH_AND_EXPR
5522 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5523 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5524 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5525 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5526 return build2_loc (loc, EQ_EXPR, truth_type,
5527 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5528 ll_arg, rl_arg),
5529 build_int_cst (TREE_TYPE (ll_arg), 0));
5530 }
5531
5532 /* See if the comparisons can be merged. Then get all the parameters for
5533 each side. */
5534
5535 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5536 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5537 return 0;
5538
5539 volatilep = 0;
5540 ll_inner = decode_field_reference (loc, ll_arg,
5541 &ll_bitsize, &ll_bitpos, &ll_mode,
5542 &ll_unsignedp, &volatilep, &ll_mask,
5543 &ll_and_mask);
5544 lr_inner = decode_field_reference (loc, lr_arg,
5545 &lr_bitsize, &lr_bitpos, &lr_mode,
5546 &lr_unsignedp, &volatilep, &lr_mask,
5547 &lr_and_mask);
5548 rl_inner = decode_field_reference (loc, rl_arg,
5549 &rl_bitsize, &rl_bitpos, &rl_mode,
5550 &rl_unsignedp, &volatilep, &rl_mask,
5551 &rl_and_mask);
5552 rr_inner = decode_field_reference (loc, rr_arg,
5553 &rr_bitsize, &rr_bitpos, &rr_mode,
5554 &rr_unsignedp, &volatilep, &rr_mask,
5555 &rr_and_mask);
5556
5557 /* The inner operation on the lhs of each comparison must be the
5558 same if we are to be able to do anything.
5559 Then see if we have constants. If not, the same must be true for
5560 the rhs's. */
5561 if (volatilep || ll_inner == 0 || rl_inner == 0
5562 || ! operand_equal_p (ll_inner, rl_inner, 0))
5563 return 0;
5564
5565 if (TREE_CODE (lr_arg) == INTEGER_CST
5566 && TREE_CODE (rr_arg) == INTEGER_CST)
5567 l_const = lr_arg, r_const = rr_arg;
5568 else if (lr_inner == 0 || rr_inner == 0
5569 || ! operand_equal_p (lr_inner, rr_inner, 0))
5570 return 0;
5571 else
5572 l_const = r_const = 0;
5573
5574 /* If either comparison code is not correct for our logical operation,
5575 fail. However, we can convert a one-bit comparison against zero into
5576 the opposite comparison against that bit being set in the field. */
5577
5578 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5579 if (lcode != wanted_code)
5580 {
5581 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5582 {
5583 /* Make the left operand unsigned, since we are only interested
5584 in the value of one bit. Otherwise we are doing the wrong
5585 thing below. */
5586 ll_unsignedp = 1;
5587 l_const = ll_mask;
5588 }
5589 else
5590 return 0;
5591 }
5592
5593 /* This is analogous to the code for l_const above. */
5594 if (rcode != wanted_code)
5595 {
5596 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5597 {
5598 rl_unsignedp = 1;
5599 r_const = rl_mask;
5600 }
5601 else
5602 return 0;
5603 }
5604
5605 /* See if we can find a mode that contains both fields being compared on
5606 the left. If we can't, fail. Otherwise, update all constants and masks
5607 to be relative to a field of that size. */
5608 first_bit = MIN (ll_bitpos, rl_bitpos);
5609 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5610 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5611 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5612 volatilep);
5613 if (lnmode == VOIDmode)
5614 return 0;
5615
5616 lnbitsize = GET_MODE_BITSIZE (lnmode);
5617 lnbitpos = first_bit & ~ (lnbitsize - 1);
5618 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5619 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5620
5621 if (BYTES_BIG_ENDIAN)
5622 {
5623 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5624 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5625 }
5626
5627 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5628 size_int (xll_bitpos));
5629 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5630 size_int (xrl_bitpos));
5631
5632 if (l_const)
5633 {
5634 l_const = fold_convert_loc (loc, lntype, l_const);
5635 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5636 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5637 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5638 fold_build1_loc (loc, BIT_NOT_EXPR,
5639 lntype, ll_mask))))
5640 {
5641 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5642
5643 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5644 }
5645 }
5646 if (r_const)
5647 {
5648 r_const = fold_convert_loc (loc, lntype, r_const);
5649 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5650 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5651 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5652 fold_build1_loc (loc, BIT_NOT_EXPR,
5653 lntype, rl_mask))))
5654 {
5655 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5656
5657 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5658 }
5659 }
5660
5661 /* If the right sides are not constant, do the same for them. Also,
5662 disallow this optimization if a size or signedness mismatch occurs
5663 between the left and right sides. */
5664 if (l_const == 0)
5665 {
5666 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5667 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5668 /* Make sure the two fields on the right
5669 correspond to the left without being swapped. */
5670 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5671 return 0;
5672
5673 first_bit = MIN (lr_bitpos, rr_bitpos);
5674 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5675 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5676 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5677 volatilep);
5678 if (rnmode == VOIDmode)
5679 return 0;
5680
5681 rnbitsize = GET_MODE_BITSIZE (rnmode);
5682 rnbitpos = first_bit & ~ (rnbitsize - 1);
5683 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5684 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5685
5686 if (BYTES_BIG_ENDIAN)
5687 {
5688 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5689 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5690 }
5691
5692 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5693 rntype, lr_mask),
5694 size_int (xlr_bitpos));
5695 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5696 rntype, rr_mask),
5697 size_int (xrr_bitpos));
5698
5699 /* Make a mask that corresponds to both fields being compared.
5700 Do this for both items being compared. If the operands are the
5701 same size and the bits being compared are in the same position
5702 then we can do this by masking both and comparing the masked
5703 results. */
5704 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5705 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5706 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5707 {
5708 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5709 ll_unsignedp || rl_unsignedp);
5710 if (! all_ones_mask_p (ll_mask, lnbitsize))
5711 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5712
5713 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5714 lr_unsignedp || rr_unsignedp);
5715 if (! all_ones_mask_p (lr_mask, rnbitsize))
5716 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5717
5718 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5719 }
5720
5721 /* There is still another way we can do something: If both pairs of
5722 fields being compared are adjacent, we may be able to make a wider
5723 field containing them both.
5724
5725 Note that we still must mask the lhs/rhs expressions. Furthermore,
5726 the mask must be shifted to account for the shift done by
5727 make_bit_field_ref. */
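      /* E.g. for adjacent one-byte fields A and B, a test like
	 "p->a == q->a && p->b == q->b" can become a single two-byte
	 load and comparison on each side.  */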
5728 if ((ll_bitsize + ll_bitpos == rl_bitpos
5729 && lr_bitsize + lr_bitpos == rr_bitpos)
5730 || (ll_bitpos == rl_bitpos + rl_bitsize
5731 && lr_bitpos == rr_bitpos + rr_bitsize))
5732 {
5733 tree type;
5734
5735 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5736 ll_bitsize + rl_bitsize,
5737 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5738 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5739 lr_bitsize + rr_bitsize,
5740 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5741
5742 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5743 size_int (MIN (xll_bitpos, xrl_bitpos)));
5744 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5745 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5746
5747 /* Convert to the smaller type before masking out unwanted bits. */
5748 type = lntype;
5749 if (lntype != rntype)
5750 {
5751 if (lnbitsize > rnbitsize)
5752 {
5753 lhs = fold_convert_loc (loc, rntype, lhs);
5754 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5755 type = rntype;
5756 }
5757 else if (lnbitsize < rnbitsize)
5758 {
5759 rhs = fold_convert_loc (loc, lntype, rhs);
5760 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5761 type = lntype;
5762 }
5763 }
5764
5765 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5766 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5767
5768 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5769 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5770
5771 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5772 }
5773
5774 return 0;
5775 }
5776
5777 /* Handle the case of comparisons with constants. If there is something in
5778 common between the masks, those bits of the constants must be the same.
5779 If not, the condition is always false. Test for this to avoid generating
5780 incorrect code below. */
5781 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5782 if (! integer_zerop (result)
5783 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5784 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5785 {
5786 if (wanted_code == NE_EXPR)
5787 {
5788 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5789 return constant_boolean_node (true, truth_type);
5790 }
5791 else
5792 {
5793 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5794 return constant_boolean_node (false, truth_type);
5795 }
5796 }
5797
5798 /* Construct the expression we will return. First get the component
5799 reference we will make. Unless the mask is all ones the width of
5800 that field, perform the mask operation. Then compare with the
5801 merged constant. */
5802 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5803 ll_unsignedp || rl_unsignedp);
5804
5805 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5806 if (! all_ones_mask_p (ll_mask, lnbitsize))
5807 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5808
5809 return build2_loc (loc, wanted_code, truth_type, result,
5810 const_binop (BIT_IOR_EXPR, l_const, r_const));
5811 }
5812 \f
5813 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5814 constant. */
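/* For example, "MIN (x, 4) > 1" becomes "x > 1", while "MAX (x, 4) < 1"
   folds to constant false (via the inverted GE_EXPR case), since the
   maximum can never be below 4.  */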
5815
5816 static tree
5817 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5818 tree op0, tree op1)
5819 {
5820 tree arg0 = op0;
5821 enum tree_code op_code;
5822 tree comp_const;
5823 tree minmax_const;
5824 int consts_equal, consts_lt;
5825 tree inner;
5826
5827 STRIP_SIGN_NOPS (arg0);
5828
5829 op_code = TREE_CODE (arg0);
5830 minmax_const = TREE_OPERAND (arg0, 1);
5831 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5832 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5833 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5834 inner = TREE_OPERAND (arg0, 0);
5835
5836 /* If something does not permit us to optimize, return the original tree. */
5837 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5838 || TREE_CODE (comp_const) != INTEGER_CST
5839 || TREE_OVERFLOW (comp_const)
5840 || TREE_CODE (minmax_const) != INTEGER_CST
5841 || TREE_OVERFLOW (minmax_const))
5842 return NULL_TREE;
5843
5844 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5845 and GT_EXPR, doing the rest with recursive calls using logical
5846 simplifications. */
5847 switch (code)
5848 {
5849 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5850 {
5851 tree tem
5852 = optimize_minmax_comparison (loc,
5853 invert_tree_comparison (code, false),
5854 type, op0, op1);
5855 if (tem)
5856 return invert_truthvalue_loc (loc, tem);
5857 return NULL_TREE;
5858 }
5859
5860 case GE_EXPR:
5861 return
5862 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5863 optimize_minmax_comparison
5864 (loc, EQ_EXPR, type, arg0, comp_const),
5865 optimize_minmax_comparison
5866 (loc, GT_EXPR, type, arg0, comp_const));
5867
5868 case EQ_EXPR:
5869 if (op_code == MAX_EXPR && consts_equal)
5870 /* MAX (X, 0) == 0 -> X <= 0 */
5871 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5872
5873 else if (op_code == MAX_EXPR && consts_lt)
5874 /* MAX (X, 0) == 5 -> X == 5 */
5875 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5876
5877 else if (op_code == MAX_EXPR)
5878 /* MAX (X, 0) == -1 -> false */
5879 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5880
5881 else if (consts_equal)
5882 /* MIN (X, 0) == 0 -> X >= 0 */
5883 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5884
5885 else if (consts_lt)
5886 /* MIN (X, 0) == 5 -> false */
5887 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5888
5889 else
5890 /* MIN (X, 0) == -1 -> X == -1 */
5891 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5892
5893 case GT_EXPR:
5894 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5895 /* MAX (X, 0) > 0 -> X > 0
5896 MAX (X, 0) > 5 -> X > 5 */
5897 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5898
5899 else if (op_code == MAX_EXPR)
5900 /* MAX (X, 0) > -1 -> true */
5901 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5902
5903 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5904 /* MIN (X, 0) > 0 -> false
5905 MIN (X, 0) > 5 -> false */
5906 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5907
5908 else
5909 /* MIN (X, 0) > -1 -> X > -1 */
5910 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5911
5912 default:
5913 return NULL_TREE;
5914 }
5915 }
5916 \f
5917 /* T is an integer expression that is being multiplied or divided by, or
5918 taken modulo, a constant C (CODE says which operation and what kind of
5919 divide or modulus). See if we can eliminate that operation by folding it with
5920 other operations already in T. WIDE_TYPE, if non-null, is a type that
5921 should be used for the computation if wider than our type.
5922
5923 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5924 (X * 2) + (Y * 4). We must, however, be assured that either the original
5925 expression would not overflow or that overflow is undefined for the type
5926 in the language in question.
5927
5928 If we return a non-null expression, it is an equivalent form of the
5929 original computation, but need not be in the original type.
5930
5931 We set *STRICT_OVERFLOW_P to true if the return value depends on
5932 signed overflow being undefined. Otherwise we do not change
5933 *STRICT_OVERFLOW_P. */
5934
5935 static tree
5936 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5937 bool *strict_overflow_p)
5938 {
5939 /* To avoid exponential search depth, refuse to allow recursion past
5940 three levels. Beyond that (1) it's highly unlikely that we'll find
5941 something interesting and (2) we've probably processed it before
5942 when we built the inner expression. */
5943
5944 static int depth;
5945 tree ret;
5946
5947 if (depth > 3)
5948 return NULL;
5949
5950 depth++;
5951 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5952 depth--;
5953
5954 return ret;
5955 }
5956
5957 static tree
5958 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5959 bool *strict_overflow_p)
5960 {
5961 tree type = TREE_TYPE (t);
5962 enum tree_code tcode = TREE_CODE (t);
5963 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5964 > GET_MODE_SIZE (TYPE_MODE (type)))
5965 ? wide_type : type);
5966 tree t1, t2;
5967 int same_p = tcode == code;
5968 tree op0 = NULL_TREE, op1 = NULL_TREE;
5969 bool sub_strict_overflow_p;
5970
5971 /* Don't deal with constants of zero here; they confuse the code below. */
5972 if (integer_zerop (c))
5973 return NULL_TREE;
5974
5975 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5976 op0 = TREE_OPERAND (t, 0);
5977
5978 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5979 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5980
5981 /* Note that we need not handle conditional operations here since fold
5982 already handles those cases. So just do arithmetic here. */
5983 switch (tcode)
5984 {
5985 case INTEGER_CST:
5986 /* For a constant, we can always simplify if we are a multiply
5987 or (for divide and modulus) if it is a multiple of our constant. */
5988 if (code == MULT_EXPR
5989 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5990 return const_binop (code, fold_convert (ctype, t),
5991 fold_convert (ctype, c));
5992 break;
5993
5994 CASE_CONVERT: case NON_LVALUE_EXPR:
5995 /* If op0 is an expression ... */
5996 if ((COMPARISON_CLASS_P (op0)
5997 || UNARY_CLASS_P (op0)
5998 || BINARY_CLASS_P (op0)
5999 || VL_EXP_CLASS_P (op0)
6000 || EXPRESSION_CLASS_P (op0))
6001 /* ... and has wrapping overflow, and its type is smaller
6002 than ctype, then we cannot pass through as widening. */
6003 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6004 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6005 && (TYPE_PRECISION (ctype)
6006 > TYPE_PRECISION (TREE_TYPE (op0))))
6007 /* ... or this is a truncation (t is narrower than op0),
6008 then we cannot pass through this narrowing. */
6009 || (TYPE_PRECISION (type)
6010 < TYPE_PRECISION (TREE_TYPE (op0)))
6011 /* ... or signedness changes for division or modulus,
6012 then we cannot pass through this conversion. */
6013 || (code != MULT_EXPR
6014 && (TYPE_UNSIGNED (ctype)
6015 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6016 /* ... or has undefined overflow while the type converted to
6017 does not, we cannot do the operation in the inner type
6018 as that would introduce undefined overflow. */
6019 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6020 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6021 && !TYPE_OVERFLOW_UNDEFINED (type))))
6022 break;
6023
6024 /* Pass the constant down and see if we can make a simplification. If
6025 we can, replace this expression with the inner simplification for
6026 possible later conversion to our or some other type. */
6027 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6028 && TREE_CODE (t2) == INTEGER_CST
6029 && !TREE_OVERFLOW (t2)
6030 && (0 != (t1 = extract_muldiv (op0, t2, code,
6031 code == MULT_EXPR
6032 ? ctype : NULL_TREE,
6033 strict_overflow_p))))
6034 return t1;
6035 break;
6036
6037 case ABS_EXPR:
6038 /* If widening the type changes it from signed to unsigned, then we
6039 must avoid building ABS_EXPR itself as unsigned. */
6040 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6041 {
6042 tree cstype = (*signed_type_for) (ctype);
6043 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6044 != 0)
6045 {
6046 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6047 return fold_convert (ctype, t1);
6048 }
6049 break;
6050 }
6051 /* If the constant is negative, we cannot simplify this. */
6052 if (tree_int_cst_sgn (c) == -1)
6053 break;
6054 /* FALLTHROUGH */
6055 case NEGATE_EXPR:
6056 /* For division and modulus, type can't be unsigned, as e.g.
6057 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6058 For signed types, even with wrapping overflow, this is fine. */
6059 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6060 break;
6061 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6062 != 0)
6063 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6064 break;
6065
6066 case MIN_EXPR: case MAX_EXPR:
6067 /* If widening the type changes the signedness, then we can't perform
6068 this optimization as that changes the result. */
6069 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6070 break;
6071
6072 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6073 sub_strict_overflow_p = false;
6074 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6075 &sub_strict_overflow_p)) != 0
6076 && (t2 = extract_muldiv (op1, c, code, wide_type,
6077 &sub_strict_overflow_p)) != 0)
6078 {
6079 if (tree_int_cst_sgn (c) < 0)
6080 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6081 if (sub_strict_overflow_p)
6082 *strict_overflow_p = true;
6083 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6084 fold_convert (ctype, t2));
6085 }
6086 break;
6087
6088 case LSHIFT_EXPR: case RSHIFT_EXPR:
6089 /* If the second operand is constant, this is a multiplication
6090 or floor division by a power of two, so we can treat it that
6091 way unless the multiplier or divisor overflows. Signed
6092 left-shift overflow is implementation-defined rather than
6093 undefined in C90, so do not convert signed left shift into
6094 multiplication. */
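      /* E.g. for unsigned X, "x << 3" is treated here as "x * 8" and
	 "x >> 3" as the floor division "x / 8".  */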
6095 if (TREE_CODE (op1) == INTEGER_CST
6096 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6097 /* const_binop may not detect overflow correctly,
6098 so check for it explicitly here. */
6099 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6100 && 0 != (t1 = fold_convert (ctype,
6101 const_binop (LSHIFT_EXPR,
6102 size_one_node,
6103 op1)))
6104 && !TREE_OVERFLOW (t1))
6105 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6106 ? MULT_EXPR : FLOOR_DIV_EXPR,
6107 ctype,
6108 fold_convert (ctype, op0),
6109 t1),
6110 c, code, wide_type, strict_overflow_p);
6111 break;
6112
6113 case PLUS_EXPR: case MINUS_EXPR:
6114 /* See if we can eliminate the operation on both sides. If we can, we
6115 can return a new PLUS or MINUS. If we can't, the only remaining
6116 cases where we can do anything are if the second operand is a
6117 constant. */
6118 sub_strict_overflow_p = false;
6119 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6120 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6121 if (t1 != 0 && t2 != 0
6122 && (code == MULT_EXPR
6123 /* If not multiplication, we can only do this if both operands
6124 are divisible by c. */
6125 || (multiple_of_p (ctype, op0, c)
6126 && multiple_of_p (ctype, op1, c))))
6127 {
6128 if (sub_strict_overflow_p)
6129 *strict_overflow_p = true;
6130 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6131 fold_convert (ctype, t2));
6132 }
6133
6134 /* If this was a subtraction, negate OP1 and set it to be an addition.
6135 This simplifies the logic below. */
6136 if (tcode == MINUS_EXPR)
6137 {
6138 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6139 /* If OP1 was not easily negatable, the constant may be OP0. */
6140 if (TREE_CODE (op0) == INTEGER_CST)
6141 {
6142 std::swap (op0, op1);
6143 std::swap (t1, t2);
6144 }
6145 }
6146
6147 if (TREE_CODE (op1) != INTEGER_CST)
6148 break;
6149
6150 /* If either OP1 or C is negative, this optimization is not safe for
6151 some of the division and remainder types, while for others we need
6152 to change the code. */
6153 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6154 {
6155 if (code == CEIL_DIV_EXPR)
6156 code = FLOOR_DIV_EXPR;
6157 else if (code == FLOOR_DIV_EXPR)
6158 code = CEIL_DIV_EXPR;
6159 else if (code != MULT_EXPR
6160 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6161 break;
6162 }
6163
6164 /* If it's a multiply or a division/modulus operation of a multiple
6165 of our constant, do the operation and verify it doesn't overflow. */
6166 if (code == MULT_EXPR
6167 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6168 {
6169 op1 = const_binop (code, fold_convert (ctype, op1),
6170 fold_convert (ctype, c));
6171 /* We allow the constant to overflow with wrapping semantics. */
6172 if (op1 == 0
6173 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6174 break;
6175 }
6176 else
6177 break;
6178
6179 /* If we have an unsigned type, we cannot widen the operation since it
6180 will change the result if the original computation overflowed. */
6181 if (TYPE_UNSIGNED (ctype) && ctype != type)
6182 break;
6183
6184 /* If we were able to eliminate our operation from the first side,
6185 apply our operation to the second side and reform the PLUS. */
6186 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6187 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6188
6189 /* The last case is if we are a multiply. In that case, we can
6190 apply the distributive law to commute the multiply and addition
6191 if the multiplication of the constants doesn't overflow
6192 and overflow is defined. With undefined overflow
6193 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
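      /* E.g. for a wrapping unsigned A, "(a + 3) * 4" is rewritten
	 here as "a * 4 + 12".  */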
6194 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6195 return fold_build2 (tcode, ctype,
6196 fold_build2 (code, ctype,
6197 fold_convert (ctype, op0),
6198 fold_convert (ctype, c)),
6199 op1);
6200
6201 break;
6202
6203 case MULT_EXPR:
6204 /* We have a special case here if we are doing something like
6205 (C * 8) % 4 since we know that's zero. */
6206 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6207 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6208 /* If the multiplication can overflow we cannot optimize this. */
6209 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6210 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6211 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6212 {
6213 *strict_overflow_p = true;
6214 return omit_one_operand (type, integer_zero_node, op0);
6215 }
6216
6217 /* ... fall through ... */
6218
6219 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6220 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6221 /* If we can extract our operation from the LHS, do so and return a
6222 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6223 do something only if the second operand is a constant. */
6224 if (same_p
6225 && (t1 = extract_muldiv (op0, c, code, wide_type,
6226 strict_overflow_p)) != 0)
6227 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6228 fold_convert (ctype, op1));
6229 else if (tcode == MULT_EXPR && code == MULT_EXPR
6230 && (t1 = extract_muldiv (op1, c, code, wide_type,
6231 strict_overflow_p)) != 0)
6232 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6233 fold_convert (ctype, t1));
6234 else if (TREE_CODE (op1) != INTEGER_CST)
6235 return 0;
6236
6237 /* If these are the same operation types, we can associate them
6238 assuming no overflow. */
6239 if (tcode == code)
6240 {
6241 bool overflow_p = false;
6242 bool overflow_mul_p;
6243 signop sign = TYPE_SIGN (ctype);
6244 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6245 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6246 if (overflow_mul_p
6247 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6248 overflow_p = true;
6249 if (!overflow_p)
6250 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6251 wide_int_to_tree (ctype, mul));
6252 }
6253
6254 /* If these operations "cancel" each other, we have the main
6255 optimizations of this pass, which occur when either constant is a
6256 multiple of the other, in which case we replace this with either an
6257 operation of CODE or TCODE.
6258
6259 If we have an unsigned type, we cannot do this since it will change
6260 the result if the original computation overflowed. */
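      /* For example, with signed X and undefined overflow, "(x * 8) / 4"
	 becomes "x * 2" and "(x * 4) / 8" becomes "x / 2".  */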
6261 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6262 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6263 || (tcode == MULT_EXPR
6264 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6265 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6266 && code != MULT_EXPR)))
6267 {
6268 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6269 {
6270 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6271 *strict_overflow_p = true;
6272 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6273 fold_convert (ctype,
6274 const_binop (TRUNC_DIV_EXPR,
6275 op1, c)));
6276 }
6277 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6278 {
6279 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6280 *strict_overflow_p = true;
6281 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6282 fold_convert (ctype,
6283 const_binop (TRUNC_DIV_EXPR,
6284 c, op1)));
6285 }
6286 }
6287 break;
6288
6289 default:
6290 break;
6291 }
6292
6293 return 0;
6294 }
6295 \f
6296 /* Return a node which has the indicated constant VALUE (either 0 or
6297 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6298 and is of the indicated TYPE. */
6299
6300 tree
6301 constant_boolean_node (bool value, tree type)
6302 {
6303 if (type == integer_type_node)
6304 return value ? integer_one_node : integer_zero_node;
6305 else if (type == boolean_type_node)
6306 return value ? boolean_true_node : boolean_false_node;
6307 else if (TREE_CODE (type) == VECTOR_TYPE)
6308 return build_vector_from_val (type,
6309 build_int_cst (TREE_TYPE (type),
6310 value ? -1 : 0));
6311 else
6312 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6313 }
6314
6315
6316 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6317 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6318 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6319 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6320 COND is the first argument to CODE; otherwise (as in the example
6321 given here), it is the second argument. TYPE is the type of the
6322 original expression. Return NULL_TREE if no simplification is
6323 possible. */
6324
6325 static tree
6326 fold_binary_op_with_conditional_arg (location_t loc,
6327 enum tree_code code,
6328 tree type, tree op0, tree op1,
6329 tree cond, tree arg, int cond_first_p)
6330 {
6331 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6332 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6333 tree test, true_value, false_value;
6334 tree lhs = NULL_TREE;
6335 tree rhs = NULL_TREE;
6336 enum tree_code cond_code = COND_EXPR;
6337
6338 if (TREE_CODE (cond) == COND_EXPR
6339 || TREE_CODE (cond) == VEC_COND_EXPR)
6340 {
6341 test = TREE_OPERAND (cond, 0);
6342 true_value = TREE_OPERAND (cond, 1);
6343 false_value = TREE_OPERAND (cond, 2);
6344 /* If this operand throws an exception, then it does not make
6345 sense to try to perform a logical or arithmetic operation
6346 involving it. */
6347 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6348 lhs = true_value;
6349 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6350 rhs = false_value;
6351 }
6352 else
6353 {
6354 tree testtype = TREE_TYPE (cond);
6355 test = cond;
6356 true_value = constant_boolean_node (true, testtype);
6357 false_value = constant_boolean_node (false, testtype);
6358 }
6359
6360 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6361 cond_code = VEC_COND_EXPR;
6362
6363 /* This transformation is only worthwhile if we don't have to wrap ARG
6364 in a SAVE_EXPR and the operation can be simplified without recursing
6365 on at least one of the branches once it is pushed inside the COND_EXPR. */
6366 if (!TREE_CONSTANT (arg)
6367 && (TREE_SIDE_EFFECTS (arg)
6368 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6369 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6370 return NULL_TREE;
6371
6372 arg = fold_convert_loc (loc, arg_type, arg);
6373 if (lhs == 0)
6374 {
6375 true_value = fold_convert_loc (loc, cond_type, true_value);
6376 if (cond_first_p)
6377 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6378 else
6379 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6380 }
6381 if (rhs == 0)
6382 {
6383 false_value = fold_convert_loc (loc, cond_type, false_value);
6384 if (cond_first_p)
6385 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6386 else
6387 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6388 }
6389
6390 /* Check that we have simplified at least one of the branches. */
6391 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6392 return NULL_TREE;
6393
6394 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6395 }
6396
6397 \f
6398 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6399
6400 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6401 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6402 ADDEND is the same as X.
6403
6404 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6405 and finite. The problematic cases are when X is zero, and its mode
6406 has signed zeros. In the case of rounding towards -infinity,
6407 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6408 modes, X + 0 is not the same as X because -0 + 0 is 0. */
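/* For example, when signed zeros are honored, "x + 0.0" cannot be
   folded to "x" (it would turn -0.0 into +0.0), while "x - 0.0" can,
   unless sign-dependent rounding is in effect.  */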
6409
6410 bool
6411 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6412 {
6413 if (!real_zerop (addend))
6414 return false;
6415
6416 /* Don't allow the fold with -fsignaling-nans. */
6417 if (HONOR_SNANS (element_mode (type)))
6418 return false;
6419
6420 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6421 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6422 return true;
6423
6424 /* In a vector or complex, we would need to check the sign of all zeros. */
6425 if (TREE_CODE (addend) != REAL_CST)
6426 return false;
6427
6428 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6429 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6430 negate = !negate;
6431
6432 /* The mode has signed zeros, and we have to honor their sign.
6433 In this situation, there is only one case we can return true for.
6434 X - 0 is the same as X unless rounding towards -infinity is
6435 supported. */
6436 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6437 }
6438
6439 /* Subroutine of fold() that checks comparisons of built-in math
6440 functions against real constants.
6441
6442 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6443 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6444 is the type of the result and ARG0 and ARG1 are the operands of the
6445 comparison. ARG1 must be a TREE_REAL_CST.
6446
6447 The function returns the constant folded tree if a simplification
6448 can be made, and NULL_TREE otherwise. */
6449
6450 static tree
6451 fold_mathfn_compare (location_t loc,
6452 enum built_in_function fcode, enum tree_code code,
6453 tree type, tree arg0, tree arg1)
6454 {
6455 REAL_VALUE_TYPE c;
6456
6457 if (BUILTIN_SQRT_P (fcode))
6458 {
6459 tree arg = CALL_EXPR_ARG (arg0, 0);
6460 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6461
6462 c = TREE_REAL_CST (arg1);
6463 if (REAL_VALUE_NEGATIVE (c))
6464 {
6465 /* sqrt(x) < y is always false, if y is negative. */
6466 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6467 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6468
6469 /* sqrt(x) > y is always true, if y is negative and we
6470 don't care about NaNs, i.e. negative values of x. */
6471 if (code == NE_EXPR || !HONOR_NANS (mode))
6472 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6473
6474 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6475 return fold_build2_loc (loc, GE_EXPR, type, arg,
6476 build_real (TREE_TYPE (arg), dconst0));
6477 }
6478 else if (code == GT_EXPR || code == GE_EXPR)
6479 {
6480 REAL_VALUE_TYPE c2;
6481
6482 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6483 real_convert (&c2, mode, &c2);
6484
6485 if (REAL_VALUE_ISINF (c2))
6486 {
6487 /* sqrt(x) > y is x == +Inf, when y is very large. */
6488 if (HONOR_INFINITIES (mode))
6489 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6490 build_real (TREE_TYPE (arg), c2));
6491
6492 /* sqrt(x) > y is always false, when y is very large
6493 and we don't care about infinities. */
6494 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6495 }
6496
6497 /* sqrt(x) > c is the same as x > c*c. */
6498 return fold_build2_loc (loc, code, type, arg,
6499 build_real (TREE_TYPE (arg), c2));
6500 }
6501 else if (code == LT_EXPR || code == LE_EXPR)
6502 {
6503 REAL_VALUE_TYPE c2;
6504
6505 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6506 real_convert (&c2, mode, &c2);
6507
6508 if (REAL_VALUE_ISINF (c2))
6509 {
6510 /* sqrt(x) < y is always true, when y is a very large
6511 value and we don't care about NaNs or Infinities. */
6512 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6513 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6514
6515 /* sqrt(x) < y is x != +Inf when y is very large and we
6516 don't care about NaNs. */
6517 if (! HONOR_NANS (mode))
6518 return fold_build2_loc (loc, NE_EXPR, type, arg,
6519 build_real (TREE_TYPE (arg), c2));
6520
6521 /* sqrt(x) < y is x >= 0 when y is very large and we
6522 don't care about Infinities. */
6523 if (! HONOR_INFINITIES (mode))
6524 return fold_build2_loc (loc, GE_EXPR, type, arg,
6525 build_real (TREE_TYPE (arg), dconst0));
6526
6527 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6528 arg = save_expr (arg);
6529 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6530 fold_build2_loc (loc, GE_EXPR, type, arg,
6531 build_real (TREE_TYPE (arg),
6532 dconst0)),
6533 fold_build2_loc (loc, NE_EXPR, type, arg,
6534 build_real (TREE_TYPE (arg),
6535 c2)));
6536 }
6537
6538 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6539 if (! HONOR_NANS (mode))
6540 return fold_build2_loc (loc, code, type, arg,
6541 build_real (TREE_TYPE (arg), c2));
6542
6543 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6544 arg = save_expr (arg);
6545 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6546 fold_build2_loc (loc, GE_EXPR, type, arg,
6547 build_real (TREE_TYPE (arg),
6548 dconst0)),
6549 fold_build2_loc (loc, code, type, arg,
6550 build_real (TREE_TYPE (arg),
6551 c2)));
6552 }
6553 }
6554
6555 return NULL_TREE;
6556 }
6557
6558 /* Subroutine of fold() that optimizes comparisons against Infinities,
6559 either +Inf or -Inf.
6560
6561 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6562 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6563 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6564
6565 The function returns the constant folded tree if a simplification
6566 can be made, and NULL_TREE otherwise. */
6567
6568 static tree
6569 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6570 tree arg0, tree arg1)
6571 {
6572 machine_mode mode;
6573 REAL_VALUE_TYPE max;
6574 tree temp;
6575 bool neg;
6576
6577 mode = TYPE_MODE (TREE_TYPE (arg0));
6578
6579 /* For negative infinity swap the sense of the comparison. */
6580 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6581 if (neg)
6582 code = swap_tree_comparison (code);
6583
6584 switch (code)
6585 {
6586 case GT_EXPR:
6587 /* x > +Inf is always false, if we ignore sNANs. */
6588 if (HONOR_SNANS (mode))
6589 return NULL_TREE;
6590 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6591
6592 case LE_EXPR:
6593 /* x <= +Inf is always true, if we don't care about NaNs. */
6594 if (! HONOR_NANS (mode))
6595 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6596
6597 /* x <= +Inf is the same as x == x, i.e. x is not a NaN. */
6598 arg0 = save_expr (arg0);
6599 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6600
6601 case EQ_EXPR:
6602 case GE_EXPR:
6603 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6604 real_maxval (&max, neg, mode);
6605 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6606 arg0, build_real (TREE_TYPE (arg0), max));
6607
6608 case LT_EXPR:
6609 /* x < +Inf is always equal to x <= DBL_MAX. */
6610 real_maxval (&max, neg, mode);
6611 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6612 arg0, build_real (TREE_TYPE (arg0), max));
6613
6614 case NE_EXPR:
6615 /* x != +Inf is always equal to !(x > DBL_MAX). */
6616 real_maxval (&max, neg, mode);
6617 if (! HONOR_NANS (mode))
6618 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6619 arg0, build_real (TREE_TYPE (arg0), max));
6620
6621 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6622 arg0, build_real (TREE_TYPE (arg0), max));
6623 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6624
6625 default:
6626 break;
6627 }
6628
6629 return NULL_TREE;
6630 }
6631
6632 /* Subroutine of fold() that optimizes comparisons of a division by
6633 a nonzero integer constant against an integer constant, i.e.
6634 X/C1 op C2.
6635
6636 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6637 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6638 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6639
6640 The function returns the constant folded tree if a simplification
6641 can be made, and NULL_TREE otherwise. */
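/* For instance, with unsigned X the test "x / 4 == 3" holds exactly
   for X in [12, 15], so it becomes the corresponding range check
   built below, and "x / 4 > 3" becomes "x > 15".  */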
6642
6643 static tree
6644 fold_div_compare (location_t loc,
6645 enum tree_code code, tree type, tree arg0, tree arg1)
6646 {
6647 tree prod, tmp, hi, lo;
6648 tree arg00 = TREE_OPERAND (arg0, 0);
6649 tree arg01 = TREE_OPERAND (arg0, 1);
6650 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6651 bool neg_overflow = false;
6652 bool overflow;
6653
6654 /* We have to do this the hard way to detect unsigned overflow.
6655 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6656 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6657 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6658 neg_overflow = false;
6659
6660 if (sign == UNSIGNED)
6661 {
6662 tmp = int_const_binop (MINUS_EXPR, arg01,
6663 build_int_cst (TREE_TYPE (arg01), 1));
6664 lo = prod;
6665
6666 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6667 val = wi::add (prod, tmp, sign, &overflow);
6668 hi = force_fit_type (TREE_TYPE (arg00), val,
6669 -1, overflow | TREE_OVERFLOW (prod));
6670 }
6671 else if (tree_int_cst_sgn (arg01) >= 0)
6672 {
6673 tmp = int_const_binop (MINUS_EXPR, arg01,
6674 build_int_cst (TREE_TYPE (arg01), 1));
6675 switch (tree_int_cst_sgn (arg1))
6676 {
6677 case -1:
6678 neg_overflow = true;
6679 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6680 hi = prod;
6681 break;
6682
6683 case 0:
6684 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6685 hi = tmp;
6686 break;
6687
6688 case 1:
6689 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6690 lo = prod;
6691 break;
6692
6693 default:
6694 gcc_unreachable ();
6695 }
6696 }
6697 else
6698 {
6699 /* A negative divisor reverses the relational operators. */
6700 code = swap_tree_comparison (code);
6701
6702 tmp = int_const_binop (PLUS_EXPR, arg01,
6703 build_int_cst (TREE_TYPE (arg01), 1));
6704 switch (tree_int_cst_sgn (arg1))
6705 {
6706 case -1:
6707 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6708 lo = prod;
6709 break;
6710
6711 case 0:
6712 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6713 lo = tmp;
6714 break;
6715
6716 case 1:
6717 neg_overflow = true;
6718 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6719 hi = prod;
6720 break;
6721
6722 default:
6723 gcc_unreachable ();
6724 }
6725 }
6726
6727 switch (code)
6728 {
6729 case EQ_EXPR:
6730 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6731 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6732 if (TREE_OVERFLOW (hi))
6733 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6734 if (TREE_OVERFLOW (lo))
6735 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6736 return build_range_check (loc, type, arg00, 1, lo, hi);
6737
6738 case NE_EXPR:
6739 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6740 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6741 if (TREE_OVERFLOW (hi))
6742 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6743 if (TREE_OVERFLOW (lo))
6744 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6745 return build_range_check (loc, type, arg00, 0, lo, hi);
6746
6747 case LT_EXPR:
6748 if (TREE_OVERFLOW (lo))
6749 {
6750 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6751 return omit_one_operand_loc (loc, type, tmp, arg00);
6752 }
6753 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6754
6755 case LE_EXPR:
6756 if (TREE_OVERFLOW (hi))
6757 {
6758 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6759 return omit_one_operand_loc (loc, type, tmp, arg00);
6760 }
6761 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6762
6763 case GT_EXPR:
6764 if (TREE_OVERFLOW (hi))
6765 {
6766 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6767 return omit_one_operand_loc (loc, type, tmp, arg00);
6768 }
6769 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6770
6771 case GE_EXPR:
6772 if (TREE_OVERFLOW (lo))
6773 {
6774 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6775 return omit_one_operand_loc (loc, type, tmp, arg00);
6776 }
6777 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6778
6779 default:
6780 break;
6781 }
6782
6783 return NULL_TREE;
6784 }
6785
6786
6787 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6788 equality/inequality test, then return a simplified form of the test
6789 using a sign testing. Otherwise return NULL. TYPE is the desired
6790 result type. */
6791
6792 static tree
6793 fold_single_bit_test_into_sign_test (location_t loc,
6794 enum tree_code code, tree arg0, tree arg1,
6795 tree result_type)
6796 {
6797 /* If this is testing a single bit, we can optimize the test. */
6798 if ((code == NE_EXPR || code == EQ_EXPR)
6799 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6800 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6801 {
6802 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6803 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6804 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6805
6806 if (arg00 != NULL_TREE
6807 /* This is only a win if casting to a signed type is cheap,
6808 i.e. when arg00's type is not a partial mode. */
6809 && TYPE_PRECISION (TREE_TYPE (arg00))
6810 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6811 {
6812 tree stype = signed_type_for (TREE_TYPE (arg00));
6813 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6814 result_type,
6815 fold_convert_loc (loc, stype, arg00),
6816 build_int_cst (stype, 0));
6817 }
6818 }
6819
6820 return NULL_TREE;
6821 }
6822
6823 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6824 equality/inequality test, then return a simplified form of
6825 the test using shifts and logical operations. Otherwise return
6826 NULL. TYPE is the desired result type. */
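/* For example, "(x & 8) != 0" becomes "((unsigned_type) x >> 3) & 1"
   when it cannot be turned into a sign test (the shift is usually done
   in the unsigned type, as chosen below).  */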
6827
6828 tree
6829 fold_single_bit_test (location_t loc, enum tree_code code,
6830 tree arg0, tree arg1, tree result_type)
6831 {
6832 /* If this is testing a single bit, we can optimize the test. */
6833 if ((code == NE_EXPR || code == EQ_EXPR)
6834 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6835 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6836 {
6837 tree inner = TREE_OPERAND (arg0, 0);
6838 tree type = TREE_TYPE (arg0);
6839 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6840 machine_mode operand_mode = TYPE_MODE (type);
6841 int ops_unsigned;
6842 tree signed_type, unsigned_type, intermediate_type;
6843 tree tem, one;
6844
6845 /* First, see if we can fold the single bit test into a sign-bit
6846 test. */
6847 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6848 result_type);
6849 if (tem)
6850 return tem;
6851
6852 /* Otherwise we have (A & C) != 0 where C is a single bit,
6853 convert that into ((A >> C2) & 1), where C2 = log2(C).
6854 Similarly for (A & C) == 0. */
6855
6856 /* If INNER is a right shift by a constant and it plus BITNUM does
6857 not overflow, adjust BITNUM and INNER. */
6858 if (TREE_CODE (inner) == RSHIFT_EXPR
6859 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6860 && bitnum < TYPE_PRECISION (type)
6861 && wi::ltu_p (TREE_OPERAND (inner, 1),
6862 TYPE_PRECISION (type) - bitnum))
6863 {
6864 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6865 inner = TREE_OPERAND (inner, 0);
6866 }
6867
6868 /* If we are going to be able to omit the AND below, we must do our
6869 operations as unsigned. If we must use the AND, we have a choice.
6870 Normally unsigned is faster, but for some machines signed is. */
6871 #ifdef LOAD_EXTEND_OP
6872 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6873 && !flag_syntax_only) ? 0 : 1;
6874 #else
6875 ops_unsigned = 1;
6876 #endif
6877
6878 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6879 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6880 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6881 inner = fold_convert_loc (loc, intermediate_type, inner);
6882
6883 if (bitnum != 0)
6884 inner = build2 (RSHIFT_EXPR, intermediate_type,
6885 inner, size_int (bitnum));
6886
6887 one = build_int_cst (intermediate_type, 1);
6888
6889 if (code == EQ_EXPR)
6890 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6891
6892 /* Put the AND last so it can combine with more things. */
6893 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6894
6895 /* Make sure to return the proper type. */
6896 inner = fold_convert_loc (loc, result_type, inner);
6897
6898 return inner;
6899 }
6900 return NULL_TREE;
6901 }
6902
6903 /* Check whether we are allowed to reorder operands arg0 and arg1,
6904 such that the evaluation of arg1 occurs before arg0. */
6905
6906 static bool
6907 reorder_operands_p (const_tree arg0, const_tree arg1)
6908 {
6909 if (! flag_evaluation_order)
6910 return true;
6911 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6912 return true;
6913 return ! TREE_SIDE_EFFECTS (arg0)
6914 && ! TREE_SIDE_EFFECTS (arg1);
6915 }
6916
6917 /* Test whether it is preferable to swap two operands, ARG0 and
6918 ARG1, for example because ARG0 is an integer constant and ARG1
6919 isn't. If REORDER is true, only recommend swapping if we can
6920 evaluate the operands in reverse order. */
6921
6922 bool
6923 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6924 {
6925 if (CONSTANT_CLASS_P (arg1))
6926 return 0;
6927 if (CONSTANT_CLASS_P (arg0))
6928 return 1;
6929
6930 STRIP_NOPS (arg0);
6931 STRIP_NOPS (arg1);
6932
6933 if (TREE_CONSTANT (arg1))
6934 return 0;
6935 if (TREE_CONSTANT (arg0))
6936 return 1;
6937
6938 if (reorder && flag_evaluation_order
6939 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6940 return 0;
6941
6942 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6943 for commutative and comparison operators. Ensuring a canonical
6944 form allows the optimizers to find additional redundancies without
6945 having to explicitly check for both orderings. */
6946 if (TREE_CODE (arg0) == SSA_NAME
6947 && TREE_CODE (arg1) == SSA_NAME
6948 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6949 return 1;
6950
6951 /* Put SSA_NAMEs last. */
6952 if (TREE_CODE (arg1) == SSA_NAME)
6953 return 0;
6954 if (TREE_CODE (arg0) == SSA_NAME)
6955 return 1;
6956
6957 /* Put variables last. */
6958 if (DECL_P (arg1))
6959 return 0;
6960 if (DECL_P (arg0))
6961 return 1;
6962
6963 return 0;
6964 }
6965
6966 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6967 ARG0 is extended to a wider type. */
6968
6969 static tree
6970 fold_widened_comparison (location_t loc, enum tree_code code,
6971 tree type, tree arg0, tree arg1)
6972 {
6973 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6974 tree arg1_unw;
6975 tree shorter_type, outer_type;
6976 tree min, max;
6977 bool above, below;
6978
6979 if (arg0_unw == arg0)
6980 return NULL_TREE;
6981 shorter_type = TREE_TYPE (arg0_unw);
6982
6983 #ifdef HAVE_canonicalize_funcptr_for_compare
6984 /* Disable this optimization if we're casting a function pointer
6985 type on targets that require function pointer canonicalization. */
6986 if (HAVE_canonicalize_funcptr_for_compare
6987 && TREE_CODE (shorter_type) == POINTER_TYPE
6988 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6989 return NULL_TREE;
6990 #endif
6991
6992 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6993 return NULL_TREE;
6994
6995 arg1_unw = get_unwidened (arg1, NULL_TREE);
6996
6997 /* If possible, express the comparison in the shorter mode. */
6998 if ((code == EQ_EXPR || code == NE_EXPR
6999 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
7000 && (TREE_TYPE (arg1_unw) == shorter_type
7001 || ((TYPE_PRECISION (shorter_type)
7002 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
7003 && (TYPE_UNSIGNED (shorter_type)
7004 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
7005 || (TREE_CODE (arg1_unw) == INTEGER_CST
7006 && (TREE_CODE (shorter_type) == INTEGER_TYPE
7007 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
7008 && int_fits_type_p (arg1_unw, shorter_type))))
7009 return fold_build2_loc (loc, code, type, arg0_unw,
7010 fold_convert_loc (loc, shorter_type, arg1_unw));
7011
7012 if (TREE_CODE (arg1_unw) != INTEGER_CST
7013 || TREE_CODE (shorter_type) != INTEGER_TYPE
7014 || !int_fits_type_p (arg1_unw, shorter_type))
7015 return NULL_TREE;
7016
7017 /* If we are comparing with an integer that does not fit into the range
7018 of the shorter type, the result is known. */
7019 outer_type = TREE_TYPE (arg1_unw);
7020 min = lower_bound_in_type (outer_type, shorter_type);
7021 max = upper_bound_in_type (outer_type, shorter_type);
7022
7023 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7024 max, arg1_unw));
7025 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7026 arg1_unw, min));
7027
7028 switch (code)
7029 {
7030 case EQ_EXPR:
7031 if (above || below)
7032 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7033 break;
7034
7035 case NE_EXPR:
7036 if (above || below)
7037 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7038 break;
7039
7040 case LT_EXPR:
7041 case LE_EXPR:
7042 if (above)
7043 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7044 else if (below)
7045 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7046
7047 case GT_EXPR:
7048 case GE_EXPR:
7049 if (above)
7050 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7051 else if (below)
7052 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7053
7054 default:
7055 break;
7056 }
7057
7058 return NULL_TREE;
7059 }
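
/* A standalone illustration (not part of GCC; guarded out) of the "known
   result" reasoning above: once a narrow value is widened, comparing it
   against a constant outside the narrow type's range has a result that does
   not depend on the value.  The concrete constant 300 is illustrative.  */
#if 0
#include <assert.h>
#include <limits.h>
int
main (void)
{
  for (int i = SCHAR_MIN; i <= SCHAR_MAX; i++)
    {
      signed char c = (signed char) i;
      assert (((int) c < 300) == 1);   /* 300 > SCHAR_MAX: always true.  */
      assert (((int) c == 300) == 0);  /* ... and equality is always false.  */
    }
  return 0;
}
#endif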
7060
7061 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7062 ARG0 just the signedness is changed. */
7063
7064 static tree
7065 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7066 tree arg0, tree arg1)
7067 {
7068 tree arg0_inner;
7069 tree inner_type, outer_type;
7070
7071 if (!CONVERT_EXPR_P (arg0))
7072 return NULL_TREE;
7073
7074 outer_type = TREE_TYPE (arg0);
7075 arg0_inner = TREE_OPERAND (arg0, 0);
7076 inner_type = TREE_TYPE (arg0_inner);
7077
7078 #ifdef HAVE_canonicalize_funcptr_for_compare
7079 /* Disable this optimization if we're casting a function pointer
7080 type on targets that require function pointer canonicalization. */
7081 if (HAVE_canonicalize_funcptr_for_compare
7082 && TREE_CODE (inner_type) == POINTER_TYPE
7083 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7084 return NULL_TREE;
7085 #endif
7086
7087 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7088 return NULL_TREE;
7089
7090 if (TREE_CODE (arg1) != INTEGER_CST
7091 && !(CONVERT_EXPR_P (arg1)
7092 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7093 return NULL_TREE;
7094
7095 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7096 && code != NE_EXPR
7097 && code != EQ_EXPR)
7098 return NULL_TREE;
7099
7100 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7101 return NULL_TREE;
7102
7103 if (TREE_CODE (arg1) == INTEGER_CST)
7104 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7105 TREE_OVERFLOW (arg1));
7106 else
7107 arg1 = fold_convert_loc (loc, inner_type, arg1);
7108
7109 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7110 }
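
/* A standalone check (not part of GCC; guarded out) of the equality case
   handled above: converting both operands between signed and unsigned types
   of the same precision does not change the outcome of == or !=, because the
   conversion is a bijection on the value bits.  Ranges are illustrative.  */
#if 0
#include <assert.h>
int
main (void)
{
  for (int x = -100; x <= 100; x++)
    for (int y = -100; y <= 100; y++)
      assert (((unsigned) x == (unsigned) y) == (x == y));
  return 0;
}
#endif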
7111
7112
7113 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7114 means A >= Y && A != MAX, but in this case we know that
7115 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7116
7117 static tree
7118 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7119 {
7120 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7121
7122 if (TREE_CODE (bound) == LT_EXPR)
7123 a = TREE_OPERAND (bound, 0);
7124 else if (TREE_CODE (bound) == GT_EXPR)
7125 a = TREE_OPERAND (bound, 1);
7126 else
7127 return NULL_TREE;
7128
7129 typea = TREE_TYPE (a);
7130 if (!INTEGRAL_TYPE_P (typea)
7131 && !POINTER_TYPE_P (typea))
7132 return NULL_TREE;
7133
7134 if (TREE_CODE (ineq) == LT_EXPR)
7135 {
7136 a1 = TREE_OPERAND (ineq, 1);
7137 y = TREE_OPERAND (ineq, 0);
7138 }
7139 else if (TREE_CODE (ineq) == GT_EXPR)
7140 {
7141 a1 = TREE_OPERAND (ineq, 0);
7142 y = TREE_OPERAND (ineq, 1);
7143 }
7144 else
7145 return NULL_TREE;
7146
7147 if (TREE_TYPE (a1) != typea)
7148 return NULL_TREE;
7149
7150 if (POINTER_TYPE_P (typea))
7151 {
7152 /* Convert the pointer types into integers before taking the difference. */
7153 tree ta = fold_convert_loc (loc, ssizetype, a);
7154 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7155 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7156 }
7157 else
7158 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7159
7160 if (!diff || !integer_onep (diff))
7161 return NULL_TREE;
7162
7163 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7164 }
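
/* A small standalone sanity check (not part of GCC; guarded out) of the
   rewrite above: for integers that do not wrap, A + 1 > Y agrees with
   A >= Y, so under the bound A < X the two forms of the conjunction are
   interchangeable.  The ranges below are arbitrary illustrations.  */
#if 0
#include <assert.h>
int
main (void)
{
  for (int a = -50; a <= 50; a++)
    for (int x = -50; x <= 50; x++)
      for (int y = -50; y <= 50; y++)
        if (a < x)
          assert ((a < x && a + 1 > y) == (a < x && a >= y));
  return 0;
}
#endif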
7165
7166 /* Fold a sum or difference of at least one multiplication.
7167 Returns the folded tree or NULL if no simplification could be made. */
7168
7169 static tree
7170 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7171 tree arg0, tree arg1)
7172 {
7173 tree arg00, arg01, arg10, arg11;
7174 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7175
7176 /* (A * C) +- (B * C) -> (A+-B) * C.
7177 (A * C) +- A -> A * (C+-1).
7178 We are most concerned about the case where C is a constant,
7179 but other combinations show up during loop reduction. Since
7180 it is not difficult, try all four possibilities. */
7181
7182 if (TREE_CODE (arg0) == MULT_EXPR)
7183 {
7184 arg00 = TREE_OPERAND (arg0, 0);
7185 arg01 = TREE_OPERAND (arg0, 1);
7186 }
7187 else if (TREE_CODE (arg0) == INTEGER_CST)
7188 {
7189 arg00 = build_one_cst (type);
7190 arg01 = arg0;
7191 }
7192 else
7193 {
7194 /* We cannot generate constant 1 for fract. */
7195 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7196 return NULL_TREE;
7197 arg00 = arg0;
7198 arg01 = build_one_cst (type);
7199 }
7200 if (TREE_CODE (arg1) == MULT_EXPR)
7201 {
7202 arg10 = TREE_OPERAND (arg1, 0);
7203 arg11 = TREE_OPERAND (arg1, 1);
7204 }
7205 else if (TREE_CODE (arg1) == INTEGER_CST)
7206 {
7207 arg10 = build_one_cst (type);
7208 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7209 the purpose of this canonicalization. */
7210 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7211 && negate_expr_p (arg1)
7212 && code == PLUS_EXPR)
7213 {
7214 arg11 = negate_expr (arg1);
7215 code = MINUS_EXPR;
7216 }
7217 else
7218 arg11 = arg1;
7219 }
7220 else
7221 {
7222 /* We cannot generate constant 1 for fract. */
7223 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7224 return NULL_TREE;
7225 arg10 = arg1;
7226 arg11 = build_one_cst (type);
7227 }
7228 same = NULL_TREE;
7229
7230 if (operand_equal_p (arg01, arg11, 0))
7231 same = arg01, alt0 = arg00, alt1 = arg10;
7232 else if (operand_equal_p (arg00, arg10, 0))
7233 same = arg00, alt0 = arg01, alt1 = arg11;
7234 else if (operand_equal_p (arg00, arg11, 0))
7235 same = arg00, alt0 = arg01, alt1 = arg10;
7236 else if (operand_equal_p (arg01, arg10, 0))
7237 same = arg01, alt0 = arg00, alt1 = arg11;
7238
7239 /* No identical multiplicands; see if we can find a common
7240 power-of-two factor in non-power-of-two multiplies. This
7241 can help in multi-dimensional array access. */
7242 else if (tree_fits_shwi_p (arg01)
7243 && tree_fits_shwi_p (arg11))
7244 {
7245 HOST_WIDE_INT int01, int11, tmp;
7246 bool swap = false;
7247 tree maybe_same;
7248 int01 = tree_to_shwi (arg01);
7249 int11 = tree_to_shwi (arg11);
7250
7251 /* Move min of absolute values to int11. */
7252 if (absu_hwi (int01) < absu_hwi (int11))
7253 {
7254 tmp = int01, int01 = int11, int11 = tmp;
7255 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7256 maybe_same = arg01;
7257 swap = true;
7258 }
7259 else
7260 maybe_same = arg11;
7261
7262 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7263 /* The remainder should not be a constant, otherwise we
7264 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7265 increase the number of multiplications necessary. */
7266 && TREE_CODE (arg10) != INTEGER_CST)
7267 {
7268 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7269 build_int_cst (TREE_TYPE (arg00),
7270 int01 / int11));
7271 alt1 = arg10;
7272 same = maybe_same;
7273 if (swap)
7274 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7275 }
7276 }
7277
7278 if (same)
7279 return fold_build2_loc (loc, MULT_EXPR, type,
7280 fold_build2_loc (loc, code, type,
7281 fold_convert_loc (loc, type, alt0),
7282 fold_convert_loc (loc, type, alt1)),
7283 fold_convert_loc (loc, type, same));
7284
7285 return NULL_TREE;
7286 }
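
/* A standalone illustration (not part of GCC; guarded out) of the factorings
   performed above, including the power-of-two common-factor case mentioned
   in the comments: a*c + b*c == (a + b) * c and i*4 + j*2 == (i*2 + j) * 2.
   The value ranges are arbitrary and small enough to avoid overflow.  */
#if 0
#include <assert.h>
int
main (void)
{
  for (int a = -10; a <= 10; a++)
    for (int b = -10; b <= 10; b++)
      for (int c = -10; c <= 10; c++)
        assert (a * c + b * c == (a + b) * c);
  for (int i = -10; i <= 10; i++)
    for (int j = -10; j <= 10; j++)
      assert (i * 4 + j * 2 == (i * 2 + j) * 2);
  return 0;
}
#endif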
7287
7288 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7289 specified by EXPR into the buffer PTR of length LEN bytes.
7290 Return the number of bytes placed in the buffer, or zero
7291 upon failure. */
7292
7293 static int
7294 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7295 {
7296 tree type = TREE_TYPE (expr);
7297 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7298 int byte, offset, word, words;
7299 unsigned char value;
7300
7301 if ((off == -1 && total_bytes > len)
7302 || off >= total_bytes)
7303 return 0;
7304 if (off == -1)
7305 off = 0;
7306 words = total_bytes / UNITS_PER_WORD;
7307
7308 for (byte = 0; byte < total_bytes; byte++)
7309 {
7310 int bitpos = byte * BITS_PER_UNIT;
7311 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7312 number of bytes. */
7313 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7314
7315 if (total_bytes > UNITS_PER_WORD)
7316 {
7317 word = byte / UNITS_PER_WORD;
7318 if (WORDS_BIG_ENDIAN)
7319 word = (words - 1) - word;
7320 offset = word * UNITS_PER_WORD;
7321 if (BYTES_BIG_ENDIAN)
7322 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7323 else
7324 offset += byte % UNITS_PER_WORD;
7325 }
7326 else
7327 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7328 if (offset >= off
7329 && offset - off < len)
7330 ptr[offset - off] = value;
7331 }
7332 return MIN (len, total_bytes - off);
7333 }
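
/* A simplified standalone sketch (not part of GCC; guarded out) of the idea
   behind native_encode_int: emit the value one byte at a time, placing each
   byte according to the target's byte order.  The real routine additionally
   honors word order (WORDS_BIG_ENDIAN) and partial encodings via OFF/LEN;
   the fixed 32-bit width here is an assumption for illustration.  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
encode_u32 (uint32_t v, unsigned char *buf, int big_endian)
{
  for (int byte = 0; byte < 4; byte++)
    {
      unsigned char value = (v >> (byte * 8)) & 0xff;   /* LSB first.  */
      int offset = big_endian ? 3 - byte : byte;
      buf[offset] = value;
    }
}

int
main (void)
{
  unsigned char le[4], be[4];
  encode_u32 (0x11223344u, le, 0);
  encode_u32 (0x11223344u, be, 1);
  assert (le[0] == 0x44 && le[3] == 0x11);
  assert (be[0] == 0x11 && be[3] == 0x44);
  return 0;
}
#endif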
7334
7335
7336 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7337 specified by EXPR into the buffer PTR of length LEN bytes.
7338 Return the number of bytes placed in the buffer, or zero
7339 upon failure. */
7340
7341 static int
7342 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7343 {
7344 tree type = TREE_TYPE (expr);
7345 machine_mode mode = TYPE_MODE (type);
7346 int total_bytes = GET_MODE_SIZE (mode);
7347 FIXED_VALUE_TYPE value;
7348 tree i_value, i_type;
7349
7350 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7351 return 0;
7352
7353 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7354
7355 if (NULL_TREE == i_type
7356 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7357 return 0;
7358
7359 value = TREE_FIXED_CST (expr);
7360 i_value = double_int_to_tree (i_type, value.data);
7361
7362 return native_encode_int (i_value, ptr, len, off);
7363 }
7364
7365
7366 /* Subroutine of native_encode_expr. Encode the REAL_CST
7367 specified by EXPR into the buffer PTR of length LEN bytes.
7368 Return the number of bytes placed in the buffer, or zero
7369 upon failure. */
7370
7371 static int
7372 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7373 {
7374 tree type = TREE_TYPE (expr);
7375 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7376 int byte, offset, word, words, bitpos;
7377 unsigned char value;
7378
7379 /* There are always 32 bits in each long, no matter the size of
7380 the host's long. We handle floating point representations with
7381 up to 192 bits. */
7382 long tmp[6];
7383
7384 if ((off == -1 && total_bytes > len)
7385 || off >= total_bytes)
7386 return 0;
7387 if (off == -1)
7388 off = 0;
7389 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7390
7391 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7392
7393 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7394 bitpos += BITS_PER_UNIT)
7395 {
7396 byte = (bitpos / BITS_PER_UNIT) & 3;
7397 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7398
7399 if (UNITS_PER_WORD < 4)
7400 {
7401 word = byte / UNITS_PER_WORD;
7402 if (WORDS_BIG_ENDIAN)
7403 word = (words - 1) - word;
7404 offset = word * UNITS_PER_WORD;
7405 if (BYTES_BIG_ENDIAN)
7406 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7407 else
7408 offset += byte % UNITS_PER_WORD;
7409 }
7410 else
7411 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7412 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7413 if (offset >= off
7414 && offset - off < len)
7415 ptr[offset - off] = value;
7416 }
7417 return MIN (len, total_bytes - off);
7418 }
7419
7420 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7421 specified by EXPR into the buffer PTR of length LEN bytes.
7422 Return the number of bytes placed in the buffer, or zero
7423 upon failure. */
7424
7425 static int
7426 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7427 {
7428 int rsize, isize;
7429 tree part;
7430
7431 part = TREE_REALPART (expr);
7432 rsize = native_encode_expr (part, ptr, len, off);
7433 if (off == -1
7434 && rsize == 0)
7435 return 0;
7436 part = TREE_IMAGPART (expr);
7437 if (off != -1)
7438 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7439 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7440 if (off == -1
7441 && isize != rsize)
7442 return 0;
7443 return rsize + isize;
7444 }
7445
7446
7447 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7448 specified by EXPR into the buffer PTR of length LEN bytes.
7449 Return the number of bytes placed in the buffer, or zero
7450 upon failure. */
7451
7452 static int
7453 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7454 {
7455 unsigned i, count;
7456 int size, offset;
7457 tree itype, elem;
7458
7459 offset = 0;
7460 count = VECTOR_CST_NELTS (expr);
7461 itype = TREE_TYPE (TREE_TYPE (expr));
7462 size = GET_MODE_SIZE (TYPE_MODE (itype));
7463 for (i = 0; i < count; i++)
7464 {
7465 if (off >= size)
7466 {
7467 off -= size;
7468 continue;
7469 }
7470 elem = VECTOR_CST_ELT (expr, i);
7471 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7472 if ((off == -1 && res != size)
7473 || res == 0)
7474 return 0;
7475 offset += res;
7476 if (offset >= len)
7477 return offset;
7478 if (off != -1)
7479 off = 0;
7480 }
7481 return offset;
7482 }
7483
7484
7485 /* Subroutine of native_encode_expr. Encode the STRING_CST
7486 specified by EXPR into the buffer PTR of length LEN bytes.
7487 Return the number of bytes placed in the buffer, or zero
7488 upon failure. */
7489
7490 static int
7491 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7492 {
7493 tree type = TREE_TYPE (expr);
7494 HOST_WIDE_INT total_bytes;
7495
7496 if (TREE_CODE (type) != ARRAY_TYPE
7497 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7498 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7499 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7500 return 0;
7501 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7502 if ((off == -1 && total_bytes > len)
7503 || off >= total_bytes)
7504 return 0;
7505 if (off == -1)
7506 off = 0;
7507 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7508 {
7509 int written = 0;
7510 if (off < TREE_STRING_LENGTH (expr))
7511 {
7512 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7513 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7514 }
7515 memset (ptr + written, 0,
7516 MIN (total_bytes - written, len - written));
7517 }
7518 else
7519 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7520 return MIN (total_bytes - off, len);
7521 }
7522
7523
7524 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7525 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7526 buffer PTR of length LEN bytes. If OFF is not -1 then start
7527 the encoding at byte offset OFF and encode at most LEN bytes.
7528 Return the number of bytes placed in the buffer, or zero upon failure. */
7529
7530 int
7531 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7532 {
7533 switch (TREE_CODE (expr))
7534 {
7535 case INTEGER_CST:
7536 return native_encode_int (expr, ptr, len, off);
7537
7538 case REAL_CST:
7539 return native_encode_real (expr, ptr, len, off);
7540
7541 case FIXED_CST:
7542 return native_encode_fixed (expr, ptr, len, off);
7543
7544 case COMPLEX_CST:
7545 return native_encode_complex (expr, ptr, len, off);
7546
7547 case VECTOR_CST:
7548 return native_encode_vector (expr, ptr, len, off);
7549
7550 case STRING_CST:
7551 return native_encode_string (expr, ptr, len, off);
7552
7553 default:
7554 return 0;
7555 }
7556 }
7557
7558
7559 /* Subroutine of native_interpret_expr. Interpret the contents of
7560 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7561 If the buffer cannot be interpreted, return NULL_TREE. */
7562
7563 static tree
7564 native_interpret_int (tree type, const unsigned char *ptr, int len)
7565 {
7566 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7567
7568 if (total_bytes > len
7569 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7570 return NULL_TREE;
7571
7572 wide_int result = wi::from_buffer (ptr, total_bytes);
7573
7574 return wide_int_to_tree (type, result);
7575 }
7576
7577
7578 /* Subroutine of native_interpret_expr. Interpret the contents of
7579 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7580 If the buffer cannot be interpreted, return NULL_TREE. */
7581
7582 static tree
7583 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7584 {
7585 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7586 double_int result;
7587 FIXED_VALUE_TYPE fixed_value;
7588
7589 if (total_bytes > len
7590 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7591 return NULL_TREE;
7592
7593 result = double_int::from_buffer (ptr, total_bytes);
7594 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7595
7596 return build_fixed (type, fixed_value);
7597 }
7598
7599
7600 /* Subroutine of native_interpret_expr. Interpret the contents of
7601 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7602 If the buffer cannot be interpreted, return NULL_TREE. */
7603
7604 static tree
7605 native_interpret_real (tree type, const unsigned char *ptr, int len)
7606 {
7607 machine_mode mode = TYPE_MODE (type);
7608 int total_bytes = GET_MODE_SIZE (mode);
7609 int byte, offset, word, words, bitpos;
7610 unsigned char value;
7611 /* There are always 32 bits in each long, no matter the size of
7612 the host's long. We handle floating point representations with
7613 up to 192 bits. */
7614 REAL_VALUE_TYPE r;
7615 long tmp[6];
7616
7617 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7618 if (total_bytes > len || total_bytes > 24)
7619 return NULL_TREE;
7620 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7621
7622 memset (tmp, 0, sizeof (tmp));
7623 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7624 bitpos += BITS_PER_UNIT)
7625 {
7626 byte = (bitpos / BITS_PER_UNIT) & 3;
7627 if (UNITS_PER_WORD < 4)
7628 {
7629 word = byte / UNITS_PER_WORD;
7630 if (WORDS_BIG_ENDIAN)
7631 word = (words - 1) - word;
7632 offset = word * UNITS_PER_WORD;
7633 if (BYTES_BIG_ENDIAN)
7634 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7635 else
7636 offset += byte % UNITS_PER_WORD;
7637 }
7638 else
7639 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7640 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7641
7642 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7643 }
7644
7645 real_from_target (&r, tmp, mode);
7646 return build_real (type, r);
7647 }
7648
7649
7650 /* Subroutine of native_interpret_expr. Interpret the contents of
7651 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7652 If the buffer cannot be interpreted, return NULL_TREE. */
7653
7654 static tree
7655 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7656 {
7657 tree etype, rpart, ipart;
7658 int size;
7659
7660 etype = TREE_TYPE (type);
7661 size = GET_MODE_SIZE (TYPE_MODE (etype));
7662 if (size * 2 > len)
7663 return NULL_TREE;
7664 rpart = native_interpret_expr (etype, ptr, size);
7665 if (!rpart)
7666 return NULL_TREE;
7667 ipart = native_interpret_expr (etype, ptr+size, size);
7668 if (!ipart)
7669 return NULL_TREE;
7670 return build_complex (type, rpart, ipart);
7671 }
7672
7673
7674 /* Subroutine of native_interpret_expr. Interpret the contents of
7675 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7676 If the buffer cannot be interpreted, return NULL_TREE. */
7677
7678 static tree
7679 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7680 {
7681 tree etype, elem;
7682 int i, size, count;
7683 tree *elements;
7684
7685 etype = TREE_TYPE (type);
7686 size = GET_MODE_SIZE (TYPE_MODE (etype));
7687 count = TYPE_VECTOR_SUBPARTS (type);
7688 if (size * count > len)
7689 return NULL_TREE;
7690
7691 elements = XALLOCAVEC (tree, count);
7692 for (i = count - 1; i >= 0; i--)
7693 {
7694 elem = native_interpret_expr (etype, ptr+(i*size), size);
7695 if (!elem)
7696 return NULL_TREE;
7697 elements[i] = elem;
7698 }
7699 return build_vector (type, elements);
7700 }
7701
7702
7703 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7704 the buffer PTR of length LEN as a constant of type TYPE. For
7705 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7706 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7707 return NULL_TREE. */
7708
7709 tree
7710 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7711 {
7712 switch (TREE_CODE (type))
7713 {
7714 case INTEGER_TYPE:
7715 case ENUMERAL_TYPE:
7716 case BOOLEAN_TYPE:
7717 case POINTER_TYPE:
7718 case REFERENCE_TYPE:
7719 return native_interpret_int (type, ptr, len);
7720
7721 case REAL_TYPE:
7722 return native_interpret_real (type, ptr, len);
7723
7724 case FIXED_POINT_TYPE:
7725 return native_interpret_fixed (type, ptr, len);
7726
7727 case COMPLEX_TYPE:
7728 return native_interpret_complex (type, ptr, len);
7729
7730 case VECTOR_TYPE:
7731 return native_interpret_vector (type, ptr, len);
7732
7733 default:
7734 return NULL_TREE;
7735 }
7736 }
7737
7738 /* Returns true if we can interpret the contents of a native encoding
7739 as TYPE. */
7740
7741 static bool
7742 can_native_interpret_type_p (tree type)
7743 {
7744 switch (TREE_CODE (type))
7745 {
7746 case INTEGER_TYPE:
7747 case ENUMERAL_TYPE:
7748 case BOOLEAN_TYPE:
7749 case POINTER_TYPE:
7750 case REFERENCE_TYPE:
7751 case FIXED_POINT_TYPE:
7752 case REAL_TYPE:
7753 case COMPLEX_TYPE:
7754 case VECTOR_TYPE:
7755 return true;
7756 default:
7757 return false;
7758 }
7759 }
7760
7761 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7762 TYPE at compile-time. If we're unable to perform the conversion
7763 return NULL_TREE. */
7764
7765 static tree
7766 fold_view_convert_expr (tree type, tree expr)
7767 {
7768 /* We support up to 512-bit values (for V8DFmode). */
7769 unsigned char buffer[64];
7770 int len;
7771
7772 /* Check that the host and target are sane. */
7773 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7774 return NULL_TREE;
7775
7776 len = native_encode_expr (expr, buffer, sizeof (buffer));
7777 if (len == 0)
7778 return NULL_TREE;
7779
7780 return native_interpret_expr (type, buffer, len);
7781 }
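
/* A standalone illustration (not part of GCC; guarded out) of what a
   VIEW_CONVERT_EXPR on a constant amounts to: reinterpret the bytes of one
   object as another type of the same size, done here at run time via memcpy,
   whereas the fold above does it at compile time through the
   native_encode_expr / native_interpret_expr pair.  The expected bit pattern
   assumes IEEE-754 single precision, as is usual on GCC targets.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>
int
main (void)
{
  float f = 1.0f;
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);      /* View-convert float -> uint32_t.  */
  assert (bits == 0x3f800000u);         /* IEEE-754 single-precision 1.0.  */
  return 0;
}
#endif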
7782
7783 /* Build an expression for the address of T. Folds away INDIRECT_REF
7784 to avoid confusing the gimplify process. */
7785
7786 tree
7787 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7788 {
7789 /* The size of the object is not relevant when talking about its address. */
7790 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7791 t = TREE_OPERAND (t, 0);
7792
7793 if (TREE_CODE (t) == INDIRECT_REF)
7794 {
7795 t = TREE_OPERAND (t, 0);
7796
7797 if (TREE_TYPE (t) != ptrtype)
7798 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7799 }
7800 else if (TREE_CODE (t) == MEM_REF
7801 && integer_zerop (TREE_OPERAND (t, 1)))
7802 return TREE_OPERAND (t, 0);
7803 else if (TREE_CODE (t) == MEM_REF
7804 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7805 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7806 TREE_OPERAND (t, 0),
7807 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7808 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7809 {
7810 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7811
7812 if (TREE_TYPE (t) != ptrtype)
7813 t = fold_convert_loc (loc, ptrtype, t);
7814 }
7815 else
7816 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7817
7818 return t;
7819 }
7820
7821 /* Build an expression for the address of T. */
7822
7823 tree
7824 build_fold_addr_expr_loc (location_t loc, tree t)
7825 {
7826 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7827
7828 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7829 }
7830
7831 /* Fold a unary expression of code CODE and type TYPE with operand
7832 OP0. Return the folded expression if folding is successful.
7833 Otherwise, return NULL_TREE. */
7834
7835 tree
7836 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7837 {
7838 tree tem;
7839 tree arg0;
7840 enum tree_code_class kind = TREE_CODE_CLASS (code);
7841
7842 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7843 && TREE_CODE_LENGTH (code) == 1);
7844
7845 arg0 = op0;
7846 if (arg0)
7847 {
7848 if (CONVERT_EXPR_CODE_P (code)
7849 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7850 {
7851 /* Don't use STRIP_NOPS, because signedness of argument type
7852 matters. */
7853 STRIP_SIGN_NOPS (arg0);
7854 }
7855 else
7856 {
7857 /* Strip any conversions that don't change the mode. This
7858 is safe for every expression, except for a comparison
7859 expression because its signedness is derived from its
7860 operands.
7861
7862 Note that this is done as an internal manipulation within
7863 the constant folder, in order to find the simplest
7864 representation of the arguments so that their form can be
7865 studied. In any case, the appropriate type conversions
7866 should be put back in the tree that will get out of the
7867 constant folder. */
7868 STRIP_NOPS (arg0);
7869 }
7870
7871 if (CONSTANT_CLASS_P (arg0))
7872 {
7873 tree tem = const_unop (code, type, arg0);
7874 if (tem)
7875 {
7876 if (TREE_TYPE (tem) != type)
7877 tem = fold_convert_loc (loc, type, tem);
7878 return tem;
7879 }
7880 }
7881 }
7882
7883 tem = generic_simplify (loc, code, type, op0);
7884 if (tem)
7885 return tem;
7886
7887 if (TREE_CODE_CLASS (code) == tcc_unary)
7888 {
7889 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7890 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7891 fold_build1_loc (loc, code, type,
7892 fold_convert_loc (loc, TREE_TYPE (op0),
7893 TREE_OPERAND (arg0, 1))));
7894 else if (TREE_CODE (arg0) == COND_EXPR)
7895 {
7896 tree arg01 = TREE_OPERAND (arg0, 1);
7897 tree arg02 = TREE_OPERAND (arg0, 2);
7898 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7899 arg01 = fold_build1_loc (loc, code, type,
7900 fold_convert_loc (loc,
7901 TREE_TYPE (op0), arg01));
7902 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7903 arg02 = fold_build1_loc (loc, code, type,
7904 fold_convert_loc (loc,
7905 TREE_TYPE (op0), arg02));
7906 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7907 arg01, arg02);
7908
7909 /* If this was a conversion, and all we did was to move it
7910 inside the COND_EXPR, bring it back out. But leave it if
7911 it is a conversion from integer to integer and the
7912 result precision is no wider than a word since such a
7913 conversion is cheap and may be optimized away by combine,
7914 while it couldn't if it were outside the COND_EXPR. Then return
7915 so we don't get into an infinite recursion loop taking the
7916 conversion out and then back in. */
7917
7918 if ((CONVERT_EXPR_CODE_P (code)
7919 || code == NON_LVALUE_EXPR)
7920 && TREE_CODE (tem) == COND_EXPR
7921 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7922 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7923 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7924 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7925 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7926 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7927 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7928 && (INTEGRAL_TYPE_P
7929 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7930 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7931 || flag_syntax_only))
7932 tem = build1_loc (loc, code, type,
7933 build3 (COND_EXPR,
7934 TREE_TYPE (TREE_OPERAND
7935 (TREE_OPERAND (tem, 1), 0)),
7936 TREE_OPERAND (tem, 0),
7937 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7938 TREE_OPERAND (TREE_OPERAND (tem, 2),
7939 0)));
7940 return tem;
7941 }
7942 }
7943
7944 switch (code)
7945 {
7946 case NON_LVALUE_EXPR:
7947 if (!maybe_lvalue_p (op0))
7948 return fold_convert_loc (loc, type, op0);
7949 return NULL_TREE;
7950
7951 CASE_CONVERT:
7952 case FLOAT_EXPR:
7953 case FIX_TRUNC_EXPR:
7954 if (COMPARISON_CLASS_P (op0))
7955 {
7956 /* If we have (type) (a CMP b) and type is an integral type, return
7957 new expression involving the new type. Canonicalize
7958 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7959 non-integral type.
7960 Do not fold the result, as that would not simplify further; also,
7961 folding again results in recursion. */
7962 if (TREE_CODE (type) == BOOLEAN_TYPE)
7963 return build2_loc (loc, TREE_CODE (op0), type,
7964 TREE_OPERAND (op0, 0),
7965 TREE_OPERAND (op0, 1));
7966 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7967 && TREE_CODE (type) != VECTOR_TYPE)
7968 return build3_loc (loc, COND_EXPR, type, op0,
7969 constant_boolean_node (true, type),
7970 constant_boolean_node (false, type));
7971 }
7972
7973 /* Handle (T *)&A.B.C for A being of type T and B and C
7974 living at offset zero. This occurs frequently in
7975 C++ upcasting and then accessing the base. */
7976 if (TREE_CODE (op0) == ADDR_EXPR
7977 && POINTER_TYPE_P (type)
7978 && handled_component_p (TREE_OPERAND (op0, 0)))
7979 {
7980 HOST_WIDE_INT bitsize, bitpos;
7981 tree offset;
7982 machine_mode mode;
7983 int unsignedp, volatilep;
7984 tree base = TREE_OPERAND (op0, 0);
7985 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7986 &mode, &unsignedp, &volatilep, false);
7987 /* If the reference was to a (constant) zero offset, we can use
7988 the address of the base if it has the same base type
7989 as the result type and the pointer type is unqualified. */
7990 if (! offset && bitpos == 0
7991 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7992 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7993 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7994 return fold_convert_loc (loc, type,
7995 build_fold_addr_expr_loc (loc, base));
7996 }
7997
7998 if (TREE_CODE (op0) == MODIFY_EXPR
7999 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
8000 /* Detect assigning a bitfield. */
8001 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
8002 && DECL_BIT_FIELD
8003 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
8004 {
8005 /* Don't leave an assignment inside a conversion
8006 unless assigning a bitfield. */
8007 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
8008 /* First do the assignment, then return converted constant. */
8009 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8010 TREE_NO_WARNING (tem) = 1;
8011 TREE_USED (tem) = 1;
8012 return tem;
8013 }
8014
8015 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8016 constants (if x has signed type, the sign bit cannot be set
8017 in c). This folds extension into the BIT_AND_EXPR.
8018 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8019 very likely don't have maximal range for their precision and this
8020 transformation effectively doesn't preserve non-maximal ranges. */
8021 if (TREE_CODE (type) == INTEGER_TYPE
8022 && TREE_CODE (op0) == BIT_AND_EXPR
8023 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8024 {
8025 tree and_expr = op0;
8026 tree and0 = TREE_OPERAND (and_expr, 0);
8027 tree and1 = TREE_OPERAND (and_expr, 1);
8028 int change = 0;
8029
8030 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8031 || (TYPE_PRECISION (type)
8032 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8033 change = 1;
8034 else if (TYPE_PRECISION (TREE_TYPE (and1))
8035 <= HOST_BITS_PER_WIDE_INT
8036 && tree_fits_uhwi_p (and1))
8037 {
8038 unsigned HOST_WIDE_INT cst;
8039
8040 cst = tree_to_uhwi (and1);
8041 cst &= HOST_WIDE_INT_M1U
8042 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8043 change = (cst == 0);
8044 #ifdef LOAD_EXTEND_OP
8045 if (change
8046 && !flag_syntax_only
8047 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8048 == ZERO_EXTEND))
8049 {
8050 tree uns = unsigned_type_for (TREE_TYPE (and0));
8051 and0 = fold_convert_loc (loc, uns, and0);
8052 and1 = fold_convert_loc (loc, uns, and1);
8053 }
8054 #endif
8055 }
8056 if (change)
8057 {
8058 tem = force_fit_type (type, wi::to_widest (and1), 0,
8059 TREE_OVERFLOW (and1));
8060 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8061 fold_convert_loc (loc, type, and0), tem);
8062 }
8063 }
8064
8065 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8066 when one of the new casts will fold away. Conservatively we assume
8067 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8068 if (POINTER_TYPE_P (type)
8069 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8070 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8071 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8072 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8073 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8074 {
8075 tree arg00 = TREE_OPERAND (arg0, 0);
8076 tree arg01 = TREE_OPERAND (arg0, 1);
8077
8078 return fold_build_pointer_plus_loc
8079 (loc, fold_convert_loc (loc, type, arg00), arg01);
8080 }
8081
8082 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8083 of the same precision, and X is an integer type not narrower than
8084 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
8085 if (INTEGRAL_TYPE_P (type)
8086 && TREE_CODE (op0) == BIT_NOT_EXPR
8087 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8088 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8089 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8090 {
8091 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8092 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8093 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8094 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8095 fold_convert_loc (loc, type, tem));
8096 }
8097
8098 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8099 type of X and Y (integer types only). */
8100 if (INTEGRAL_TYPE_P (type)
8101 && TREE_CODE (op0) == MULT_EXPR
8102 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8103 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8104 {
8105 /* Be careful not to introduce new overflows. */
8106 tree mult_type;
8107 if (TYPE_OVERFLOW_WRAPS (type))
8108 mult_type = type;
8109 else
8110 mult_type = unsigned_type_for (type);
8111
8112 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8113 {
8114 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8115 fold_convert_loc (loc, mult_type,
8116 TREE_OPERAND (op0, 0)),
8117 fold_convert_loc (loc, mult_type,
8118 TREE_OPERAND (op0, 1)));
8119 return fold_convert_loc (loc, type, tem);
8120 }
8121 }
8122
8123 return NULL_TREE;
8124
8125 case VIEW_CONVERT_EXPR:
8126 if (TREE_CODE (op0) == MEM_REF)
8127 return fold_build2_loc (loc, MEM_REF, type,
8128 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8129
8130 return NULL_TREE;
8131
8132 case NEGATE_EXPR:
8133 tem = fold_negate_expr (loc, arg0);
8134 if (tem)
8135 return fold_convert_loc (loc, type, tem);
8136 return NULL_TREE;
8137
8138 case ABS_EXPR:
8139 /* Convert fabs((double)float) into (double)fabsf(float). */
8140 if (TREE_CODE (arg0) == NOP_EXPR
8141 && TREE_CODE (type) == REAL_TYPE)
8142 {
8143 tree targ0 = strip_float_extensions (arg0);
8144 if (targ0 != arg0)
8145 return fold_convert_loc (loc, type,
8146 fold_build1_loc (loc, ABS_EXPR,
8147 TREE_TYPE (targ0),
8148 targ0));
8149 }
8150 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8151 else if (TREE_CODE (arg0) == ABS_EXPR)
8152 return arg0;
8153
8154 /* Strip sign ops from argument. */
8155 if (TREE_CODE (type) == REAL_TYPE)
8156 {
8157 tem = fold_strip_sign_ops (arg0);
8158 if (tem)
8159 return fold_build1_loc (loc, ABS_EXPR, type,
8160 fold_convert_loc (loc, type, tem));
8161 }
8162 return NULL_TREE;
8163
8164 case CONJ_EXPR:
8165 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8166 return fold_convert_loc (loc, type, arg0);
8167 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8168 {
8169 tree itype = TREE_TYPE (type);
8170 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8171 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8172 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8173 negate_expr (ipart));
8174 }
8175 if (TREE_CODE (arg0) == CONJ_EXPR)
8176 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8177 return NULL_TREE;
8178
8179 case BIT_NOT_EXPR:
8180 /* Convert ~ (-A) to A - 1. */
8181 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8182 return fold_build2_loc (loc, MINUS_EXPR, type,
8183 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8184 build_int_cst (type, 1));
8185 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8186 else if (INTEGRAL_TYPE_P (type)
8187 && ((TREE_CODE (arg0) == MINUS_EXPR
8188 && integer_onep (TREE_OPERAND (arg0, 1)))
8189 || (TREE_CODE (arg0) == PLUS_EXPR
8190 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8191 {
8192 /* Perform the negation in ARG0's type and only then convert
8193 to TYPE so as to avoid introducing undefined behavior. */
8194 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8195 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8196 TREE_OPERAND (arg0, 0));
8197 return fold_convert_loc (loc, type, t);
8198 }
8199 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8200 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8201 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8202 fold_convert_loc (loc, type,
8203 TREE_OPERAND (arg0, 0)))))
8204 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8205 fold_convert_loc (loc, type,
8206 TREE_OPERAND (arg0, 1)));
8207 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8208 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8209 fold_convert_loc (loc, type,
8210 TREE_OPERAND (arg0, 1)))))
8211 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8212 fold_convert_loc (loc, type,
8213 TREE_OPERAND (arg0, 0)), tem);
8214
8215 return NULL_TREE;
8216
8217 case TRUTH_NOT_EXPR:
8218 /* Note that the operand of this must be an int
8219 and its values must be 0 or 1.
8220 ("true" is a fixed value perhaps depending on the language,
8221 but we don't handle values other than 1 correctly yet.) */
8222 tem = fold_truth_not_expr (loc, arg0);
8223 if (!tem)
8224 return NULL_TREE;
8225 return fold_convert_loc (loc, type, tem);
8226
8227 case REALPART_EXPR:
8228 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8229 return fold_convert_loc (loc, type, arg0);
8230 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8231 {
8232 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8233 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8234 fold_build1_loc (loc, REALPART_EXPR, itype,
8235 TREE_OPERAND (arg0, 0)),
8236 fold_build1_loc (loc, REALPART_EXPR, itype,
8237 TREE_OPERAND (arg0, 1)));
8238 return fold_convert_loc (loc, type, tem);
8239 }
8240 if (TREE_CODE (arg0) == CONJ_EXPR)
8241 {
8242 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8243 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8244 TREE_OPERAND (arg0, 0));
8245 return fold_convert_loc (loc, type, tem);
8246 }
8247 if (TREE_CODE (arg0) == CALL_EXPR)
8248 {
8249 tree fn = get_callee_fndecl (arg0);
8250 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8251 switch (DECL_FUNCTION_CODE (fn))
8252 {
8253 CASE_FLT_FN (BUILT_IN_CEXPI):
8254 fn = mathfn_built_in (type, BUILT_IN_COS);
8255 if (fn)
8256 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8257 break;
8258
8259 default:
8260 break;
8261 }
8262 }
8263 return NULL_TREE;
8264
8265 case IMAGPART_EXPR:
8266 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8267 return build_zero_cst (type);
8268 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8269 {
8270 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8271 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8272 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8273 TREE_OPERAND (arg0, 0)),
8274 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8275 TREE_OPERAND (arg0, 1)));
8276 return fold_convert_loc (loc, type, tem);
8277 }
8278 if (TREE_CODE (arg0) == CONJ_EXPR)
8279 {
8280 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8281 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8282 return fold_convert_loc (loc, type, negate_expr (tem));
8283 }
8284 if (TREE_CODE (arg0) == CALL_EXPR)
8285 {
8286 tree fn = get_callee_fndecl (arg0);
8287 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8288 switch (DECL_FUNCTION_CODE (fn))
8289 {
8290 CASE_FLT_FN (BUILT_IN_CEXPI):
8291 fn = mathfn_built_in (type, BUILT_IN_SIN);
8292 if (fn)
8293 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8294 break;
8295
8296 default:
8297 break;
8298 }
8299 }
8300 return NULL_TREE;
8301
8302 case INDIRECT_REF:
8303 /* Fold *&X to X if X is an lvalue. */
8304 if (TREE_CODE (op0) == ADDR_EXPR)
8305 {
8306 tree op00 = TREE_OPERAND (op0, 0);
8307 if ((TREE_CODE (op00) == VAR_DECL
8308 || TREE_CODE (op00) == PARM_DECL
8309 || TREE_CODE (op00) == RESULT_DECL)
8310 && !TREE_READONLY (op00))
8311 return op00;
8312 }
8313 return NULL_TREE;
8314
8315 default:
8316 return NULL_TREE;
8317 } /* switch (code) */
8318 }
8319
8320
8321 /* If the operation was a conversion do _not_ mark a resulting constant
8322 with TREE_OVERFLOW if the original constant was not. These conversions
8323 have implementation defined behavior and retaining the TREE_OVERFLOW
8324 flag here would confuse later passes such as VRP. */
8325 tree
8326 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8327 tree type, tree op0)
8328 {
8329 tree res = fold_unary_loc (loc, code, type, op0);
8330 if (res
8331 && TREE_CODE (res) == INTEGER_CST
8332 && TREE_CODE (op0) == INTEGER_CST
8333 && CONVERT_EXPR_CODE_P (code))
8334 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8335
8336 return res;
8337 }
8338
8339 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8340 operands OP0 and OP1. LOC is the location of the resulting expression.
8341 ARG0 and ARG1 are the NOP-stripped results of OP0 and OP1.
8342 Return the folded expression if folding is successful. Otherwise,
8343 return NULL_TREE. */
8344 static tree
8345 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8346 tree arg0, tree arg1, tree op0, tree op1)
8347 {
8348 tree tem;
8349
8350 /* We only do these simplifications if we are optimizing. */
8351 if (!optimize)
8352 return NULL_TREE;
8353
8354 /* Check for things like (A || B) && (A || C). We can convert this
8355 to A || (B && C). Note that either operator can be any of the four
8356 truth and/or operations and the transformation will still be
8357 valid. Also note that we only care about order for the
8358 ANDIF and ORIF operators. If B contains side effects, this
8359 might change the truth-value of A. */
8360 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8361 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8362 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8363 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8364 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8365 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8366 {
8367 tree a00 = TREE_OPERAND (arg0, 0);
8368 tree a01 = TREE_OPERAND (arg0, 1);
8369 tree a10 = TREE_OPERAND (arg1, 0);
8370 tree a11 = TREE_OPERAND (arg1, 1);
8371 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8372 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8373 && (code == TRUTH_AND_EXPR
8374 || code == TRUTH_OR_EXPR));
8375
8376 if (operand_equal_p (a00, a10, 0))
8377 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8378 fold_build2_loc (loc, code, type, a01, a11));
8379 else if (commutative && operand_equal_p (a00, a11, 0))
8380 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8381 fold_build2_loc (loc, code, type, a01, a10));
8382 else if (commutative && operand_equal_p (a01, a10, 0))
8383 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8384 fold_build2_loc (loc, code, type, a00, a11));
8385
8386 /* This case is tricky because we must either have commutative
8387 operators or else A10 must not have side-effects. */
8388
8389 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8390 && operand_equal_p (a01, a11, 0))
8391 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8392 fold_build2_loc (loc, code, type, a00, a10),
8393 a01);
8394 }
8395
8396 /* See if we can build a range comparison. */
8397 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8398 return tem;
8399
8400 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8401 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8402 {
8403 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8404 if (tem)
8405 return fold_build2_loc (loc, code, type, tem, arg1);
8406 }
8407
8408 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8409 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8410 {
8411 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8412 if (tem)
8413 return fold_build2_loc (loc, code, type, arg0, tem);
8414 }
8415
8416 /* Check for the possibility of merging component references. If our
8417 lhs is another similar operation, try to merge its rhs with our
8418 rhs. Then try to merge our lhs and rhs. */
8419 if (TREE_CODE (arg0) == code
8420 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8421 TREE_OPERAND (arg0, 1), arg1)))
8422 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8423
8424 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8425 return tem;
8426
8427 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8428 && (code == TRUTH_AND_EXPR
8429 || code == TRUTH_ANDIF_EXPR
8430 || code == TRUTH_OR_EXPR
8431 || code == TRUTH_ORIF_EXPR))
8432 {
8433 enum tree_code ncode, icode;
8434
8435 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8436 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8437 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8438
8439 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8440 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8441 We don't want to pack more than two leaves into a non-IF AND/OR
8442 expression.
8443 If the tree code of the left-hand operand isn't an AND/OR-IF code and is not
8444 equal to IF-CODE, then we don't want to add the right-hand operand.
8445 If the inner right-hand side of the left-hand operand has
8446 side-effects, or isn't simple, then we can't add to it,
8447 as otherwise we might destroy the if-sequence. */
8448 if (TREE_CODE (arg0) == icode
8449 && simple_operand_p_2 (arg1)
8450 /* Needed for sequence points to handle trappings, and
8451 side-effects. */
8452 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8453 {
8454 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8455 arg1);
8456 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8457 tem);
8458 }
8459 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8460 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8461 else if (TREE_CODE (arg1) == icode
8462 && simple_operand_p_2 (arg0)
8463 /* Needed for sequence points to handle trappings, and
8464 side-effects. */
8465 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8466 {
8467 tem = fold_build2_loc (loc, ncode, type,
8468 arg0, TREE_OPERAND (arg1, 0));
8469 return fold_build2_loc (loc, icode, type, tem,
8470 TREE_OPERAND (arg1, 1));
8471 }
8472 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8473 into (A OR B).
8474 For sequence point consistency, we need to check for trapping,
8475 and side-effects. */
8476 else if (code == icode && simple_operand_p_2 (arg0)
8477 && simple_operand_p_2 (arg1))
8478 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8479 }
8480
8481 return NULL_TREE;
8482 }
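
/* A standalone truth-table check (not part of GCC; guarded out) of the
   distribution used near the top of fold_truth_andor for side-effect-free
   operands: (A || B) && (A || C) is equivalent to A || (B && C).  */
#if 0
#include <assert.h>
int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
        assert (((a || b) && (a || c)) == (a || (b && c)));
  return 0;
}
#endif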
8483
8484 /* Fold a binary expression of code CODE and type TYPE with operands
8485 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8486 Return the folded expression if folding is successful. Otherwise,
8487 return NULL_TREE. */
8488
8489 static tree
8490 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8491 {
8492 enum tree_code compl_code;
8493
8494 if (code == MIN_EXPR)
8495 compl_code = MAX_EXPR;
8496 else if (code == MAX_EXPR)
8497 compl_code = MIN_EXPR;
8498 else
8499 gcc_unreachable ();
8500
8501 /* MIN (MAX (a, b), b) == b. */
8502 if (TREE_CODE (op0) == compl_code
8503 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8504 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8505
8506 /* MIN (MAX (b, a), b) == b. */
8507 if (TREE_CODE (op0) == compl_code
8508 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8509 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8510 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8511
8512 /* MIN (a, MAX (a, b)) == a. */
8513 if (TREE_CODE (op1) == compl_code
8514 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8515 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8516 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8517
8518 /* MIN (a, MAX (b, a)) == a. */
8519 if (TREE_CODE (op1) == compl_code
8520 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8521 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8522 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8523
8524 return NULL_TREE;
8525 }
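
/* A standalone check (not part of GCC; guarded out) of the MIN/MAX
   absorption identities used above, e.g. MIN (MAX (a, b), b) == b and
   MIN (a, MAX (a, b)) == a.  The macros below are local illustrations, not
   the tree codes the folder operates on.  */
#if 0
#include <assert.h>
#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define MAX(x, y) ((x) > (y) ? (x) : (y))
int
main (void)
{
  for (int a = -20; a <= 20; a++)
    for (int b = -20; b <= 20; b++)
      {
        assert (MIN (MAX (a, b), b) == b);
        assert (MIN (a, MAX (a, b)) == a);
      }
  return 0;
}
#endif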
8526
8527 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8528 by changing CODE to reduce the magnitude of constants involved in
8529 ARG0 of the comparison.
8530 Returns a canonicalized comparison tree if a simplification was
8531 possible, otherwise returns NULL_TREE.
8532 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8533 valid if signed overflow is undefined. */
8534
8535 static tree
8536 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8537 tree arg0, tree arg1,
8538 bool *strict_overflow_p)
8539 {
8540 enum tree_code code0 = TREE_CODE (arg0);
8541 tree t, cst0 = NULL_TREE;
8542 int sgn0;
8543 bool swap = false;
8544
8545 /* Match A +- CST code arg1 and CST code arg1. We can change the
8546 first form only if overflow is undefined. */
8547 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8548 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8549 /* In principle pointers also have undefined overflow behavior,
8550 but that causes problems elsewhere. */
8551 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8552 && (code0 == MINUS_EXPR
8553 || code0 == PLUS_EXPR)
8554 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8555 || code0 == INTEGER_CST))
8556 return NULL_TREE;
8557
8558 /* Identify the constant in arg0 and its sign. */
8559 if (code0 == INTEGER_CST)
8560 cst0 = arg0;
8561 else
8562 cst0 = TREE_OPERAND (arg0, 1);
8563 sgn0 = tree_int_cst_sgn (cst0);
8564
8565 /* Overflowed constants and zero will cause problems. */
8566 if (integer_zerop (cst0)
8567 || TREE_OVERFLOW (cst0))
8568 return NULL_TREE;
8569
8570 /* See if we can reduce the magnitude of the constant in
8571 arg0 by changing the comparison code. */
8572 if (code0 == INTEGER_CST)
8573 {
8574 /* CST <= arg1 -> CST-1 < arg1. */
8575 if (code == LE_EXPR && sgn0 == 1)
8576 code = LT_EXPR;
8577 /* -CST < arg1 -> -CST-1 <= arg1. */
8578 else if (code == LT_EXPR && sgn0 == -1)
8579 code = LE_EXPR;
8580 /* CST > arg1 -> CST-1 >= arg1. */
8581 else if (code == GT_EXPR && sgn0 == 1)
8582 code = GE_EXPR;
8583 /* -CST >= arg1 -> -CST-1 > arg1. */
8584 else if (code == GE_EXPR && sgn0 == -1)
8585 code = GT_EXPR;
8586 else
8587 return NULL_TREE;
8588 /* arg1 code' CST' might be more canonical. */
8589 swap = true;
8590 }
8591 else
8592 {
8593 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8594 if (code == LT_EXPR
8595 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8596 code = LE_EXPR;
8597 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8598 else if (code == GT_EXPR
8599 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8600 code = GE_EXPR;
8601 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8602 else if (code == LE_EXPR
8603 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8604 code = LT_EXPR;
8605 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8606 else if (code == GE_EXPR
8607 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8608 code = GT_EXPR;
8609 else
8610 return NULL_TREE;
8611 *strict_overflow_p = true;
8612 }
8613
8614 /* Now build the constant reduced in magnitude. But not if that
8615 would produce one outside of its types range. */
8616 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8617 && ((sgn0 == 1
8618 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8619 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8620 || (sgn0 == -1
8621 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8622 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8623 /* We cannot swap the comparison here as that would cause us to
8624 endlessly recurse. */
8625 return NULL_TREE;
8626
8627 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8628 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8629 if (code0 != INTEGER_CST)
8630 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8631 t = fold_convert (TREE_TYPE (arg1), t);
8632
8633   /* If swapping might yield a more canonical form, do so.  */
8634 if (swap)
8635 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8636 else
8637 return fold_build2_loc (loc, code, type, t, arg1);
8638 }
8639
8640 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8641 overflow further. Try to decrease the magnitude of constants involved
8642 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8643 and put sole constants at the second argument position.
8644 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8645
8646 static tree
8647 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8648 tree arg0, tree arg1)
8649 {
8650 tree t;
8651 bool strict_overflow_p;
8652 const char * const warnmsg = G_("assuming signed overflow does not occur "
8653 "when reducing constant in comparison");
8654
8655 /* Try canonicalization by simplifying arg0. */
8656 strict_overflow_p = false;
8657 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8658 &strict_overflow_p);
8659 if (t)
8660 {
8661 if (strict_overflow_p)
8662 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8663 return t;
8664 }
8665
8666 /* Try canonicalization by simplifying arg1 using the swapped
8667 comparison. */
8668 code = swap_tree_comparison (code);
8669 strict_overflow_p = false;
8670 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8671 &strict_overflow_p);
8672 if (t && strict_overflow_p)
8673 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8674 return t;
8675 }
8676
8677 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8678 space. This is used to avoid issuing overflow warnings for
8679    expressions like &p->x which cannot wrap.  */
8680
8681 static bool
8682 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8683 {
8684 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8685 return true;
8686
8687 if (bitpos < 0)
8688 return true;
8689
8690 wide_int wi_offset;
8691 int precision = TYPE_PRECISION (TREE_TYPE (base));
8692 if (offset == NULL_TREE)
8693 wi_offset = wi::zero (precision);
8694 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8695 return true;
8696 else
8697 wi_offset = offset;
8698
8699 bool overflow;
8700 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8701 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8702 if (overflow)
8703 return true;
8704
8705 if (!wi::fits_uhwi_p (total))
8706 return true;
8707
8708 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8709 if (size <= 0)
8710 return true;
8711
8712 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8713 array. */
8714 if (TREE_CODE (base) == ADDR_EXPR)
8715 {
8716 HOST_WIDE_INT base_size;
8717
8718 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8719 if (base_size > 0 && size < base_size)
8720 size = base_size;
8721 }
8722
8723 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8724 }
8725
8726 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8727    of sizetype kind.  This makes sure to properly sign-extend the
8728 constant. */
8729
8730 static HOST_WIDE_INT
8731 size_low_cst (const_tree t)
8732 {
8733 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8734 int prec = TYPE_PRECISION (TREE_TYPE (t));
8735 if (prec < HOST_BITS_PER_WIDE_INT)
8736 return sext_hwi (w, prec);
8737 return w;
8738 }
8739
8740 /* Subroutine of fold_binary. This routine performs all of the
8741 transformations that are common to the equality/inequality
8742 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8743    (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than fold_binary
8744    should call fold_binary rather than this function directly.  Fold a comparison with
8745 tree code CODE and type TYPE with operands OP0 and OP1. Return
8746 the folded comparison or NULL_TREE. */
8747
8748 static tree
8749 fold_comparison (location_t loc, enum tree_code code, tree type,
8750 tree op0, tree op1)
8751 {
8752 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8753 tree arg0, arg1, tem;
8754
8755 arg0 = op0;
8756 arg1 = op1;
8757
8758 STRIP_SIGN_NOPS (arg0);
8759 STRIP_SIGN_NOPS (arg1);
8760
8761 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
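  /* E.g. for signed X with undefined overflow, "x + 5 < 10" becomes "x < 5".
     If the adjusted constant overflows, as in "x + 2 < INT_MIN + 1", the
     whole comparison folds to a constant (here boolean false) instead.  */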
8762 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8763 && (equality_code
8764 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8765 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8766 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8767 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8768 && TREE_CODE (arg1) == INTEGER_CST
8769 && !TREE_OVERFLOW (arg1))
8770 {
8771 const enum tree_code
8772 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8773 tree const1 = TREE_OPERAND (arg0, 1);
8774 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8775 tree variable = TREE_OPERAND (arg0, 0);
8776 tree new_const = int_const_binop (reverse_op, const2, const1);
8777
8778 /* If the constant operation overflowed this can be
8779 simplified as a comparison against INT_MAX/INT_MIN. */
8780 if (TREE_OVERFLOW (new_const)
8781 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8782 {
8783 int const1_sgn = tree_int_cst_sgn (const1);
8784 enum tree_code code2 = code;
8785
8786 /* Get the sign of the constant on the lhs if the
8787 operation were VARIABLE + CONST1. */
8788 if (TREE_CODE (arg0) == MINUS_EXPR)
8789 const1_sgn = -const1_sgn;
8790
8791 /* The sign of the constant determines if we overflowed
8792 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8793 Canonicalize to the INT_MIN overflow by swapping the comparison
8794 if necessary. */
8795 if (const1_sgn == -1)
8796 code2 = swap_tree_comparison (code);
8797
8798 /* We now can look at the canonicalized case
8799 VARIABLE + 1 CODE2 INT_MIN
8800 and decide on the result. */
8801 switch (code2)
8802 {
8803 case EQ_EXPR:
8804 case LT_EXPR:
8805 case LE_EXPR:
8806 return
8807 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8808
8809 case NE_EXPR:
8810 case GE_EXPR:
8811 case GT_EXPR:
8812 return
8813 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8814
8815 default:
8816 gcc_unreachable ();
8817 }
8818 }
8819 else
8820 {
8821 if (!equality_code)
8822 fold_overflow_warning ("assuming signed overflow does not occur "
8823 "when changing X +- C1 cmp C2 to "
8824 "X cmp C2 -+ C1",
8825 WARN_STRICT_OVERFLOW_COMPARISON);
8826 return fold_build2_loc (loc, code, type, variable, new_const);
8827 }
8828 }
8829
8830 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8831 if (TREE_CODE (arg0) == MINUS_EXPR
8832 && equality_code
8833 && integer_zerop (arg1))
8834 {
8835 /* ??? The transformation is valid for the other operators if overflow
8836 is undefined for the type, but performing it here badly interacts
8837 with the transformation in fold_cond_expr_with_comparison which
8838 	 attempts to synthesize ABS_EXPR.  */
8839 if (!equality_code)
8840 fold_overflow_warning ("assuming signed overflow does not occur "
8841 "when changing X - Y cmp 0 to X cmp Y",
8842 WARN_STRICT_OVERFLOW_COMPARISON);
8843 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8844 TREE_OPERAND (arg0, 1));
8845 }
8846
8847 /* For comparisons of pointers we can decompose it to a compile time
8848 comparison of the base objects and the offsets into the object.
8849 This requires at least one operand being an ADDR_EXPR or a
8850 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
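  /* For instance, with "struct S { int a; int b; } s;" the comparison
     "&s.a == &s.b" has equal bases and different constant bit positions,
     so it folds to false below.  */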
8851 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8852 && (TREE_CODE (arg0) == ADDR_EXPR
8853 || TREE_CODE (arg1) == ADDR_EXPR
8854 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8855 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8856 {
8857 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8858 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8859 machine_mode mode;
8860 int volatilep, unsignedp;
8861 bool indirect_base0 = false, indirect_base1 = false;
8862
8863 /* Get base and offset for the access. Strip ADDR_EXPR for
8864 get_inner_reference, but put it back by stripping INDIRECT_REF
8865 off the base object if possible. indirect_baseN will be true
8866 if baseN is not an address but refers to the object itself. */
8867 base0 = arg0;
8868 if (TREE_CODE (arg0) == ADDR_EXPR)
8869 {
8870 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8871 &bitsize, &bitpos0, &offset0, &mode,
8872 &unsignedp, &volatilep, false);
8873 if (TREE_CODE (base0) == INDIRECT_REF)
8874 base0 = TREE_OPERAND (base0, 0);
8875 else
8876 indirect_base0 = true;
8877 }
8878 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8879 {
8880 base0 = TREE_OPERAND (arg0, 0);
8881 STRIP_SIGN_NOPS (base0);
8882 if (TREE_CODE (base0) == ADDR_EXPR)
8883 {
8884 base0 = TREE_OPERAND (base0, 0);
8885 indirect_base0 = true;
8886 }
8887 offset0 = TREE_OPERAND (arg0, 1);
8888 if (tree_fits_shwi_p (offset0))
8889 {
8890 HOST_WIDE_INT off = size_low_cst (offset0);
8891 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8892 * BITS_PER_UNIT)
8893 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8894 {
8895 bitpos0 = off * BITS_PER_UNIT;
8896 offset0 = NULL_TREE;
8897 }
8898 }
8899 }
8900
8901 base1 = arg1;
8902 if (TREE_CODE (arg1) == ADDR_EXPR)
8903 {
8904 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8905 &bitsize, &bitpos1, &offset1, &mode,
8906 &unsignedp, &volatilep, false);
8907 if (TREE_CODE (base1) == INDIRECT_REF)
8908 base1 = TREE_OPERAND (base1, 0);
8909 else
8910 indirect_base1 = true;
8911 }
8912 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8913 {
8914 base1 = TREE_OPERAND (arg1, 0);
8915 STRIP_SIGN_NOPS (base1);
8916 if (TREE_CODE (base1) == ADDR_EXPR)
8917 {
8918 base1 = TREE_OPERAND (base1, 0);
8919 indirect_base1 = true;
8920 }
8921 offset1 = TREE_OPERAND (arg1, 1);
8922 if (tree_fits_shwi_p (offset1))
8923 {
8924 HOST_WIDE_INT off = size_low_cst (offset1);
8925 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8926 * BITS_PER_UNIT)
8927 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8928 {
8929 bitpos1 = off * BITS_PER_UNIT;
8930 offset1 = NULL_TREE;
8931 }
8932 }
8933 }
8934
8935 /* A local variable can never be pointed to by
8936 the default SSA name of an incoming parameter. */
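      /* E.g. in "int f (int *p) { int x; return p == &x; }" the equality
	 folds to false once P is still its incoming default definition.  */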
8937 if ((TREE_CODE (arg0) == ADDR_EXPR
8938 && indirect_base0
8939 && TREE_CODE (base0) == VAR_DECL
8940 && auto_var_in_fn_p (base0, current_function_decl)
8941 && !indirect_base1
8942 && TREE_CODE (base1) == SSA_NAME
8943 && SSA_NAME_IS_DEFAULT_DEF (base1)
8944 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8945 || (TREE_CODE (arg1) == ADDR_EXPR
8946 && indirect_base1
8947 && TREE_CODE (base1) == VAR_DECL
8948 && auto_var_in_fn_p (base1, current_function_decl)
8949 && !indirect_base0
8950 && TREE_CODE (base0) == SSA_NAME
8951 && SSA_NAME_IS_DEFAULT_DEF (base0)
8952 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8953 {
8954 if (code == NE_EXPR)
8955 return constant_boolean_node (1, type);
8956 else if (code == EQ_EXPR)
8957 return constant_boolean_node (0, type);
8958 }
8959 /* If we have equivalent bases we might be able to simplify. */
8960 else if (indirect_base0 == indirect_base1
8961 && operand_equal_p (base0, base1, 0))
8962 {
8963 /* We can fold this expression to a constant if the non-constant
8964 offset parts are equal. */
8965 if ((offset0 == offset1
8966 || (offset0 && offset1
8967 && operand_equal_p (offset0, offset1, 0)))
8968 && (code == EQ_EXPR
8969 || code == NE_EXPR
8970 || (indirect_base0 && DECL_P (base0))
8971 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8972
8973 {
8974 if (!equality_code
8975 && bitpos0 != bitpos1
8976 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8977 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8978 fold_overflow_warning (("assuming pointer wraparound does not "
8979 "occur when comparing P +- C1 with "
8980 "P +- C2"),
8981 WARN_STRICT_OVERFLOW_CONDITIONAL);
8982
8983 switch (code)
8984 {
8985 case EQ_EXPR:
8986 return constant_boolean_node (bitpos0 == bitpos1, type);
8987 case NE_EXPR:
8988 return constant_boolean_node (bitpos0 != bitpos1, type);
8989 case LT_EXPR:
8990 return constant_boolean_node (bitpos0 < bitpos1, type);
8991 case LE_EXPR:
8992 return constant_boolean_node (bitpos0 <= bitpos1, type);
8993 case GE_EXPR:
8994 return constant_boolean_node (bitpos0 >= bitpos1, type);
8995 case GT_EXPR:
8996 return constant_boolean_node (bitpos0 > bitpos1, type);
8997 default:;
8998 }
8999 }
9000 /* We can simplify the comparison to a comparison of the variable
9001 offset parts if the constant offset parts are equal.
9002 Be careful to use signed sizetype here because otherwise we
9003 mess with array offsets in the wrong way. This is possible
9004 	     because pointer arithmetic is restricted to remain within an
9005 object and overflow on pointer differences is undefined as of
9006 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
9007 else if (bitpos0 == bitpos1
9008 && (equality_code
9009 || (indirect_base0 && DECL_P (base0))
9010 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9011 {
9012 /* By converting to signed sizetype we cover middle-end pointer
9013 	     arithmetic, which operates on unsigned pointer types of
9014 	     sizetype precision, and ARRAY_REF offsets, which are properly
9015 	     sign- or zero-extended from their type in case it is narrower than
9016 sizetype. */
9017 if (offset0 == NULL_TREE)
9018 offset0 = build_int_cst (ssizetype, 0);
9019 else
9020 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9021 if (offset1 == NULL_TREE)
9022 offset1 = build_int_cst (ssizetype, 0);
9023 else
9024 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9025
9026 if (!equality_code
9027 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9028 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9029 fold_overflow_warning (("assuming pointer wraparound does not "
9030 "occur when comparing P +- C1 with "
9031 "P +- C2"),
9032 WARN_STRICT_OVERFLOW_COMPARISON);
9033
9034 return fold_build2_loc (loc, code, type, offset0, offset1);
9035 }
9036 }
9037 /* For non-equal bases we can simplify if they are addresses
9038 	 of declarations with different addresses.  */
9039 else if (indirect_base0 && indirect_base1
9040 /* We know that !operand_equal_p (base0, base1, 0)
9041 because the if condition was false. But make
9042 sure two decls are not the same. */
9043 && base0 != base1
9044 && TREE_CODE (arg0) == ADDR_EXPR
9045 && TREE_CODE (arg1) == ADDR_EXPR
9046 && DECL_P (base0)
9047 && DECL_P (base1)
9048 /* Watch for aliases. */
9049 && (!decl_in_symtab_p (base0)
9050 || !decl_in_symtab_p (base1)
9051 || !symtab_node::get_create (base0)->equal_address_to
9052 (symtab_node::get_create (base1))))
9053 {
9054 if (code == EQ_EXPR)
9055 return omit_two_operands_loc (loc, type, boolean_false_node,
9056 arg0, arg1);
9057 else if (code == NE_EXPR)
9058 return omit_two_operands_loc (loc, type, boolean_true_node,
9059 arg0, arg1);
9060 }
9061 /* For equal offsets we can simplify to a comparison of the
9062 base addresses. */
9063 else if (bitpos0 == bitpos1
9064 && (indirect_base0
9065 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9066 && (indirect_base1
9067 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9068 && ((offset0 == offset1)
9069 || (offset0 && offset1
9070 && operand_equal_p (offset0, offset1, 0))))
9071 {
9072 if (indirect_base0)
9073 base0 = build_fold_addr_expr_loc (loc, base0);
9074 if (indirect_base1)
9075 base1 = build_fold_addr_expr_loc (loc, base1);
9076 return fold_build2_loc (loc, code, type, base0, base1);
9077 }
9078 }
9079
9080 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9081 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9082 the resulting offset is smaller in absolute value than the
9083 original one and has the same sign. */
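  /* E.g. for signed X, Y, "x + 2 < y + 5" becomes "x < y + 3"; the new
     constant 3 is smaller in absolute value than 5 and has the same sign.  */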
9084 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9085 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9086 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9087 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9088 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9089 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9090 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9091 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9092 {
9093 tree const1 = TREE_OPERAND (arg0, 1);
9094 tree const2 = TREE_OPERAND (arg1, 1);
9095 tree variable1 = TREE_OPERAND (arg0, 0);
9096 tree variable2 = TREE_OPERAND (arg1, 0);
9097 tree cst;
9098 const char * const warnmsg = G_("assuming signed overflow does not "
9099 "occur when combining constants around "
9100 "a comparison");
9101
9102 /* Put the constant on the side where it doesn't overflow and is
9103 	 of lower absolute value and of the same sign as before.  */
9104 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9105 ? MINUS_EXPR : PLUS_EXPR,
9106 const2, const1);
9107 if (!TREE_OVERFLOW (cst)
9108 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9109 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9110 {
9111 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9112 return fold_build2_loc (loc, code, type,
9113 variable1,
9114 fold_build2_loc (loc, TREE_CODE (arg1),
9115 TREE_TYPE (arg1),
9116 variable2, cst));
9117 }
9118
9119 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9120 ? MINUS_EXPR : PLUS_EXPR,
9121 const1, const2);
9122 if (!TREE_OVERFLOW (cst)
9123 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9124 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9125 {
9126 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9127 return fold_build2_loc (loc, code, type,
9128 fold_build2_loc (loc, TREE_CODE (arg0),
9129 TREE_TYPE (arg0),
9130 variable1, cst),
9131 variable2);
9132 }
9133 }
9134
9135 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9136 signed arithmetic case. That form is created by the compiler
9137 often enough for folding it to be of value. One example is in
9138 computing loop trip counts after Operator Strength Reduction. */
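  /* E.g. with undefined signed overflow "x * 4 > 0" becomes "x > 0" and
     "x * -4 > 0" becomes "x < 0".  */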
9139 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9140 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9141 && TREE_CODE (arg0) == MULT_EXPR
9142 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9143 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9144 && integer_zerop (arg1))
9145 {
9146 tree const1 = TREE_OPERAND (arg0, 1);
9147 tree const2 = arg1; /* zero */
9148 tree variable1 = TREE_OPERAND (arg0, 0);
9149 enum tree_code cmp_code = code;
9150
9151 /* Handle unfolded multiplication by zero. */
9152 if (integer_zerop (const1))
9153 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9154
9155 fold_overflow_warning (("assuming signed overflow does not occur when "
9156 "eliminating multiplication in comparison "
9157 "with zero"),
9158 WARN_STRICT_OVERFLOW_COMPARISON);
9159
9160 /* If const1 is negative we swap the sense of the comparison. */
9161 if (tree_int_cst_sgn (const1) < 0)
9162 cmp_code = swap_tree_comparison (cmp_code);
9163
9164 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9165 }
9166
9167 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9168 if (tem)
9169 return tem;
9170
9171 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9172 {
9173 tree targ0 = strip_float_extensions (arg0);
9174 tree targ1 = strip_float_extensions (arg1);
9175 tree newtype = TREE_TYPE (targ0);
9176
9177 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9178 newtype = TREE_TYPE (targ1);
9179
9180 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9181 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9182 return fold_build2_loc (loc, code, type,
9183 fold_convert_loc (loc, newtype, targ0),
9184 fold_convert_loc (loc, newtype, targ1));
9185
9186 /* (-a) CMP (-b) -> b CMP a */
9187 if (TREE_CODE (arg0) == NEGATE_EXPR
9188 && TREE_CODE (arg1) == NEGATE_EXPR)
9189 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9190 TREE_OPERAND (arg0, 0));
9191
9192 if (TREE_CODE (arg1) == REAL_CST)
9193 {
9194 REAL_VALUE_TYPE cst;
9195 cst = TREE_REAL_CST (arg1);
9196
9197 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9198 if (TREE_CODE (arg0) == NEGATE_EXPR)
9199 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9200 TREE_OPERAND (arg0, 0),
9201 build_real (TREE_TYPE (arg1),
9202 real_value_negate (&cst)));
9203
9204 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9205 /* a CMP (-0) -> a CMP 0 */
9206 if (REAL_VALUE_MINUS_ZERO (cst))
9207 return fold_build2_loc (loc, code, type, arg0,
9208 build_real (TREE_TYPE (arg1), dconst0));
9209
9210 /* x != NaN is always true, other ops are always false. */
9211 if (REAL_VALUE_ISNAN (cst)
9212 && ! HONOR_SNANS (arg1))
9213 {
9214 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9215 return omit_one_operand_loc (loc, type, tem, arg0);
9216 }
9217
9218 /* Fold comparisons against infinity. */
9219 if (REAL_VALUE_ISINF (cst)
9220 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9221 {
9222 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9223 if (tem != NULL_TREE)
9224 return tem;
9225 }
9226 }
9227
9228 /* If this is a comparison of a real constant with a PLUS_EXPR
9229 or a MINUS_EXPR of a real constant, we can convert it into a
9230 comparison with a revised real constant as long as no overflow
9231 occurs when unsafe_math_optimizations are enabled. */
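      /* E.g. with -funsafe-math-optimizations "x + 1.0 < 3.0" becomes
	 "x < 2.0", provided the folded constant does not overflow.  */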
9232 if (flag_unsafe_math_optimizations
9233 && TREE_CODE (arg1) == REAL_CST
9234 && (TREE_CODE (arg0) == PLUS_EXPR
9235 || TREE_CODE (arg0) == MINUS_EXPR)
9236 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9237 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9238 ? MINUS_EXPR : PLUS_EXPR,
9239 arg1, TREE_OPERAND (arg0, 1)))
9240 && !TREE_OVERFLOW (tem))
9241 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9242
9243 /* Likewise, we can simplify a comparison of a real constant with
9244 a MINUS_EXPR whose first operand is also a real constant, i.e.
9245 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9246 floating-point types only if -fassociative-math is set. */
9247 if (flag_associative_math
9248 && TREE_CODE (arg1) == REAL_CST
9249 && TREE_CODE (arg0) == MINUS_EXPR
9250 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9251 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9252 arg1))
9253 && !TREE_OVERFLOW (tem))
9254 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9255 TREE_OPERAND (arg0, 1), tem);
9256
9257 /* Fold comparisons against built-in math functions. */
9258 if (TREE_CODE (arg1) == REAL_CST
9259 && flag_unsafe_math_optimizations
9260 && ! flag_errno_math)
9261 {
9262 enum built_in_function fcode = builtin_mathfn_code (arg0);
9263
9264 if (fcode != END_BUILTINS)
9265 {
9266 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9267 if (tem != NULL_TREE)
9268 return tem;
9269 }
9270 }
9271 }
9272
9273 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9274 && CONVERT_EXPR_P (arg0))
9275 {
9276 /* If we are widening one operand of an integer comparison,
9277 see if the other operand is similarly being widened. Perhaps we
9278 can do the comparison in the narrower type. */
9279 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9280 if (tem)
9281 return tem;
9282
9283 /* Or if we are changing signedness. */
9284 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9285 if (tem)
9286 return tem;
9287 }
9288
9289 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9290 constant, we can simplify it. */
9291 if (TREE_CODE (arg1) == INTEGER_CST
9292 && (TREE_CODE (arg0) == MIN_EXPR
9293 || TREE_CODE (arg0) == MAX_EXPR)
9294 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9295 {
9296 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9297 if (tem)
9298 return tem;
9299 }
9300
9301 /* Simplify comparison of something with itself. (For IEEE
9302 floating-point, we can only do some of these simplifications.) */
9303 if (operand_equal_p (arg0, arg1, 0))
9304 {
9305 switch (code)
9306 {
9307 case EQ_EXPR:
9308 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9309 || ! HONOR_NANS (arg0))
9310 return constant_boolean_node (1, type);
9311 break;
9312
9313 case GE_EXPR:
9314 case LE_EXPR:
9315 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9316 || ! HONOR_NANS (arg0))
9317 return constant_boolean_node (1, type);
9318 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9319
9320 case NE_EXPR:
9321 /* For NE, we can only do this simplification if integer
9322 or we don't honor IEEE floating point NaNs. */
9323 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9324 && HONOR_NANS (arg0))
9325 break;
9326 /* ... fall through ... */
9327 case GT_EXPR:
9328 case LT_EXPR:
9329 return constant_boolean_node (0, type);
9330 default:
9331 gcc_unreachable ();
9332 }
9333 }
9334
9335 /* If we are comparing an expression that just has comparisons
9336 of two integer values, arithmetic expressions of those comparisons,
9337 and constants, we can simplify it. There are only three cases
9338 to check: the two values can either be equal, the first can be
9339 greater, or the second can be greater. Fold the expression for
9340 those three values. Since each value must be 0 or 1, we have
9341 eight possibilities, each of which corresponds to the constant 0
9342 or 1 or one of the six possible comparisons.
9343
9344 This handles common cases like (a > b) == 0 but also handles
9345 expressions like ((x > y) - (y > x)) > 0, which supposedly
9346 occur in macroized code. */
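  /* For instance, "(a > b) == 0" evaluates to 0, 1 and 1 when a > b,
     a == b and a < b respectively, which selects LE_EXPR below and
     folds the whole expression to "a <= b".  */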
9347
9348 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9349 {
9350 tree cval1 = 0, cval2 = 0;
9351 int save_p = 0;
9352
9353 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9354 /* Don't handle degenerate cases here; they should already
9355 have been handled anyway. */
9356 && cval1 != 0 && cval2 != 0
9357 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9358 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9359 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9360 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9361 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9362 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9363 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9364 {
9365 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9366 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9367
9368 /* We can't just pass T to eval_subst in case cval1 or cval2
9369 was the same as ARG1. */
9370
9371 tree high_result
9372 = fold_build2_loc (loc, code, type,
9373 eval_subst (loc, arg0, cval1, maxval,
9374 cval2, minval),
9375 arg1);
9376 tree equal_result
9377 = fold_build2_loc (loc, code, type,
9378 eval_subst (loc, arg0, cval1, maxval,
9379 cval2, maxval),
9380 arg1);
9381 tree low_result
9382 = fold_build2_loc (loc, code, type,
9383 eval_subst (loc, arg0, cval1, minval,
9384 cval2, maxval),
9385 arg1);
9386
9387 /* All three of these results should be 0 or 1. Confirm they are.
9388 Then use those values to select the proper code to use. */
9389
9390 if (TREE_CODE (high_result) == INTEGER_CST
9391 && TREE_CODE (equal_result) == INTEGER_CST
9392 && TREE_CODE (low_result) == INTEGER_CST)
9393 {
9394 /* Make a 3-bit mask with the high-order bit being the
9395 value for `>', the next for '=', and the low for '<'. */
9396 switch ((integer_onep (high_result) * 4)
9397 + (integer_onep (equal_result) * 2)
9398 + integer_onep (low_result))
9399 {
9400 case 0:
9401 /* Always false. */
9402 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9403 case 1:
9404 code = LT_EXPR;
9405 break;
9406 case 2:
9407 code = EQ_EXPR;
9408 break;
9409 case 3:
9410 code = LE_EXPR;
9411 break;
9412 case 4:
9413 code = GT_EXPR;
9414 break;
9415 case 5:
9416 code = NE_EXPR;
9417 break;
9418 case 6:
9419 code = GE_EXPR;
9420 break;
9421 case 7:
9422 /* Always true. */
9423 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9424 }
9425
9426 if (save_p)
9427 {
9428 tem = save_expr (build2 (code, type, cval1, cval2));
9429 SET_EXPR_LOCATION (tem, loc);
9430 return tem;
9431 }
9432 return fold_build2_loc (loc, code, type, cval1, cval2);
9433 }
9434 }
9435 }
9436
9437 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9438 into a single range test. */
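  /* E.g. "x / 4 == 2" becomes a range test equivalent to
     "8 <= x && x <= 11", since the division truncates towards zero.  */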
9439 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9440 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9441 && TREE_CODE (arg1) == INTEGER_CST
9442 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9443 && !integer_zerop (TREE_OPERAND (arg0, 1))
9444 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9445 && !TREE_OVERFLOW (arg1))
9446 {
9447 tem = fold_div_compare (loc, code, type, arg0, arg1);
9448 if (tem != NULL_TREE)
9449 return tem;
9450 }
9451
9452 /* Fold ~X op ~Y as Y op X. */
9453 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9454 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9455 {
9456 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9457 return fold_build2_loc (loc, code, type,
9458 fold_convert_loc (loc, cmp_type,
9459 TREE_OPERAND (arg1, 0)),
9460 TREE_OPERAND (arg0, 0));
9461 }
9462
9463 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9464 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9465 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9466 {
9467 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9468 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9469 TREE_OPERAND (arg0, 0),
9470 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9471 fold_convert_loc (loc, cmp_type, arg1)));
9472 }
9473
9474 return NULL_TREE;
9475 }
9476
9477
9478 /* Subroutine of fold_binary. Optimize complex multiplications of the
9479 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9480 argument EXPR represents the expression "z" of type TYPE. */
9481
9482 static tree
9483 fold_mult_zconjz (location_t loc, tree type, tree expr)
9484 {
9485 tree itype = TREE_TYPE (type);
9486 tree rpart, ipart, tem;
9487
9488 if (TREE_CODE (expr) == COMPLEX_EXPR)
9489 {
9490 rpart = TREE_OPERAND (expr, 0);
9491 ipart = TREE_OPERAND (expr, 1);
9492 }
9493 else if (TREE_CODE (expr) == COMPLEX_CST)
9494 {
9495 rpart = TREE_REALPART (expr);
9496 ipart = TREE_IMAGPART (expr);
9497 }
9498 else
9499 {
9500 expr = save_expr (expr);
9501 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9502 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9503 }
9504
9505 rpart = save_expr (rpart);
9506 ipart = save_expr (ipart);
9507 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9508 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9509 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9510 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9511 build_zero_cst (itype));
9512 }
9513
9514
9515 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9516 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9517 guarantees that P and N have the same least significant log2(M) bits.
9518 N is not otherwise constrained. In particular, N is not normalized to
9519 0 <= N < M as is common. In general, the precise value of P is unknown.
9520 M is chosen as large as possible such that constant N can be determined.
9521
9522 Returns M and sets *RESIDUE to N.
9523
9524 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9525 account. This is not always possible due to PR 35705.
9526 */
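/* E.g. for EXPR == &x + 4 * i, with &x known to be 16-byte aligned, this
   returns M == 4 with *RESIDUE == 0, i.e. the two least significant bits
   of the pointer value are known to be zero.  */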
9527
9528 static unsigned HOST_WIDE_INT
9529 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9530 bool allow_func_align)
9531 {
9532 enum tree_code code;
9533
9534 *residue = 0;
9535
9536 code = TREE_CODE (expr);
9537 if (code == ADDR_EXPR)
9538 {
9539 unsigned int bitalign;
9540 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9541 *residue /= BITS_PER_UNIT;
9542 return bitalign / BITS_PER_UNIT;
9543 }
9544 else if (code == POINTER_PLUS_EXPR)
9545 {
9546 tree op0, op1;
9547 unsigned HOST_WIDE_INT modulus;
9548 enum tree_code inner_code;
9549
9550 op0 = TREE_OPERAND (expr, 0);
9551 STRIP_NOPS (op0);
9552 modulus = get_pointer_modulus_and_residue (op0, residue,
9553 allow_func_align);
9554
9555 op1 = TREE_OPERAND (expr, 1);
9556 STRIP_NOPS (op1);
9557 inner_code = TREE_CODE (op1);
9558 if (inner_code == INTEGER_CST)
9559 {
9560 *residue += TREE_INT_CST_LOW (op1);
9561 return modulus;
9562 }
9563 else if (inner_code == MULT_EXPR)
9564 {
9565 op1 = TREE_OPERAND (op1, 1);
9566 if (TREE_CODE (op1) == INTEGER_CST)
9567 {
9568 unsigned HOST_WIDE_INT align;
9569
9570 /* Compute the greatest power-of-2 divisor of op1. */
9571 align = TREE_INT_CST_LOW (op1);
9572 align &= -align;
9573
9574 /* If align is non-zero and less than *modulus, replace
9575 		 *modulus with align.  If align is 0, then either op1 is 0
9576 or the greatest power-of-2 divisor of op1 doesn't fit in an
9577 unsigned HOST_WIDE_INT. In either case, no additional
9578 constraint is imposed. */
9579 if (align)
9580 modulus = MIN (modulus, align);
9581
9582 return modulus;
9583 }
9584 }
9585 }
9586
9587 /* If we get here, we were unable to determine anything useful about the
9588 expression. */
9589 return 1;
9590 }
9591
9592 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9593 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9594
9595 static bool
9596 vec_cst_ctor_to_array (tree arg, tree *elts)
9597 {
9598 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9599
9600 if (TREE_CODE (arg) == VECTOR_CST)
9601 {
9602 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9603 elts[i] = VECTOR_CST_ELT (arg, i);
9604 }
9605 else if (TREE_CODE (arg) == CONSTRUCTOR)
9606 {
9607 constructor_elt *elt;
9608
9609 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9610 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9611 return false;
9612 else
9613 elts[i] = elt->value;
9614 }
9615 else
9616 return false;
9617 for (; i < nelts; i++)
9618 elts[i]
9619 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9620 return true;
9621 }
9622
9623 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9624 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9625 NULL_TREE otherwise. */
9626
9627 static tree
9628 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9629 {
9630 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9631 tree *elts;
9632 bool need_ctor = false;
9633
9634 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9635 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9636 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9637 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9638 return NULL_TREE;
9639
9640 elts = XALLOCAVEC (tree, nelts * 3);
9641 if (!vec_cst_ctor_to_array (arg0, elts)
9642 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9643 return NULL_TREE;
9644
9645 for (i = 0; i < nelts; i++)
9646 {
9647 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9648 need_ctor = true;
9649 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9650 }
9651
9652 if (need_ctor)
9653 {
9654 vec<constructor_elt, va_gc> *v;
9655 vec_alloc (v, nelts);
9656 for (i = 0; i < nelts; i++)
9657 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9658 return build_constructor (type, v);
9659 }
9660 else
9661 return build_vector (type, &elts[2 * nelts]);
9662 }
9663
9664 /* Try to fold a pointer difference of type TYPE of two address expressions of
9665 array references AREF0 and AREF1 using location LOC. Return a
9666 simplified expression for the difference or NULL_TREE. */
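/* E.g. for "&a[i] - &a[j]" this produces "(i - j) * sizeof (a[0])"
   converted to TYPE, since both array references share the base "a".  */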
9667
9668 static tree
9669 fold_addr_of_array_ref_difference (location_t loc, tree type,
9670 tree aref0, tree aref1)
9671 {
9672 tree base0 = TREE_OPERAND (aref0, 0);
9673 tree base1 = TREE_OPERAND (aref1, 0);
9674 tree base_offset = build_int_cst (type, 0);
9675
9676 /* If the bases are array references as well, recurse. If the bases
9677 are pointer indirections compute the difference of the pointers.
9678 If the bases are equal, we are set. */
9679 if ((TREE_CODE (base0) == ARRAY_REF
9680 && TREE_CODE (base1) == ARRAY_REF
9681 && (base_offset
9682 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9683 || (INDIRECT_REF_P (base0)
9684 && INDIRECT_REF_P (base1)
9685 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9686 TREE_OPERAND (base0, 0),
9687 TREE_OPERAND (base1, 0))))
9688 || operand_equal_p (base0, base1, 0))
9689 {
9690 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9691 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9692 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9693 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9694 return fold_build2_loc (loc, PLUS_EXPR, type,
9695 base_offset,
9696 fold_build2_loc (loc, MULT_EXPR, type,
9697 diff, esz));
9698 }
9699 return NULL_TREE;
9700 }
9701
9702 /* If the real or vector real constant CST of type TYPE has an exact
9703 inverse, return it, else return NULL. */
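/* E.g. 4.0 yields 0.25, whereas 3.0 yields NULL_TREE because 1.0/3.0 has
   no exact binary floating-point representation.  */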
9704
9705 tree
9706 exact_inverse (tree type, tree cst)
9707 {
9708 REAL_VALUE_TYPE r;
9709 tree unit_type, *elts;
9710 machine_mode mode;
9711 unsigned vec_nelts, i;
9712
9713 switch (TREE_CODE (cst))
9714 {
9715 case REAL_CST:
9716 r = TREE_REAL_CST (cst);
9717
9718 if (exact_real_inverse (TYPE_MODE (type), &r))
9719 return build_real (type, r);
9720
9721 return NULL_TREE;
9722
9723 case VECTOR_CST:
9724 vec_nelts = VECTOR_CST_NELTS (cst);
9725 elts = XALLOCAVEC (tree, vec_nelts);
9726 unit_type = TREE_TYPE (type);
9727 mode = TYPE_MODE (unit_type);
9728
9729 for (i = 0; i < vec_nelts; i++)
9730 {
9731 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9732 if (!exact_real_inverse (mode, &r))
9733 return NULL_TREE;
9734 elts[i] = build_real (unit_type, r);
9735 }
9736
9737 return build_vector (type, elts);
9738
9739 default:
9740 return NULL_TREE;
9741 }
9742 }
9743
9744 /* Mask out the tz least significant bits of X of type TYPE where
9745 tz is the number of trailing zeroes in Y. */
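/* E.g. with Y == 8 (three trailing zero bits) the three least significant
   bits of X are cleared.  */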
9746 static wide_int
9747 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9748 {
9749 int tz = wi::ctz (y);
9750 if (tz > 0)
9751 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9752 return x;
9753 }
9754
9755 /* Return true when T is an address and is known to be nonzero.
9756 For floating point we further ensure that T is not denormal.
9757    Similar logic is present in nonzero_address in rtlanal.c.
9758
9759 If the return value is based on the assumption that signed overflow
9760 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9761 change *STRICT_OVERFLOW_P. */
9762
9763 static bool
9764 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9765 {
9766 tree type = TREE_TYPE (t);
9767 enum tree_code code;
9768
9769 /* Doing something useful for floating point would need more work. */
9770 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9771 return false;
9772
9773 code = TREE_CODE (t);
9774 switch (TREE_CODE_CLASS (code))
9775 {
9776 case tcc_unary:
9777 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9778 strict_overflow_p);
9779 case tcc_binary:
9780 case tcc_comparison:
9781 return tree_binary_nonzero_warnv_p (code, type,
9782 TREE_OPERAND (t, 0),
9783 TREE_OPERAND (t, 1),
9784 strict_overflow_p);
9785 case tcc_constant:
9786 case tcc_declaration:
9787 case tcc_reference:
9788 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9789
9790 default:
9791 break;
9792 }
9793
9794 switch (code)
9795 {
9796 case TRUTH_NOT_EXPR:
9797 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9798 strict_overflow_p);
9799
9800 case TRUTH_AND_EXPR:
9801 case TRUTH_OR_EXPR:
9802 case TRUTH_XOR_EXPR:
9803 return tree_binary_nonzero_warnv_p (code, type,
9804 TREE_OPERAND (t, 0),
9805 TREE_OPERAND (t, 1),
9806 strict_overflow_p);
9807
9808 case COND_EXPR:
9809 case CONSTRUCTOR:
9810 case OBJ_TYPE_REF:
9811 case ASSERT_EXPR:
9812 case ADDR_EXPR:
9813 case WITH_SIZE_EXPR:
9814 case SSA_NAME:
9815 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9816
9817 case COMPOUND_EXPR:
9818 case MODIFY_EXPR:
9819 case BIND_EXPR:
9820 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9821 strict_overflow_p);
9822
9823 case SAVE_EXPR:
9824 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9825 strict_overflow_p);
9826
9827 case CALL_EXPR:
9828 {
9829 tree fndecl = get_callee_fndecl (t);
9830 if (!fndecl) return false;
9831 if (flag_delete_null_pointer_checks && !flag_check_new
9832 && DECL_IS_OPERATOR_NEW (fndecl)
9833 && !TREE_NOTHROW (fndecl))
9834 return true;
9835 if (flag_delete_null_pointer_checks
9836 && lookup_attribute ("returns_nonnull",
9837 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9838 return true;
9839 return alloca_call_p (t);
9840 }
9841
9842 default:
9843 break;
9844 }
9845 return false;
9846 }
9847
9848 /* Return true when T is an address and is known to be nonzero.
9849 Handle warnings about undefined signed overflow. */
9850
9851 static bool
9852 tree_expr_nonzero_p (tree t)
9853 {
9854 bool ret, strict_overflow_p;
9855
9856 strict_overflow_p = false;
9857 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9858 if (strict_overflow_p)
9859 fold_overflow_warning (("assuming signed overflow does not occur when "
9860 "determining that expression is always "
9861 "non-zero"),
9862 WARN_STRICT_OVERFLOW_MISC);
9863 return ret;
9864 }
9865
9866 /* Fold a binary expression of code CODE and type TYPE with operands
9867 OP0 and OP1. LOC is the location of the resulting expression.
9868 Return the folded expression if folding is successful. Otherwise,
9869 return NULL_TREE. */
9870
9871 tree
9872 fold_binary_loc (location_t loc,
9873 enum tree_code code, tree type, tree op0, tree op1)
9874 {
9875 enum tree_code_class kind = TREE_CODE_CLASS (code);
9876 tree arg0, arg1, tem;
9877 tree t1 = NULL_TREE;
9878 bool strict_overflow_p;
9879 unsigned int prec;
9880
9881 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9882 && TREE_CODE_LENGTH (code) == 2
9883 && op0 != NULL_TREE
9884 && op1 != NULL_TREE);
9885
9886 arg0 = op0;
9887 arg1 = op1;
9888
9889 /* Strip any conversions that don't change the mode. This is
9890 safe for every expression, except for a comparison expression
9891 because its signedness is derived from its operands. So, in
9892 the latter case, only strip conversions that don't change the
9893 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9894 preserved.
9895
9896 Note that this is done as an internal manipulation within the
9897 constant folder, in order to find the simplest representation
9898 of the arguments so that their form can be studied. In any
9899 cases, the appropriate type conversions should be put back in
9900 the tree that will get out of the constant folder. */
9901
9902 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9903 {
9904 STRIP_SIGN_NOPS (arg0);
9905 STRIP_SIGN_NOPS (arg1);
9906 }
9907 else
9908 {
9909 STRIP_NOPS (arg0);
9910 STRIP_NOPS (arg1);
9911 }
9912
9913 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9914 constant but we can't do arithmetic on them. */
9915 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9916 {
9917 tem = const_binop (code, type, arg0, arg1);
9918 if (tem != NULL_TREE)
9919 {
9920 if (TREE_TYPE (tem) != type)
9921 tem = fold_convert_loc (loc, type, tem);
9922 return tem;
9923 }
9924 }
9925
9926 /* If this is a commutative operation, and ARG0 is a constant, move it
9927 to ARG1 to reduce the number of tests below. */
9928 if (commutative_tree_code (code)
9929 && tree_swap_operands_p (arg0, arg1, true))
9930 return fold_build2_loc (loc, code, type, op1, op0);
9931
9932 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9933 to ARG1 to reduce the number of tests below. */
9934 if (kind == tcc_comparison
9935 && tree_swap_operands_p (arg0, arg1, true))
9936 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9937
9938 tem = generic_simplify (loc, code, type, op0, op1);
9939 if (tem)
9940 return tem;
9941
9942 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9943
9944 First check for cases where an arithmetic operation is applied to a
9945 compound, conditional, or comparison operation. Push the arithmetic
9946 operation inside the compound or conditional to see if any folding
9947 can then be done. Convert comparison to conditional for this purpose.
9948      This also optimizes non-constant cases that used to be done in
9949 expand_expr.
9950
9951      Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9952      one of the operands is a comparison and the other is a comparison, a
9953 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9954 code below would make the expression more complex. Change it to a
9955 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9956 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9957
9958 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9959 || code == EQ_EXPR || code == NE_EXPR)
9960 && TREE_CODE (type) != VECTOR_TYPE
9961 && ((truth_value_p (TREE_CODE (arg0))
9962 && (truth_value_p (TREE_CODE (arg1))
9963 || (TREE_CODE (arg1) == BIT_AND_EXPR
9964 && integer_onep (TREE_OPERAND (arg1, 1)))))
9965 || (truth_value_p (TREE_CODE (arg1))
9966 && (truth_value_p (TREE_CODE (arg0))
9967 || (TREE_CODE (arg0) == BIT_AND_EXPR
9968 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9969 {
9970 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9971 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9972 : TRUTH_XOR_EXPR,
9973 boolean_type_node,
9974 fold_convert_loc (loc, boolean_type_node, arg0),
9975 fold_convert_loc (loc, boolean_type_node, arg1));
9976
9977 if (code == EQ_EXPR)
9978 tem = invert_truthvalue_loc (loc, tem);
9979
9980 return fold_convert_loc (loc, type, tem);
9981 }
9982
9983 if (TREE_CODE_CLASS (code) == tcc_binary
9984 || TREE_CODE_CLASS (code) == tcc_comparison)
9985 {
9986 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9987 {
9988 tem = fold_build2_loc (loc, code, type,
9989 fold_convert_loc (loc, TREE_TYPE (op0),
9990 TREE_OPERAND (arg0, 1)), op1);
9991 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9992 tem);
9993 }
9994 if (TREE_CODE (arg1) == COMPOUND_EXPR
9995 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9996 {
9997 tem = fold_build2_loc (loc, code, type, op0,
9998 fold_convert_loc (loc, TREE_TYPE (op1),
9999 TREE_OPERAND (arg1, 1)));
10000 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
10001 tem);
10002 }
10003
10004 if (TREE_CODE (arg0) == COND_EXPR
10005 || TREE_CODE (arg0) == VEC_COND_EXPR
10006 || COMPARISON_CLASS_P (arg0))
10007 {
10008 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10009 arg0, arg1,
10010 /*cond_first_p=*/1);
10011 if (tem != NULL_TREE)
10012 return tem;
10013 }
10014
10015 if (TREE_CODE (arg1) == COND_EXPR
10016 || TREE_CODE (arg1) == VEC_COND_EXPR
10017 || COMPARISON_CLASS_P (arg1))
10018 {
10019 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10020 arg1, arg0,
10021 /*cond_first_p=*/0);
10022 if (tem != NULL_TREE)
10023 return tem;
10024 }
10025 }
10026
10027 switch (code)
10028 {
10029 case MEM_REF:
10030 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10031 if (TREE_CODE (arg0) == ADDR_EXPR
10032 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10033 {
10034 tree iref = TREE_OPERAND (arg0, 0);
10035 return fold_build2 (MEM_REF, type,
10036 TREE_OPERAND (iref, 0),
10037 int_const_binop (PLUS_EXPR, arg1,
10038 TREE_OPERAND (iref, 1)));
10039 }
10040
10041 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10042 if (TREE_CODE (arg0) == ADDR_EXPR
10043 && handled_component_p (TREE_OPERAND (arg0, 0)))
10044 {
10045 tree base;
10046 HOST_WIDE_INT coffset;
10047 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10048 &coffset);
10049 if (!base)
10050 return NULL_TREE;
10051 return fold_build2 (MEM_REF, type,
10052 build_fold_addr_expr (base),
10053 int_const_binop (PLUS_EXPR, arg1,
10054 size_int (coffset)));
10055 }
10056
10057 return NULL_TREE;
10058
10059 case POINTER_PLUS_EXPR:
10060 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10061 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10062 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10063 return fold_convert_loc (loc, type,
10064 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10065 fold_convert_loc (loc, sizetype,
10066 arg1),
10067 fold_convert_loc (loc, sizetype,
10068 arg0)));
10069
10070 return NULL_TREE;
10071
10072 case PLUS_EXPR:
10073 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10074 {
10075 /* X + (X / CST) * -CST is X % CST. */
10076 if (TREE_CODE (arg1) == MULT_EXPR
10077 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10078 && operand_equal_p (arg0,
10079 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10080 {
10081 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10082 tree cst1 = TREE_OPERAND (arg1, 1);
10083 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10084 cst1, cst0);
10085 if (sum && integer_zerop (sum))
10086 return fold_convert_loc (loc, type,
10087 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10088 TREE_TYPE (arg0), arg0,
10089 cst0));
10090 }
10091 }
10092
10093 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10094 one. Make sure the type is not saturating and has the signedness of
10095 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10096 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10097 if ((TREE_CODE (arg0) == MULT_EXPR
10098 || TREE_CODE (arg1) == MULT_EXPR)
10099 && !TYPE_SATURATING (type)
10100 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10101 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10102 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10103 {
10104 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10105 if (tem)
10106 return tem;
10107 }
10108
10109 if (! FLOAT_TYPE_P (type))
10110 {
10111 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10112 with a constant, and the two constants have no bits in common,
10113 we should treat this as a BIT_IOR_EXPR since this may produce more
10114 simplifications. */
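	  /* E.g. "(x & 0xf0) + (y & 0x0f)" is treated as
	     "(x & 0xf0) | (y & 0x0f)" since the masks have no bits in
	     common, so no carries can occur.  */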
10115 if (TREE_CODE (arg0) == BIT_AND_EXPR
10116 && TREE_CODE (arg1) == BIT_AND_EXPR
10117 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10118 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10119 && wi::bit_and (TREE_OPERAND (arg0, 1),
10120 TREE_OPERAND (arg1, 1)) == 0)
10121 {
10122 code = BIT_IOR_EXPR;
10123 goto bit_ior;
10124 }
10125
10126 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10127 (plus (plus (mult) (mult)) (foo)) so that we can
10128 take advantage of the factoring cases below. */
10129 if (ANY_INTEGRAL_TYPE_P (type)
10130 && TYPE_OVERFLOW_WRAPS (type)
10131 && (((TREE_CODE (arg0) == PLUS_EXPR
10132 || TREE_CODE (arg0) == MINUS_EXPR)
10133 && TREE_CODE (arg1) == MULT_EXPR)
10134 || ((TREE_CODE (arg1) == PLUS_EXPR
10135 || TREE_CODE (arg1) == MINUS_EXPR)
10136 && TREE_CODE (arg0) == MULT_EXPR)))
10137 {
10138 tree parg0, parg1, parg, marg;
10139 enum tree_code pcode;
10140
10141 if (TREE_CODE (arg1) == MULT_EXPR)
10142 parg = arg0, marg = arg1;
10143 else
10144 parg = arg1, marg = arg0;
10145 pcode = TREE_CODE (parg);
10146 parg0 = TREE_OPERAND (parg, 0);
10147 parg1 = TREE_OPERAND (parg, 1);
10148 STRIP_NOPS (parg0);
10149 STRIP_NOPS (parg1);
10150
10151 if (TREE_CODE (parg0) == MULT_EXPR
10152 && TREE_CODE (parg1) != MULT_EXPR)
10153 return fold_build2_loc (loc, pcode, type,
10154 fold_build2_loc (loc, PLUS_EXPR, type,
10155 fold_convert_loc (loc, type,
10156 parg0),
10157 fold_convert_loc (loc, type,
10158 marg)),
10159 fold_convert_loc (loc, type, parg1));
10160 if (TREE_CODE (parg0) != MULT_EXPR
10161 && TREE_CODE (parg1) == MULT_EXPR)
10162 return
10163 fold_build2_loc (loc, PLUS_EXPR, type,
10164 fold_convert_loc (loc, type, parg0),
10165 fold_build2_loc (loc, pcode, type,
10166 fold_convert_loc (loc, type, marg),
10167 fold_convert_loc (loc, type,
10168 parg1)));
10169 }
10170 }
10171 else
10172 {
10173 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10174 to __complex__ ( x, y ). This is not the same for SNaNs or
10175 if signed zeros are involved. */
10176 if (!HONOR_SNANS (element_mode (arg0))
10177 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10178 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10179 {
10180 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10181 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10182 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10183 bool arg0rz = false, arg0iz = false;
10184 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10185 || (arg0i && (arg0iz = real_zerop (arg0i))))
10186 {
10187 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10188 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10189 if (arg0rz && arg1i && real_zerop (arg1i))
10190 {
10191 tree rp = arg1r ? arg1r
10192 : build1 (REALPART_EXPR, rtype, arg1);
10193 tree ip = arg0i ? arg0i
10194 : build1 (IMAGPART_EXPR, rtype, arg0);
10195 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10196 }
10197 else if (arg0iz && arg1r && real_zerop (arg1r))
10198 {
10199 tree rp = arg0r ? arg0r
10200 : build1 (REALPART_EXPR, rtype, arg0);
10201 tree ip = arg1i ? arg1i
10202 : build1 (IMAGPART_EXPR, rtype, arg1);
10203 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10204 }
10205 }
10206 }
10207
10208 if (flag_unsafe_math_optimizations
10209 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10210 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10211 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10212 return tem;
10213
10214 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10215 We associate floats only if the user has specified
10216 -fassociative-math. */
10217 if (flag_associative_math
10218 && TREE_CODE (arg1) == PLUS_EXPR
10219 && TREE_CODE (arg0) != MULT_EXPR)
10220 {
10221 tree tree10 = TREE_OPERAND (arg1, 0);
10222 tree tree11 = TREE_OPERAND (arg1, 1);
10223 if (TREE_CODE (tree11) == MULT_EXPR
10224 && TREE_CODE (tree10) == MULT_EXPR)
10225 {
10226 tree tree0;
10227 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10228 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10229 }
10230 }
10231 	  /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10232 We associate floats only if the user has specified
10233 -fassociative-math. */
10234 if (flag_associative_math
10235 && TREE_CODE (arg0) == PLUS_EXPR
10236 && TREE_CODE (arg1) != MULT_EXPR)
10237 {
10238 tree tree00 = TREE_OPERAND (arg0, 0);
10239 tree tree01 = TREE_OPERAND (arg0, 1);
10240 if (TREE_CODE (tree01) == MULT_EXPR
10241 && TREE_CODE (tree00) == MULT_EXPR)
10242 {
10243 tree tree0;
10244 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10245 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10246 }
10247 }
10248 }
10249
10250 bit_rotate:
10251 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10252 is a rotate of A by C1 bits. */
10253 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10254 is a rotate of A by B bits. */
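    /* E.g. for a 32-bit unsigned A, "(A << 5) + (A >> 27)" is recognized
       as a rotate of A left by 5 bits.  */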
10255 {
10256 enum tree_code code0, code1;
10257 tree rtype;
10258 code0 = TREE_CODE (arg0);
10259 code1 = TREE_CODE (arg1);
10260 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10261 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10262 && operand_equal_p (TREE_OPERAND (arg0, 0),
10263 TREE_OPERAND (arg1, 0), 0)
10264 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10265 TYPE_UNSIGNED (rtype))
10266 /* Only create rotates in complete modes. Other cases are not
10267 expanded properly. */
10268 && (element_precision (rtype)
10269 == element_precision (TYPE_MODE (rtype))))
10270 {
10271 tree tree01, tree11;
10272 enum tree_code code01, code11;
10273
10274 tree01 = TREE_OPERAND (arg0, 1);
10275 tree11 = TREE_OPERAND (arg1, 1);
10276 STRIP_NOPS (tree01);
10277 STRIP_NOPS (tree11);
10278 code01 = TREE_CODE (tree01);
10279 code11 = TREE_CODE (tree11);
10280 if (code01 == INTEGER_CST
10281 && code11 == INTEGER_CST
10282 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10283 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10284 {
10285 tem = build2_loc (loc, LROTATE_EXPR,
10286 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10287 TREE_OPERAND (arg0, 0),
10288 code0 == LSHIFT_EXPR
10289 ? TREE_OPERAND (arg0, 1)
10290 : TREE_OPERAND (arg1, 1));
10291 return fold_convert_loc (loc, type, tem);
10292 }
10293 else if (code11 == MINUS_EXPR)
10294 {
10295 tree tree110, tree111;
10296 tree110 = TREE_OPERAND (tree11, 0);
10297 tree111 = TREE_OPERAND (tree11, 1);
10298 STRIP_NOPS (tree110);
10299 STRIP_NOPS (tree111);
10300 if (TREE_CODE (tree110) == INTEGER_CST
10301 && 0 == compare_tree_int (tree110,
10302 element_precision
10303 (TREE_TYPE (TREE_OPERAND
10304 (arg0, 0))))
10305 && operand_equal_p (tree01, tree111, 0))
10306 return
10307 fold_convert_loc (loc, type,
10308 build2 ((code0 == LSHIFT_EXPR
10309 ? LROTATE_EXPR
10310 : RROTATE_EXPR),
10311 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10312 TREE_OPERAND (arg0, 0),
10313 TREE_OPERAND (arg0, 1)));
10314 }
10315 else if (code01 == MINUS_EXPR)
10316 {
10317 tree tree010, tree011;
10318 tree010 = TREE_OPERAND (tree01, 0);
10319 tree011 = TREE_OPERAND (tree01, 1);
10320 STRIP_NOPS (tree010);
10321 STRIP_NOPS (tree011);
10322 if (TREE_CODE (tree010) == INTEGER_CST
10323 && 0 == compare_tree_int (tree010,
10324 element_precision
10325 (TREE_TYPE (TREE_OPERAND
10326 (arg0, 0))))
10327 && operand_equal_p (tree11, tree011, 0))
10328 return fold_convert_loc
10329 (loc, type,
10330 build2 ((code0 != LSHIFT_EXPR
10331 ? LROTATE_EXPR
10332 : RROTATE_EXPR),
10333 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10334 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10335 }
10336 }
10337 }
10338
10339 associate:
10340 	 /* In most languages, we can't associate operations on floats through
10341 parentheses. Rather than remember where the parentheses were, we
10342 don't associate floats at all, unless the user has specified
10343 -fassociative-math.
10344 	 And we need to make sure the type is not saturating.  */
10345
10346 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10347 && !TYPE_SATURATING (type))
10348 {
10349 tree var0, con0, lit0, minus_lit0;
10350 tree var1, con1, lit1, minus_lit1;
10351 tree atype = type;
10352 bool ok = true;
10353
10354 /* Split both trees into variables, constants, and literals. Then
10355 associate each group together, the constants with literals,
10356 then the result with variables. This increases the chances of
10357 literals being recombined later and of generating relocatable
10358 expressions for the sum of a constant and literal. */
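	 /* For example, given (x + 4) + 5 the split yields the variable x and
	    the literals 4 and 5; recombining the literals first lets the
	    whole expression fold to x + 9.  */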
10359 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10360 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10361 code == MINUS_EXPR);
10362
10363 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10364 if (code == MINUS_EXPR)
10365 code = PLUS_EXPR;
10366
10367 /* With undefined overflow prefer doing association in a type
10368 which wraps on overflow, if that is one of the operand types. */
10369 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10370 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10371 {
10372 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10373 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10374 atype = TREE_TYPE (arg0);
10375 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10376 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10377 atype = TREE_TYPE (arg1);
10378 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10379 }
10380
10381 /* With undefined overflow we can only associate constants with one
10382 variable, and constants whose association doesn't overflow. */
10383 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10384 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10385 {
10386 if (var0 && var1)
10387 {
10388 tree tmp0 = var0;
10389 tree tmp1 = var1;
10390
10391 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10392 tmp0 = TREE_OPERAND (tmp0, 0);
10393 if (CONVERT_EXPR_P (tmp0)
10394 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10395 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10396 <= TYPE_PRECISION (atype)))
10397 tmp0 = TREE_OPERAND (tmp0, 0);
10398 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10399 tmp1 = TREE_OPERAND (tmp1, 0);
10400 if (CONVERT_EXPR_P (tmp1)
10401 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10402 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10403 <= TYPE_PRECISION (atype)))
10404 tmp1 = TREE_OPERAND (tmp1, 0);
10405 /* The only case we can still associate with two variables
10406 is if they are the same, modulo negation and bit-pattern
10407 preserving conversions. */
10408 if (!operand_equal_p (tmp0, tmp1, 0))
10409 ok = false;
10410 }
10411 }
10412
10413 /* Only do something if we found more than two objects. Otherwise,
10414 nothing has changed and we risk infinite recursion. */
10415 if (ok
10416 && (2 < ((var0 != 0) + (var1 != 0)
10417 + (con0 != 0) + (con1 != 0)
10418 + (lit0 != 0) + (lit1 != 0)
10419 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10420 {
10421 bool any_overflows = false;
10422 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10423 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10424 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10425 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10426 var0 = associate_trees (loc, var0, var1, code, atype);
10427 con0 = associate_trees (loc, con0, con1, code, atype);
10428 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10429 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10430 code, atype);
10431
10432 /* Preserve the MINUS_EXPR if the negative part of the literal is
10433 greater than the positive part. Otherwise, the multiplicative
10434 	     folding code (i.e. extract_muldiv) may be fooled when
10435 	     unsigned constants are subtracted, as in the following
10436 example: ((X*2 + 4) - 8U)/2. */
10437 if (minus_lit0 && lit0)
10438 {
10439 if (TREE_CODE (lit0) == INTEGER_CST
10440 && TREE_CODE (minus_lit0) == INTEGER_CST
10441 && tree_int_cst_lt (lit0, minus_lit0))
10442 {
10443 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10444 MINUS_EXPR, atype);
10445 lit0 = 0;
10446 }
10447 else
10448 {
10449 lit0 = associate_trees (loc, lit0, minus_lit0,
10450 MINUS_EXPR, atype);
10451 minus_lit0 = 0;
10452 }
10453 }
10454
10455 /* Don't introduce overflows through reassociation. */
10456 if (!any_overflows
10457 && ((lit0 && TREE_OVERFLOW_P (lit0))
10458 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10459 return NULL_TREE;
10460
10461 if (minus_lit0)
10462 {
10463 if (con0 == 0)
10464 return
10465 fold_convert_loc (loc, type,
10466 associate_trees (loc, var0, minus_lit0,
10467 MINUS_EXPR, atype));
10468 else
10469 {
10470 con0 = associate_trees (loc, con0, minus_lit0,
10471 MINUS_EXPR, atype);
10472 return
10473 fold_convert_loc (loc, type,
10474 associate_trees (loc, var0, con0,
10475 PLUS_EXPR, atype));
10476 }
10477 }
10478
10479 con0 = associate_trees (loc, con0, lit0, code, atype);
10480 return
10481 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10482 code, atype));
10483 }
10484 }
10485
10486 return NULL_TREE;
10487
10488 case MINUS_EXPR:
10489 /* Pointer simplifications for subtraction, simple reassociations. */
10490 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10491 {
10492 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10493 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10494 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10495 {
10496 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10497 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10498 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10499 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10500 return fold_build2_loc (loc, PLUS_EXPR, type,
10501 fold_build2_loc (loc, MINUS_EXPR, type,
10502 arg00, arg10),
10503 fold_build2_loc (loc, MINUS_EXPR, type,
10504 arg01, arg11));
10505 }
10506 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10507 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10508 {
10509 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10510 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10511 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10512 fold_convert_loc (loc, type, arg1));
10513 if (tmp)
10514 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10515 }
10516 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10517 simplifies. */
10518 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10519 {
10520 tree arg10 = fold_convert_loc (loc, type,
10521 TREE_OPERAND (arg1, 0));
10522 tree arg11 = fold_convert_loc (loc, type,
10523 TREE_OPERAND (arg1, 1));
10524 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10525 fold_convert_loc (loc, type, arg0),
10526 arg10);
10527 if (tmp)
10528 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10529 }
10530 }
10531 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10532 if (TREE_CODE (arg0) == NEGATE_EXPR
10533 && negate_expr_p (arg1)
10534 && reorder_operands_p (arg0, arg1))
10535 return fold_build2_loc (loc, MINUS_EXPR, type,
10536 fold_convert_loc (loc, type,
10537 negate_expr (arg1)),
10538 fold_convert_loc (loc, type,
10539 TREE_OPERAND (arg0, 0)));
10540
10541 /* X - (X / Y) * Y is X % Y. */
10542 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10543 && TREE_CODE (arg1) == MULT_EXPR
10544 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10545 && operand_equal_p (arg0,
10546 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10547 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10548 TREE_OPERAND (arg1, 1), 0))
10549 return
10550 fold_convert_loc (loc, type,
10551 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10552 arg0, TREE_OPERAND (arg1, 1)));
10553
10554 if (! FLOAT_TYPE_P (type))
10555 {
10556 /* Fold A - (A & B) into ~B & A. */
10557 if (!TREE_SIDE_EFFECTS (arg0)
10558 && TREE_CODE (arg1) == BIT_AND_EXPR)
10559 {
10560 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10561 {
10562 tree arg10 = fold_convert_loc (loc, type,
10563 TREE_OPERAND (arg1, 0));
10564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10565 fold_build1_loc (loc, BIT_NOT_EXPR,
10566 type, arg10),
10567 fold_convert_loc (loc, type, arg0));
10568 }
10569 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10570 {
10571 tree arg11 = fold_convert_loc (loc,
10572 type, TREE_OPERAND (arg1, 1));
10573 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10574 fold_build1_loc (loc, BIT_NOT_EXPR,
10575 type, arg11),
10576 fold_convert_loc (loc, type, arg0));
10577 }
10578 }
10579
10580 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10581 any power of 2 minus 1. */
10582 if (TREE_CODE (arg0) == BIT_AND_EXPR
10583 && TREE_CODE (arg1) == BIT_AND_EXPR
10584 && operand_equal_p (TREE_OPERAND (arg0, 0),
10585 TREE_OPERAND (arg1, 0), 0))
10586 {
10587 tree mask0 = TREE_OPERAND (arg0, 1);
10588 tree mask1 = TREE_OPERAND (arg1, 1);
10589 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10590
10591 if (operand_equal_p (tem, mask1, 0))
10592 {
10593 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10594 TREE_OPERAND (arg0, 0), mask1);
10595 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10596 }
10597 }
10598 }
10599
10600 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10601 __complex__ ( x, -y ). This is not the same for SNaNs or if
10602 signed zeros are involved. */
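	 /* The signed-zero caveat: if y is +0.0, the exact imaginary part
	    0.0 - (+0.0) is +0.0, whereas the negation of y is -0.0.  */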
10603 if (!HONOR_SNANS (element_mode (arg0))
10604 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10605 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10606 {
10607 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10608 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10609 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10610 bool arg0rz = false, arg0iz = false;
10611 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10612 || (arg0i && (arg0iz = real_zerop (arg0i))))
10613 {
10614 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10615 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10616 if (arg0rz && arg1i && real_zerop (arg1i))
10617 {
10618 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10619 arg1r ? arg1r
10620 : build1 (REALPART_EXPR, rtype, arg1));
10621 tree ip = arg0i ? arg0i
10622 : build1 (IMAGPART_EXPR, rtype, arg0);
10623 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10624 }
10625 else if (arg0iz && arg1r && real_zerop (arg1r))
10626 {
10627 tree rp = arg0r ? arg0r
10628 : build1 (REALPART_EXPR, rtype, arg0);
10629 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10630 arg1i ? arg1i
10631 : build1 (IMAGPART_EXPR, rtype, arg1));
10632 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10633 }
10634 }
10635 }
10636
10637 /* A - B -> A + (-B) if B is easily negatable. */
10638 if (negate_expr_p (arg1)
10639 && !TYPE_OVERFLOW_SANITIZED (type)
10640 && ((FLOAT_TYPE_P (type)
10641 /* Avoid this transformation if B is a positive REAL_CST. */
10642 && (TREE_CODE (arg1) != REAL_CST
10643 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10644 || INTEGRAL_TYPE_P (type)))
10645 return fold_build2_loc (loc, PLUS_EXPR, type,
10646 fold_convert_loc (loc, type, arg0),
10647 fold_convert_loc (loc, type,
10648 negate_expr (arg1)));
10649
10650 /* Try folding difference of addresses. */
10651 {
10652 HOST_WIDE_INT diff;
10653
10654 if ((TREE_CODE (arg0) == ADDR_EXPR
10655 || TREE_CODE (arg1) == ADDR_EXPR)
10656 && ptr_difference_const (arg0, arg1, &diff))
10657 return build_int_cst_type (type, diff);
10658 }
10659
10660 /* Fold &a[i] - &a[j] to i-j. */
10661 if (TREE_CODE (arg0) == ADDR_EXPR
10662 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10663 && TREE_CODE (arg1) == ADDR_EXPR
10664 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10665 {
10666 tree tem = fold_addr_of_array_ref_difference (loc, type,
10667 TREE_OPERAND (arg0, 0),
10668 TREE_OPERAND (arg1, 0));
10669 if (tem)
10670 return tem;
10671 }
10672
10673 if (FLOAT_TYPE_P (type)
10674 && flag_unsafe_math_optimizations
10675 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10676 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10677 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10678 return tem;
10679
10680 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10681 one. Make sure the type is not saturating and has the signedness of
10682 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10683 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10684 if ((TREE_CODE (arg0) == MULT_EXPR
10685 || TREE_CODE (arg1) == MULT_EXPR)
10686 && !TYPE_SATURATING (type)
10687 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10688 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10689 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10690 {
10691 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10692 if (tem)
10693 return tem;
10694 }
10695
10696 goto associate;
10697
10698 case MULT_EXPR:
10699 /* (-A) * (-B) -> A * B */
10700 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10701 return fold_build2_loc (loc, MULT_EXPR, type,
10702 fold_convert_loc (loc, type,
10703 TREE_OPERAND (arg0, 0)),
10704 fold_convert_loc (loc, type,
10705 negate_expr (arg1)));
10706 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10707 return fold_build2_loc (loc, MULT_EXPR, type,
10708 fold_convert_loc (loc, type,
10709 negate_expr (arg0)),
10710 fold_convert_loc (loc, type,
10711 TREE_OPERAND (arg1, 0)));
10712
10713 if (! FLOAT_TYPE_P (type))
10714 {
10715 /* Transform x * -C into -x * C if x is easily negatable. */
10716 if (TREE_CODE (arg1) == INTEGER_CST
10717 && tree_int_cst_sgn (arg1) == -1
10718 && negate_expr_p (arg0)
10719 && (tem = negate_expr (arg1)) != arg1
10720 && !TREE_OVERFLOW (tem))
10721 return fold_build2_loc (loc, MULT_EXPR, type,
10722 fold_convert_loc (loc, type,
10723 negate_expr (arg0)),
10724 tem);
10725
10726 /* (a * (1 << b)) is (a << b) */
10727 if (TREE_CODE (arg1) == LSHIFT_EXPR
10728 && integer_onep (TREE_OPERAND (arg1, 0)))
10729 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10730 TREE_OPERAND (arg1, 1));
10731 if (TREE_CODE (arg0) == LSHIFT_EXPR
10732 && integer_onep (TREE_OPERAND (arg0, 0)))
10733 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10734 TREE_OPERAND (arg0, 1));
10735
10736 /* (A + A) * C -> A * 2 * C */
10737 if (TREE_CODE (arg0) == PLUS_EXPR
10738 && TREE_CODE (arg1) == INTEGER_CST
10739 && operand_equal_p (TREE_OPERAND (arg0, 0),
10740 TREE_OPERAND (arg0, 1), 0))
10741 return fold_build2_loc (loc, MULT_EXPR, type,
10742 omit_one_operand_loc (loc, type,
10743 TREE_OPERAND (arg0, 0),
10744 TREE_OPERAND (arg0, 1)),
10745 fold_build2_loc (loc, MULT_EXPR, type,
10746 				       build_int_cst (type, 2), arg1));
10747
10748 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10749 sign-changing only. */
10750 if (TREE_CODE (arg1) == INTEGER_CST
10751 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10752 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10753 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10754
10755 strict_overflow_p = false;
10756 if (TREE_CODE (arg1) == INTEGER_CST
10757 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10758 &strict_overflow_p)))
10759 {
10760 if (strict_overflow_p)
10761 fold_overflow_warning (("assuming signed overflow does not "
10762 "occur when simplifying "
10763 "multiplication"),
10764 WARN_STRICT_OVERFLOW_MISC);
10765 return fold_convert_loc (loc, type, tem);
10766 }
10767
10768 /* Optimize z * conj(z) for integer complex numbers. */
10769 if (TREE_CODE (arg0) == CONJ_EXPR
10770 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10771 return fold_mult_zconjz (loc, type, arg1);
10772 if (TREE_CODE (arg1) == CONJ_EXPR
10773 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10774 return fold_mult_zconjz (loc, type, arg0);
10775 }
10776 else
10777 {
10778 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10779 	     the result for floating-point types due to rounding, so it is applied
10780 	     only if -fassociative-math was specified.  */
10781 if (flag_associative_math
10782 && TREE_CODE (arg0) == RDIV_EXPR
10783 && TREE_CODE (arg1) == REAL_CST
10784 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10785 {
10786 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10787 arg1);
10788 if (tem)
10789 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10790 TREE_OPERAND (arg0, 1));
10791 }
10792
10793 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10794 if (operand_equal_p (arg0, arg1, 0))
10795 {
10796 tree tem = fold_strip_sign_ops (arg0);
10797 if (tem != NULL_TREE)
10798 {
10799 tem = fold_convert_loc (loc, type, tem);
10800 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10801 }
10802 }
10803
10804 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10805 This is not the same for NaNs or if signed zeros are
10806 involved. */
10807 if (!HONOR_NANS (arg0)
10808 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10809 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10810 && TREE_CODE (arg1) == COMPLEX_CST
10811 && real_zerop (TREE_REALPART (arg1)))
10812 {
10813 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10814 if (real_onep (TREE_IMAGPART (arg1)))
10815 return
10816 fold_build2_loc (loc, COMPLEX_EXPR, type,
10817 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10818 rtype, arg0)),
10819 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10820 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10821 return
10822 fold_build2_loc (loc, COMPLEX_EXPR, type,
10823 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10824 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10825 rtype, arg0)));
10826 }
10827
10828 /* Optimize z * conj(z) for floating point complex numbers.
10829 Guarded by flag_unsafe_math_optimizations as non-finite
10830 imaginary components don't produce scalar results. */
10831 if (flag_unsafe_math_optimizations
10832 && TREE_CODE (arg0) == CONJ_EXPR
10833 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10834 return fold_mult_zconjz (loc, type, arg1);
10835 if (flag_unsafe_math_optimizations
10836 && TREE_CODE (arg1) == CONJ_EXPR
10837 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10838 return fold_mult_zconjz (loc, type, arg0);
10839
10840 if (flag_unsafe_math_optimizations)
10841 {
10842 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10843 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10844
10845 /* Optimizations of root(...)*root(...). */
10846 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10847 {
10848 tree rootfn, arg;
10849 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10850 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10851
10852 /* Optimize sqrt(x)*sqrt(x) as x. */
10853 if (BUILTIN_SQRT_P (fcode0)
10854 && operand_equal_p (arg00, arg10, 0)
10855 && ! HONOR_SNANS (element_mode (type)))
10856 return arg00;
10857
10858 /* Optimize root(x)*root(y) as root(x*y). */
10859 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10860 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10861 return build_call_expr_loc (loc, rootfn, 1, arg);
10862 }
10863
10864 /* Optimize expN(x)*expN(y) as expN(x+y). */
10865 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10866 {
10867 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10868 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10869 CALL_EXPR_ARG (arg0, 0),
10870 CALL_EXPR_ARG (arg1, 0));
10871 return build_call_expr_loc (loc, expfn, 1, arg);
10872 }
10873
10874 /* Optimizations of pow(...)*pow(...). */
10875 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10876 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10877 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10878 {
10879 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10880 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10881 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10882 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10883
10884 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10885 if (operand_equal_p (arg01, arg11, 0))
10886 {
10887 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10888 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10889 arg00, arg10);
10890 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10891 }
10892
10893 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10894 if (operand_equal_p (arg00, arg10, 0))
10895 {
10896 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10897 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10898 arg01, arg11);
10899 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10900 }
10901 }
10902
10903 /* Optimize tan(x)*cos(x) as sin(x). */
10904 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10905 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10906 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10907 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10908 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10909 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10910 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10911 CALL_EXPR_ARG (arg1, 0), 0))
10912 {
10913 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10914
10915 if (sinfn != NULL_TREE)
10916 return build_call_expr_loc (loc, sinfn, 1,
10917 CALL_EXPR_ARG (arg0, 0));
10918 }
10919
10920 /* Optimize x*pow(x,c) as pow(x,c+1). */
10921 if (fcode1 == BUILT_IN_POW
10922 || fcode1 == BUILT_IN_POWF
10923 || fcode1 == BUILT_IN_POWL)
10924 {
10925 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10926 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10927 if (TREE_CODE (arg11) == REAL_CST
10928 && !TREE_OVERFLOW (arg11)
10929 && operand_equal_p (arg0, arg10, 0))
10930 {
10931 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10932 REAL_VALUE_TYPE c;
10933 tree arg;
10934
10935 c = TREE_REAL_CST (arg11);
10936 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10937 arg = build_real (type, c);
10938 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10939 }
10940 }
10941
10942 /* Optimize pow(x,c)*x as pow(x,c+1). */
10943 if (fcode0 == BUILT_IN_POW
10944 || fcode0 == BUILT_IN_POWF
10945 || fcode0 == BUILT_IN_POWL)
10946 {
10947 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10948 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10949 if (TREE_CODE (arg01) == REAL_CST
10950 && !TREE_OVERFLOW (arg01)
10951 && operand_equal_p (arg1, arg00, 0))
10952 {
10953 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10954 REAL_VALUE_TYPE c;
10955 tree arg;
10956
10957 c = TREE_REAL_CST (arg01);
10958 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10959 arg = build_real (type, c);
10960 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10961 }
10962 }
10963
10964 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10965 if (!in_gimple_form
10966 && optimize
10967 && operand_equal_p (arg0, arg1, 0))
10968 {
10969 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10970
10971 if (powfn)
10972 {
10973 tree arg = build_real (type, dconst2);
10974 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10975 }
10976 }
10977 }
10978 }
10979 goto associate;
10980
10981 case BIT_IOR_EXPR:
10982 bit_ior:
10983 /* ~X | X is -1. */
10984 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10985 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10986 {
10987 t1 = build_zero_cst (type);
10988 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10989 return omit_one_operand_loc (loc, type, t1, arg1);
10990 }
10991
10992 /* X | ~X is -1. */
10993 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10994 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10995 {
10996 t1 = build_zero_cst (type);
10997 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10998 return omit_one_operand_loc (loc, type, t1, arg0);
10999 }
11000
11001 /* Canonicalize (X & C1) | C2. */
11002 if (TREE_CODE (arg0) == BIT_AND_EXPR
11003 && TREE_CODE (arg1) == INTEGER_CST
11004 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11005 {
11006 int width = TYPE_PRECISION (type), w;
11007 wide_int c1 = TREE_OPERAND (arg0, 1);
11008 wide_int c2 = arg1;
11009
11010 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11011 if ((c1 & c2) == c1)
11012 return omit_one_operand_loc (loc, type, arg1,
11013 TREE_OPERAND (arg0, 0));
11014
11015 wide_int msk = wi::mask (width, false,
11016 TYPE_PRECISION (TREE_TYPE (arg1)));
11017
11018 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11019 if (msk.and_not (c1 | c2) == 0)
11020 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11021 TREE_OPERAND (arg0, 0), arg1);
11022
11023 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11024 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11025 mode which allows further optimizations. */
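	  /* For example, (X & 0x0f) | 0x03 becomes (X & 0x0c) | 0x03: the
	     bits of C2 are set in the result regardless of C1, so they can
	     be dropped from C1.  */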
11026 c1 &= msk;
11027 c2 &= msk;
11028 wide_int c3 = c1.and_not (c2);
11029 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11030 {
11031 wide_int mask = wi::mask (w, false,
11032 TYPE_PRECISION (type));
11033 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11034 {
11035 c3 = mask;
11036 break;
11037 }
11038 }
11039
11040 if (c3 != c1)
11041 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11042 fold_build2_loc (loc, BIT_AND_EXPR, type,
11043 TREE_OPERAND (arg0, 0),
11044 wide_int_to_tree (type,
11045 c3)),
11046 arg1);
11047 }
11048
11049 /* (X & ~Y) | (~X & Y) is X ^ Y */
11050 if (TREE_CODE (arg0) == BIT_AND_EXPR
11051 && TREE_CODE (arg1) == BIT_AND_EXPR)
11052 {
11053 tree a0, a1, l0, l1, n0, n1;
11054
11055 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11056 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11057
11058 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11059 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11060
11061 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11062 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11063
11064 if ((operand_equal_p (n0, a0, 0)
11065 && operand_equal_p (n1, a1, 0))
11066 || (operand_equal_p (n0, a1, 0)
11067 && operand_equal_p (n1, a0, 0)))
11068 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11069 }
11070
11071 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11072 if (t1 != NULL_TREE)
11073 return t1;
11074
11075 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11076
11077 This results in more efficient code for machines without a NAND
11078 instruction. Combine will canonicalize to the first form
11079 which will allow use of NAND instructions provided by the
11080 backend if they exist. */
11081 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11082 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11083 {
11084 return
11085 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11086 build2 (BIT_AND_EXPR, type,
11087 fold_convert_loc (loc, type,
11088 TREE_OPERAND (arg0, 0)),
11089 fold_convert_loc (loc, type,
11090 TREE_OPERAND (arg1, 0))));
11091 }
11092
11093 /* See if this can be simplified into a rotate first. If that
11094 is unsuccessful continue in the association code. */
11095 goto bit_rotate;
11096
11097 case BIT_XOR_EXPR:
11098 /* ~X ^ X is -1. */
11099 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11100 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11101 {
11102 t1 = build_zero_cst (type);
11103 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11104 return omit_one_operand_loc (loc, type, t1, arg1);
11105 }
11106
11107 /* X ^ ~X is -1. */
11108 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11109 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11110 {
11111 t1 = build_zero_cst (type);
11112 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11113 return omit_one_operand_loc (loc, type, t1, arg0);
11114 }
11115
11116 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11117 with a constant, and the two constants have no bits in common,
11118 we should treat this as a BIT_IOR_EXPR since this may produce more
11119 simplifications. */
11120 if (TREE_CODE (arg0) == BIT_AND_EXPR
11121 && TREE_CODE (arg1) == BIT_AND_EXPR
11122 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11123 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11124 && wi::bit_and (TREE_OPERAND (arg0, 1),
11125 TREE_OPERAND (arg1, 1)) == 0)
11126 {
11127 code = BIT_IOR_EXPR;
11128 goto bit_ior;
11129 }
11130
11131       /* (X | Y) ^ X -> Y & ~X.  */
11132 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11133 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11134 {
11135 tree t2 = TREE_OPERAND (arg0, 1);
11136 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11137 arg1);
11138 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11139 fold_convert_loc (loc, type, t2),
11140 fold_convert_loc (loc, type, t1));
11141 return t1;
11142 }
11143
11144       /* (Y | X) ^ X -> Y & ~X.  */
11145 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11146 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11147 {
11148 tree t2 = TREE_OPERAND (arg0, 0);
11149 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11150 arg1);
11151 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11152 fold_convert_loc (loc, type, t2),
11153 fold_convert_loc (loc, type, t1));
11154 return t1;
11155 }
11156
11157       /* X ^ (X | Y) -> Y & ~X.  */
11158 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11159 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11160 {
11161 tree t2 = TREE_OPERAND (arg1, 1);
11162 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11163 arg0);
11164 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11165 fold_convert_loc (loc, type, t2),
11166 fold_convert_loc (loc, type, t1));
11167 return t1;
11168 }
11169
11170       /* X ^ (Y | X) -> Y & ~X.  */
11171 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11172 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11173 {
11174 tree t2 = TREE_OPERAND (arg1, 0);
11175 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11176 arg0);
11177 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11178 fold_convert_loc (loc, type, t2),
11179 fold_convert_loc (loc, type, t1));
11180 return t1;
11181 }
11182
11183 /* Convert ~X ^ ~Y to X ^ Y. */
11184 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11185 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11186 return fold_build2_loc (loc, code, type,
11187 fold_convert_loc (loc, type,
11188 TREE_OPERAND (arg0, 0)),
11189 fold_convert_loc (loc, type,
11190 TREE_OPERAND (arg1, 0)));
11191
11192 /* Convert ~X ^ C to X ^ ~C. */
11193 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11194 && TREE_CODE (arg1) == INTEGER_CST)
11195 return fold_build2_loc (loc, code, type,
11196 fold_convert_loc (loc, type,
11197 TREE_OPERAND (arg0, 0)),
11198 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11199
11200 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11201 if (TREE_CODE (arg0) == BIT_AND_EXPR
11202 && INTEGRAL_TYPE_P (type)
11203 && integer_onep (TREE_OPERAND (arg0, 1))
11204 && integer_onep (arg1))
11205 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11206 build_zero_cst (TREE_TYPE (arg0)));
11207
11208 /* Fold (X & Y) ^ Y as ~X & Y. */
11209 if (TREE_CODE (arg0) == BIT_AND_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11211 {
11212 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11213 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11214 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11215 fold_convert_loc (loc, type, arg1));
11216 }
11217 /* Fold (X & Y) ^ X as ~Y & X. */
11218 if (TREE_CODE (arg0) == BIT_AND_EXPR
11219 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11220 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11221 {
11222 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11223 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11224 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11225 fold_convert_loc (loc, type, arg1));
11226 }
11227 /* Fold X ^ (X & Y) as X & ~Y. */
11228 if (TREE_CODE (arg1) == BIT_AND_EXPR
11229 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11230 {
11231 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11232 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11233 fold_convert_loc (loc, type, arg0),
11234 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11235 }
11236 /* Fold X ^ (Y & X) as ~Y & X. */
11237 if (TREE_CODE (arg1) == BIT_AND_EXPR
11238 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11239 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11240 {
11241 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11242 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11243 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11244 fold_convert_loc (loc, type, arg0));
11245 }
11246
11247 /* See if this can be simplified into a rotate first. If that
11248 is unsuccessful continue in the association code. */
11249 goto bit_rotate;
11250
11251 case BIT_AND_EXPR:
11252 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11253 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11254 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11255 || (TREE_CODE (arg0) == EQ_EXPR
11256 && integer_zerop (TREE_OPERAND (arg0, 1))))
11257 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11258 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11259
11260       /* X & ~X, X & (X == 0), and X & !X are always zero.  */
11261 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11262 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11263 || (TREE_CODE (arg1) == EQ_EXPR
11264 && integer_zerop (TREE_OPERAND (arg1, 1))))
11265 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11266 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11267
11268 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11269 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11270 && INTEGRAL_TYPE_P (type)
11271 && integer_onep (TREE_OPERAND (arg0, 1))
11272 && integer_onep (arg1))
11273 {
11274 tree tem2;
11275 tem = TREE_OPERAND (arg0, 0);
11276 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11277 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11278 tem, tem2);
11279 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11280 build_zero_cst (TREE_TYPE (tem)));
11281 }
11282 /* Fold ~X & 1 as (X & 1) == 0. */
11283 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11284 && INTEGRAL_TYPE_P (type)
11285 && integer_onep (arg1))
11286 {
11287 tree tem2;
11288 tem = TREE_OPERAND (arg0, 0);
11289 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11290 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11291 tem, tem2);
11292 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11293 build_zero_cst (TREE_TYPE (tem)));
11294 }
11295 /* Fold !X & 1 as X == 0. */
11296 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11297 && integer_onep (arg1))
11298 {
11299 tem = TREE_OPERAND (arg0, 0);
11300 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11301 build_zero_cst (TREE_TYPE (tem)));
11302 }
11303
11304 /* Fold (X ^ Y) & Y as ~X & Y. */
11305 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11306 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11307 {
11308 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11309 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11310 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11311 fold_convert_loc (loc, type, arg1));
11312 }
11313 /* Fold (X ^ Y) & X as ~Y & X. */
11314 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11315 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11316 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11317 {
11318 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11319 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11320 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11321 fold_convert_loc (loc, type, arg1));
11322 }
11323 /* Fold X & (X ^ Y) as X & ~Y. */
11324 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11325 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11326 {
11327 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11328 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11329 fold_convert_loc (loc, type, arg0),
11330 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11331 }
11332 /* Fold X & (Y ^ X) as ~Y & X. */
11333 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11334 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11335 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11336 {
11337 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11338 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11339 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11340 fold_convert_loc (loc, type, arg0));
11341 }
11342
11343 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11344 multiple of 1 << CST. */
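      /* For example, (X * 4) & -4 folds to X * 4: the product is already a
	 multiple of 4, so the mask only clears bits that are zero anyway.  */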
11345 if (TREE_CODE (arg1) == INTEGER_CST)
11346 {
11347 wide_int cst1 = arg1;
11348 wide_int ncst1 = -cst1;
11349 if ((cst1 & ncst1) == ncst1
11350 && multiple_of_p (type, arg0,
11351 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11352 return fold_convert_loc (loc, type, arg0);
11353 }
11354
11355 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11356 bits from CST2. */
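      /* For example, (X * 8) & 0x17 becomes (X * 8) & 0x10, because the
	 product always has its low three bits clear.  */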
11357 if (TREE_CODE (arg1) == INTEGER_CST
11358 && TREE_CODE (arg0) == MULT_EXPR
11359 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11360 {
11361 wide_int warg1 = arg1;
11362 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11363
11364 if (masked == 0)
11365 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11366 arg0, arg1);
11367 else if (masked != warg1)
11368 {
11369 /* Avoid the transform if arg1 is a mask of some
11370 mode which allows further optimizations. */
11371 int pop = wi::popcount (warg1);
11372 if (!(pop >= BITS_PER_UNIT
11373 && exact_log2 (pop) != -1
11374 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11375 return fold_build2_loc (loc, code, type, op0,
11376 wide_int_to_tree (type, masked));
11377 }
11378 }
11379
11380 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11381 ((A & N) + B) & M -> (A + B) & M
11382 Similarly if (N & M) == 0,
11383 ((A | N) + B) & M -> (A + B) & M
11384 and for - instead of + (or unary - instead of +)
11385 and/or ^ instead of |.
11386 If B is constant and (B & M) == 0, fold into A & M. */
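      /* For example, with M == 0xff and N == 0xfff, ((A & 0xfff) + B) & 0xff
	 folds to (A + B) & 0xff: the bits of A cleared by N lie above the
	 mask, and carries only propagate upward, so the low bits of the sum
	 are unchanged.  */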
11387 if (TREE_CODE (arg1) == INTEGER_CST)
11388 {
11389 wide_int cst1 = arg1;
11390 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11391 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11392 && (TREE_CODE (arg0) == PLUS_EXPR
11393 || TREE_CODE (arg0) == MINUS_EXPR
11394 || TREE_CODE (arg0) == NEGATE_EXPR)
11395 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11396 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11397 {
11398 tree pmop[2];
11399 int which = 0;
11400 wide_int cst0;
11401
11402 /* Now we know that arg0 is (C + D) or (C - D) or
11403 		 -C and arg1 (M) == (1LL << cst) - 1.
11404 Store C into PMOP[0] and D into PMOP[1]. */
11405 pmop[0] = TREE_OPERAND (arg0, 0);
11406 pmop[1] = NULL;
11407 if (TREE_CODE (arg0) != NEGATE_EXPR)
11408 {
11409 pmop[1] = TREE_OPERAND (arg0, 1);
11410 which = 1;
11411 }
11412
11413 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11414 which = -1;
11415
11416 for (; which >= 0; which--)
11417 switch (TREE_CODE (pmop[which]))
11418 {
11419 case BIT_AND_EXPR:
11420 case BIT_IOR_EXPR:
11421 case BIT_XOR_EXPR:
11422 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11423 != INTEGER_CST)
11424 break;
11425 cst0 = TREE_OPERAND (pmop[which], 1);
11426 cst0 &= cst1;
11427 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11428 {
11429 if (cst0 != cst1)
11430 break;
11431 }
11432 else if (cst0 != 0)
11433 break;
11434 /* If C or D is of the form (A & N) where
11435 (N & M) == M, or of the form (A | N) or
11436 (A ^ N) where (N & M) == 0, replace it with A. */
11437 pmop[which] = TREE_OPERAND (pmop[which], 0);
11438 break;
11439 case INTEGER_CST:
11440 		  /* If C or D is an N where (N & M) == 0, it can be
11441 omitted (assumed 0). */
11442 if ((TREE_CODE (arg0) == PLUS_EXPR
11443 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11444 && (cst1 & pmop[which]) == 0)
11445 pmop[which] = NULL;
11446 break;
11447 default:
11448 break;
11449 }
11450
11451 /* Only build anything new if we optimized one or both arguments
11452 above. */
11453 if (pmop[0] != TREE_OPERAND (arg0, 0)
11454 || (TREE_CODE (arg0) != NEGATE_EXPR
11455 && pmop[1] != TREE_OPERAND (arg0, 1)))
11456 {
11457 tree utype = TREE_TYPE (arg0);
11458 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11459 {
11460 /* Perform the operations in a type that has defined
11461 overflow behavior. */
11462 utype = unsigned_type_for (TREE_TYPE (arg0));
11463 if (pmop[0] != NULL)
11464 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11465 if (pmop[1] != NULL)
11466 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11467 }
11468
11469 if (TREE_CODE (arg0) == NEGATE_EXPR)
11470 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11471 else if (TREE_CODE (arg0) == PLUS_EXPR)
11472 {
11473 if (pmop[0] != NULL && pmop[1] != NULL)
11474 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11475 pmop[0], pmop[1]);
11476 else if (pmop[0] != NULL)
11477 tem = pmop[0];
11478 else if (pmop[1] != NULL)
11479 tem = pmop[1];
11480 else
11481 return build_int_cst (type, 0);
11482 }
11483 else if (pmop[0] == NULL)
11484 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11485 else
11486 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11487 pmop[0], pmop[1]);
11488 /* TEM is now the new binary +, - or unary - replacement. */
11489 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11490 fold_convert_loc (loc, utype, arg1));
11491 return fold_convert_loc (loc, type, tem);
11492 }
11493 }
11494 }
11495
11496 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11497 if (t1 != NULL_TREE)
11498 return t1;
11499 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11500 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11501 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11502 {
11503 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11504
11505 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11506 if (mask == -1)
11507 return
11508 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11509 }
11510
11511 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11512
11513 This results in more efficient code for machines without a NOR
11514 instruction. Combine will canonicalize to the first form
11515 which will allow use of NOR instructions provided by the
11516 backend if they exist. */
11517 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11518 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11519 {
11520 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11521 build2 (BIT_IOR_EXPR, type,
11522 fold_convert_loc (loc, type,
11523 TREE_OPERAND (arg0, 0)),
11524 fold_convert_loc (loc, type,
11525 TREE_OPERAND (arg1, 0))));
11526 }
11527
11528 /* If arg0 is derived from the address of an object or function, we may
11529 be able to fold this expression using the object or function's
11530 alignment. */
11531 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11532 {
11533 unsigned HOST_WIDE_INT modulus, residue;
11534 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11535
11536 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11537 integer_onep (arg1));
11538
11539 /* This works because modulus is a power of 2. If this weren't the
11540 case, we'd have to replace it by its greatest power-of-2
11541 divisor: modulus & -modulus. */
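	  /* For example, if arg0 is known to be 8-byte aligned (modulus 8,
	     residue 0) and arg1 is 3, the whole expression folds to the
	     constant 0.  */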
11542 if (low < modulus)
11543 return build_int_cst (type, residue & low);
11544 }
11545
11546 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11547 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11548 if the new mask might be further optimized. */
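      /* For example, (X << 8) & 0xff00 becomes (X << 8) & 0xffff: the low
	 eight bits of X << 8 are zero anyway, and 0xffff is the mask of a
	 whole integer mode, which later folding can exploit.  */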
11549 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11550 || TREE_CODE (arg0) == RSHIFT_EXPR)
11551 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11552 && TREE_CODE (arg1) == INTEGER_CST
11553 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11554 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11555 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11556 < TYPE_PRECISION (TREE_TYPE (arg0))))
11557 {
11558 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11559 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11560 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11561 tree shift_type = TREE_TYPE (arg0);
11562
11563 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11564 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11565 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11566 && TYPE_PRECISION (TREE_TYPE (arg0))
11567 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11568 {
11569 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11570 tree arg00 = TREE_OPERAND (arg0, 0);
11571 /* See if more bits can be proven as zero because of
11572 zero extension. */
11573 if (TREE_CODE (arg00) == NOP_EXPR
11574 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11575 {
11576 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11577 if (TYPE_PRECISION (inner_type)
11578 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11579 && TYPE_PRECISION (inner_type) < prec)
11580 {
11581 prec = TYPE_PRECISION (inner_type);
11582 /* See if we can shorten the right shift. */
11583 if (shiftc < prec)
11584 shift_type = inner_type;
11585 /* Otherwise X >> C1 is all zeros, so we'll optimize
11586 it into (X, 0) later on by making sure zerobits
11587 is all ones. */
11588 }
11589 }
11590 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11591 if (shiftc < prec)
11592 {
11593 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11594 zerobits <<= prec - shiftc;
11595 }
11596 	      /* For an arithmetic shift, if the sign bit could be set, zerobits
11597 		 can actually contain sign bits, so no transformation is
11598 		 possible unless MASK masks them all away.  In that
11599 		 case the shift needs to be converted into a logical shift.  */
11600 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11601 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11602 {
11603 if ((mask & zerobits) == 0)
11604 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11605 else
11606 zerobits = 0;
11607 }
11608 }
11609
11610 /* ((X << 16) & 0xff00) is (X, 0). */
11611 if ((mask & zerobits) == mask)
11612 return omit_one_operand_loc (loc, type,
11613 build_int_cst (type, 0), arg0);
11614
11615 newmask = mask | zerobits;
11616 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11617 {
11618 /* Only do the transformation if NEWMASK is some integer
11619 mode's mask. */
11620 for (prec = BITS_PER_UNIT;
11621 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11622 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11623 break;
11624 if (prec < HOST_BITS_PER_WIDE_INT
11625 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11626 {
11627 tree newmaskt;
11628
11629 if (shift_type != TREE_TYPE (arg0))
11630 {
11631 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11632 fold_convert_loc (loc, shift_type,
11633 TREE_OPERAND (arg0, 0)),
11634 TREE_OPERAND (arg0, 1));
11635 tem = fold_convert_loc (loc, type, tem);
11636 }
11637 else
11638 tem = op0;
11639 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11640 if (!tree_int_cst_equal (newmaskt, arg1))
11641 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11642 }
11643 }
11644 }
11645
11646 goto associate;
11647
11648 case RDIV_EXPR:
11649 /* Don't touch a floating-point divide by zero unless the mode
11650 of the constant can represent infinity. */
11651 if (TREE_CODE (arg1) == REAL_CST
11652 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11653 && real_zerop (arg1))
11654 return NULL_TREE;
11655
11656 /* (-A) / (-B) -> A / B */
11657 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11658 return fold_build2_loc (loc, RDIV_EXPR, type,
11659 TREE_OPERAND (arg0, 0),
11660 negate_expr (arg1));
11661 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11662 return fold_build2_loc (loc, RDIV_EXPR, type,
11663 negate_expr (arg0),
11664 TREE_OPERAND (arg1, 0));
11665
11666 /* Convert A/B/C to A/(B*C). */
11667 if (flag_reciprocal_math
11668 && TREE_CODE (arg0) == RDIV_EXPR)
11669 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11670 fold_build2_loc (loc, MULT_EXPR, type,
11671 TREE_OPERAND (arg0, 1), arg1));
11672
11673 /* Convert A/(B/C) to (A/B)*C. */
11674 if (flag_reciprocal_math
11675 && TREE_CODE (arg1) == RDIV_EXPR)
11676 return fold_build2_loc (loc, MULT_EXPR, type,
11677 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11678 TREE_OPERAND (arg1, 0)),
11679 TREE_OPERAND (arg1, 1));
11680
11681 /* Convert C1/(X*C2) into (C1/C2)/X. */
11682 if (flag_reciprocal_math
11683 && TREE_CODE (arg1) == MULT_EXPR
11684 && TREE_CODE (arg0) == REAL_CST
11685 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11686 {
11687 tree tem = const_binop (RDIV_EXPR, arg0,
11688 TREE_OPERAND (arg1, 1));
11689 if (tem)
11690 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11691 TREE_OPERAND (arg1, 0));
11692 }
11693
11694 if (flag_unsafe_math_optimizations)
11695 {
11696 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11697 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11698
11699 /* Optimize sin(x)/cos(x) as tan(x). */
11700 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11701 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11702 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11703 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11704 CALL_EXPR_ARG (arg1, 0), 0))
11705 {
11706 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11707
11708 if (tanfn != NULL_TREE)
11709 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11710 }
11711
11712 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11713 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11714 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11715 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11716 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11717 CALL_EXPR_ARG (arg1, 0), 0))
11718 {
11719 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11720
11721 if (tanfn != NULL_TREE)
11722 {
11723 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11724 CALL_EXPR_ARG (arg0, 0));
11725 return fold_build2_loc (loc, RDIV_EXPR, type,
11726 build_real (type, dconst1), tmp);
11727 }
11728 }
11729
11730 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11731 NaNs or Infinities. */
11732 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11733 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11734 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11735 {
11736 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11737 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11738
11739 if (! HONOR_NANS (arg00)
11740 && ! HONOR_INFINITIES (element_mode (arg00))
11741 && operand_equal_p (arg00, arg01, 0))
11742 {
11743 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11744
11745 if (cosfn != NULL_TREE)
11746 return build_call_expr_loc (loc, cosfn, 1, arg00);
11747 }
11748 }
11749
11750 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11751 NaNs or Infinities. */
11752 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11753 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11754 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11755 {
11756 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11757 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11758
11759 if (! HONOR_NANS (arg00)
11760 && ! HONOR_INFINITIES (element_mode (arg00))
11761 && operand_equal_p (arg00, arg01, 0))
11762 {
11763 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11764
11765 if (cosfn != NULL_TREE)
11766 {
11767 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11768 return fold_build2_loc (loc, RDIV_EXPR, type,
11769 build_real (type, dconst1),
11770 tmp);
11771 }
11772 }
11773 }
11774
11775 /* Optimize pow(x,c)/x as pow(x,c-1). */
11776 if (fcode0 == BUILT_IN_POW
11777 || fcode0 == BUILT_IN_POWF
11778 || fcode0 == BUILT_IN_POWL)
11779 {
11780 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11781 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11782 if (TREE_CODE (arg01) == REAL_CST
11783 && !TREE_OVERFLOW (arg01)
11784 && operand_equal_p (arg1, arg00, 0))
11785 {
11786 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11787 REAL_VALUE_TYPE c;
11788 tree arg;
11789
11790 c = TREE_REAL_CST (arg01);
11791 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11792 arg = build_real (type, c);
11793 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11794 }
11795 }
11796
11797 /* Optimize a/root(b/c) into a*root(c/b). */
11798 if (BUILTIN_ROOT_P (fcode1))
11799 {
11800 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11801
11802 if (TREE_CODE (rootarg) == RDIV_EXPR)
11803 {
11804 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11805 tree b = TREE_OPERAND (rootarg, 0);
11806 tree c = TREE_OPERAND (rootarg, 1);
11807
11808 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11809
11810 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11811 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11812 }
11813 }
11814
11815 /* Optimize x/expN(y) into x*expN(-y). */
11816 if (BUILTIN_EXPONENT_P (fcode1))
11817 {
11818 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11819 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11820 arg1 = build_call_expr_loc (loc,
11821 expfn, 1,
11822 fold_convert_loc (loc, type, arg));
11823 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11824 }
11825
11826 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11827 if (fcode1 == BUILT_IN_POW
11828 || fcode1 == BUILT_IN_POWF
11829 || fcode1 == BUILT_IN_POWL)
11830 {
11831 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11832 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11833 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11834 tree neg11 = fold_convert_loc (loc, type,
11835 negate_expr (arg11));
11836 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11837 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11838 }
11839 }
11840 return NULL_TREE;
11841
11842 case TRUNC_DIV_EXPR:
11843 /* Optimize (X & (-A)) / A where A is a power of 2,
11844 to X >> log2(A) */
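      /* For example, (X & -8) / 8 becomes X >> 3: the AND already clears the
	 low bits, so the truncating division is exact and agrees with the
	 arithmetic shift even for negative X.  */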
11845 if (TREE_CODE (arg0) == BIT_AND_EXPR
11846 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11847 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11848 {
11849 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11850 arg1, TREE_OPERAND (arg0, 1));
11851 if (sum && integer_zerop (sum)) {
11852 tree pow2 = build_int_cst (integer_type_node,
11853 wi::exact_log2 (arg1));
11854 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11855 TREE_OPERAND (arg0, 0), pow2);
11856 }
11857 }
11858
11859 /* Fall through */
11860
11861 case FLOOR_DIV_EXPR:
11862 /* Simplify A / (B << N) where A and B are positive and B is
11863 a power of 2, to A >> (N + log2(B)). */
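      /* For example, A / (4 << N) becomes A >> (N + 2) when A is unsigned or
	 known to be non-negative.  */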
11864 strict_overflow_p = false;
11865 if (TREE_CODE (arg1) == LSHIFT_EXPR
11866 && (TYPE_UNSIGNED (type)
11867 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11868 {
11869 tree sval = TREE_OPERAND (arg1, 0);
11870 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11871 {
11872 tree sh_cnt = TREE_OPERAND (arg1, 1);
11873 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11874 wi::exact_log2 (sval));
11875
11876 if (strict_overflow_p)
11877 fold_overflow_warning (("assuming signed overflow does not "
11878 "occur when simplifying A / (B << N)"),
11879 WARN_STRICT_OVERFLOW_MISC);
11880
11881 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11882 sh_cnt, pow2);
11883 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11884 fold_convert_loc (loc, type, arg0), sh_cnt);
11885 }
11886 }
11887
11888 /* Fall through */
11889
11890 case ROUND_DIV_EXPR:
11891 case CEIL_DIV_EXPR:
11892 case EXACT_DIV_EXPR:
11893 if (integer_zerop (arg1))
11894 return NULL_TREE;
11895
11896 /* Convert -A / -B to A / B when the type is signed and overflow is
11897 undefined. */
11898 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11899 && TREE_CODE (arg0) == NEGATE_EXPR
11900 && negate_expr_p (arg1))
11901 {
11902 if (INTEGRAL_TYPE_P (type))
11903 fold_overflow_warning (("assuming signed overflow does not occur "
11904 "when distributing negation across "
11905 "division"),
11906 WARN_STRICT_OVERFLOW_MISC);
11907 return fold_build2_loc (loc, code, type,
11908 fold_convert_loc (loc, type,
11909 TREE_OPERAND (arg0, 0)),
11910 fold_convert_loc (loc, type,
11911 negate_expr (arg1)));
11912 }
11913 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11914 && TREE_CODE (arg1) == NEGATE_EXPR
11915 && negate_expr_p (arg0))
11916 {
11917 if (INTEGRAL_TYPE_P (type))
11918 fold_overflow_warning (("assuming signed overflow does not occur "
11919 "when distributing negation across "
11920 "division"),
11921 WARN_STRICT_OVERFLOW_MISC);
11922 return fold_build2_loc (loc, code, type,
11923 fold_convert_loc (loc, type,
11924 negate_expr (arg0)),
11925 fold_convert_loc (loc, type,
11926 TREE_OPERAND (arg1, 0)));
11927 }
11928
11929 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11930 operation, EXACT_DIV_EXPR.
11931
11932 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11933 At one time others generated faster code, but it's not clear if they do
11934 after the last round of changes to the DIV code in expmed.c. */
11935 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11936 && multiple_of_p (type, arg0, arg1))
11937 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11938
11939 strict_overflow_p = false;
11940 if (TREE_CODE (arg1) == INTEGER_CST
11941 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11942 &strict_overflow_p)))
11943 {
11944 if (strict_overflow_p)
11945 fold_overflow_warning (("assuming signed overflow does not occur "
11946 "when simplifying division"),
11947 WARN_STRICT_OVERFLOW_MISC);
11948 return fold_convert_loc (loc, type, tem);
11949 }
11950
11951 return NULL_TREE;
11952
11953 case CEIL_MOD_EXPR:
11954 case FLOOR_MOD_EXPR:
11955 case ROUND_MOD_EXPR:
11956 case TRUNC_MOD_EXPR:
11957 /* X % -Y is the same as X % Y. */
11958 if (code == TRUNC_MOD_EXPR
11959 && !TYPE_UNSIGNED (type)
11960 && TREE_CODE (arg1) == NEGATE_EXPR
11961 && !TYPE_OVERFLOW_TRAPS (type))
11962 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11963 fold_convert_loc (loc, type,
11964 TREE_OPERAND (arg1, 0)));
11965
11966 strict_overflow_p = false;
11967 if (TREE_CODE (arg1) == INTEGER_CST
11968 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11969 &strict_overflow_p)))
11970 {
11971 if (strict_overflow_p)
11972 fold_overflow_warning (("assuming signed overflow does not occur "
11973 "when simplifying modulus"),
11974 WARN_STRICT_OVERFLOW_MISC);
11975 return fold_convert_loc (loc, type, tem);
11976 }
11977
11978 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11979 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
11980 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11981 && (TYPE_UNSIGNED (type)
11982 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11983 {
11984 tree c = arg1;
11985 /* Also optimize A % (C << N) where C is a power of 2,
11986 to A & ((C << N) - 1). */
11987 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11988 c = TREE_OPERAND (arg1, 0);
11989
11990 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11991 {
11992 tree mask
11993 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11994 build_int_cst (TREE_TYPE (arg1), 1));
11995 if (strict_overflow_p)
11996 fold_overflow_warning (("assuming signed overflow does not "
11997 "occur when simplifying "
11998 "X % (power of two)"),
11999 WARN_STRICT_OVERFLOW_MISC);
12000 return fold_build2_loc (loc, BIT_AND_EXPR, type,
12001 fold_convert_loc (loc, type, arg0),
12002 fold_convert_loc (loc, type, mask));
12003 }
12004 }
12005
12006 return NULL_TREE;
12007
12008 case LROTATE_EXPR:
12009 case RROTATE_EXPR:
12010 case RSHIFT_EXPR:
12011 case LSHIFT_EXPR:
12012 /* Since a negative shift count is not well-defined,
12013 don't try to compute it in the compiler. */
12014 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12015 return NULL_TREE;
12016
12017 prec = element_precision (type);
12018
12019 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
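/* E.g. (a << 3) << 5 becomes a << 8, as long as the combined count
   still fits below the precision of the type.  */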
12020 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12021 && tree_to_uhwi (arg1) < prec
12022 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12023 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12024 {
12025 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12026 + tree_to_uhwi (arg1));
12027
12028 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12029 being well defined. */
12030 if (low >= prec)
12031 {
12032 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12033 low = low % prec;
12034 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12035 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12036 TREE_OPERAND (arg0, 0));
12037 else
12038 low = prec - 1;
12039 }
12040
12041 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12042 build_int_cst (TREE_TYPE (arg1), low));
12043 }
12044
12045 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12046 into x & ((unsigned)-1 >> c) for unsigned types. */
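/* E.g. for a 32-bit unsigned x, (x >> 4) << 4 becomes x & 0xfffffff0.  */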
12047 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12048 || (TYPE_UNSIGNED (type)
12049 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12050 && tree_fits_uhwi_p (arg1)
12051 && tree_to_uhwi (arg1) < prec
12052 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12053 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12054 {
12055 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12056 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12057 tree lshift;
12058 tree arg00;
12059
12060 if (low0 == low1)
12061 {
12062 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12063
12064 lshift = build_minus_one_cst (type);
12065 lshift = const_binop (code, lshift, arg1);
12066
12067 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12068 }
12069 }
12070
12071 /* If we have a rotate of a bit operation with the rotate count and
12072 the second operand of the bit operation both constant,
12073 permute the two operations. */
12074 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12075 && (TREE_CODE (arg0) == BIT_AND_EXPR
12076 || TREE_CODE (arg0) == BIT_IOR_EXPR
12077 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12078 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12079 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12080 fold_build2_loc (loc, code, type,
12081 TREE_OPERAND (arg0, 0), arg1),
12082 fold_build2_loc (loc, code, type,
12083 TREE_OPERAND (arg0, 1), arg1));
12084
12085 /* Two consecutive rotates adding up to some integer
12086 multiple of the precision of the type can be ignored. */
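/* E.g. rotating a 32-bit value right by 13 and then right by 19 rotates
   it by 32 bits in total, which leaves the value unchanged.  */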
12087 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12088 && TREE_CODE (arg0) == RROTATE_EXPR
12089 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12090 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12091 prec) == 0)
12092 return TREE_OPERAND (arg0, 0);
12093
12094 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12095 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12096 if the latter can be further optimized. */
12097 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12098 && TREE_CODE (arg0) == BIT_AND_EXPR
12099 && TREE_CODE (arg1) == INTEGER_CST
12100 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12101 {
12102 tree mask = fold_build2_loc (loc, code, type,
12103 fold_convert_loc (loc, type,
12104 TREE_OPERAND (arg0, 1)),
12105 arg1);
12106 tree shift = fold_build2_loc (loc, code, type,
12107 fold_convert_loc (loc, type,
12108 TREE_OPERAND (arg0, 0)),
12109 arg1);
12110 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12111 if (tem)
12112 return tem;
12113 }
12114
12115 return NULL_TREE;
12116
12117 case MIN_EXPR:
12118 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12119 if (tem)
12120 return tem;
12121 goto associate;
12122
12123 case MAX_EXPR:
12124 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12125 if (tem)
12126 return tem;
12127 goto associate;
12128
12129 case TRUTH_ANDIF_EXPR:
12130 /* Note that the operands of this must be ints
12131 and their values must be 0 or 1.
12132 ("true" is a fixed value perhaps depending on the language.) */
12133 /* If first arg is constant zero, return it. */
12134 if (integer_zerop (arg0))
12135 return fold_convert_loc (loc, type, arg0);
12136 case TRUTH_AND_EXPR:
12137 /* If either arg is constant true, drop it. */
12138 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12139 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12140 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12141 /* Preserve sequence points. */
12142 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12143 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12144 /* If second arg is constant zero, result is zero, but first arg
12145 must be evaluated. */
12146 if (integer_zerop (arg1))
12147 return omit_one_operand_loc (loc, type, arg1, arg0);
12148 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12149 case will be handled here. */
12150 if (integer_zerop (arg0))
12151 return omit_one_operand_loc (loc, type, arg0, arg1);
12152
12153 /* !X && X is always false. */
12154 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12155 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12156 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12157 /* X && !X is always false. */
12158 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12159 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12160 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12161
12162 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12163 means A >= Y && A != MAX, but in this case we know that
12164 A < X <= MAX. */
12165
12166 if (!TREE_SIDE_EFFECTS (arg0)
12167 && !TREE_SIDE_EFFECTS (arg1))
12168 {
12169 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12170 if (tem && !operand_equal_p (tem, arg0, 0))
12171 return fold_build2_loc (loc, code, type, tem, arg1);
12172
12173 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12174 if (tem && !operand_equal_p (tem, arg1, 0))
12175 return fold_build2_loc (loc, code, type, arg0, tem);
12176 }
12177
12178 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12179 != NULL_TREE)
12180 return tem;
12181
12182 return NULL_TREE;
12183
12184 case TRUTH_ORIF_EXPR:
12185 /* Note that the operands of this must be ints
12186 and their values must be 0 or true.
12187 ("true" is a fixed value perhaps depending on the language.) */
12188 /* If first arg is constant true, return it. */
12189 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12190 return fold_convert_loc (loc, type, arg0);
12191 case TRUTH_OR_EXPR:
12192 /* If either arg is constant zero, drop it. */
12193 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12194 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12195 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12196 /* Preserve sequence points. */
12197 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12198 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12199 /* If second arg is constant true, result is true, but we must
12200 evaluate first arg. */
12201 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12202 return omit_one_operand_loc (loc, type, arg1, arg0);
12203 /* Likewise for first arg, but note this only occurs here for
12204 TRUTH_OR_EXPR. */
12205 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12206 return omit_one_operand_loc (loc, type, arg0, arg1);
12207
12208 /* !X || X is always true. */
12209 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12210 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12211 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12212 /* X || !X is always true. */
12213 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12214 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12215 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12216
12217 /* (X && !Y) || (!X && Y) is X ^ Y */
12218 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12219 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12220 {
12221 tree a0, a1, l0, l1, n0, n1;
12222
12223 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12224 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12225
12226 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12227 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12228
12229 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12230 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12231
12232 if ((operand_equal_p (n0, a0, 0)
12233 && operand_equal_p (n1, a1, 0))
12234 || (operand_equal_p (n0, a1, 0)
12235 && operand_equal_p (n1, a0, 0)))
12236 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12237 }
12238
12239 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12240 != NULL_TREE)
12241 return tem;
12242
12243 return NULL_TREE;
12244
12245 case TRUTH_XOR_EXPR:
12246 /* If the second arg is constant zero, drop it. */
12247 if (integer_zerop (arg1))
12248 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12249 /* If the second arg is constant true, this is a logical inversion. */
12250 if (integer_onep (arg1))
12251 {
12252 tem = invert_truthvalue_loc (loc, arg0);
12253 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12254 }
12255 /* Identical arguments cancel to zero. */
12256 if (operand_equal_p (arg0, arg1, 0))
12257 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12258
12259 /* !X ^ X is always true. */
12260 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12261 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12262 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12263
12264 /* X ^ !X is always true. */
12265 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12266 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12267 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12268
12269 return NULL_TREE;
12270
12271 case EQ_EXPR:
12272 case NE_EXPR:
12273 STRIP_NOPS (arg0);
12274 STRIP_NOPS (arg1);
12275
12276 tem = fold_comparison (loc, code, type, op0, op1);
12277 if (tem != NULL_TREE)
12278 return tem;
12279
12280 /* bool_var != 0 becomes bool_var. */
12281 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12282 && code == NE_EXPR)
12283 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12284
12285 /* bool_var == 1 becomes bool_var. */
12286 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12287 && code == EQ_EXPR)
12288 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12289
12290 /* bool_var != 1 becomes !bool_var. */
12291 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12292 && code == NE_EXPR)
12293 return fold_convert_loc (loc, type,
12294 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12295 TREE_TYPE (arg0), arg0));
12296
12297 /* bool_var == 0 becomes !bool_var. */
12298 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12299 && code == EQ_EXPR)
12300 return fold_convert_loc (loc, type,
12301 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12302 TREE_TYPE (arg0), arg0));
12303
12304 /* !exp != 0 becomes !exp */
12305 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12306 && code == NE_EXPR)
12307 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12308
12309 /* If this is an equality comparison of the address of two non-weak,
12310 unaliased symbols neither of which is extern (since we do not
12311 have access to attributes for externs), then we know the result. */
12312 if (TREE_CODE (arg0) == ADDR_EXPR
12313 && DECL_P (TREE_OPERAND (arg0, 0))
12314 && TREE_CODE (arg1) == ADDR_EXPR
12315 && DECL_P (TREE_OPERAND (arg1, 0)))
12316 {
12317 int equal;
12318
12319 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12320 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12321 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12322 ->equal_address_to (symtab_node::get_create
12323 (TREE_OPERAND (arg1, 0)));
12324 else
12325 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12326 if (equal != 2)
12327 return constant_boolean_node (equal
12328 ? code == EQ_EXPR : code != EQ_EXPR,
12329 type);
12330 }
12331
12332 /* Similarly for a NEGATE_EXPR. */
12333 if (TREE_CODE (arg0) == NEGATE_EXPR
12334 && TREE_CODE (arg1) == INTEGER_CST
12335 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12336 arg1)))
12337 && TREE_CODE (tem) == INTEGER_CST
12338 && !TREE_OVERFLOW (tem))
12339 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12340
12341 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
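/* E.g. (x ^ 5) == 3 becomes x == 6, since 5 ^ 3 == 6.  */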
12342 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12343 && TREE_CODE (arg1) == INTEGER_CST
12344 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12345 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12346 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12347 fold_convert_loc (loc,
12348 TREE_TYPE (arg0),
12349 arg1),
12350 TREE_OPERAND (arg0, 1)));
12351
12352 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
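/* E.g. x + y != x folds to y != 0; for equality tests this holds even
   under modular wraparound.  */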
12353 if ((TREE_CODE (arg0) == PLUS_EXPR
12354 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12355 || TREE_CODE (arg0) == MINUS_EXPR)
12356 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12357 0)),
12358 arg1, 0)
12359 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12360 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12361 {
12362 tree val = TREE_OPERAND (arg0, 1);
12363 return omit_two_operands_loc (loc, type,
12364 fold_build2_loc (loc, code, type,
12365 val,
12366 build_int_cst (TREE_TYPE (val),
12367 0)),
12368 TREE_OPERAND (arg0, 0), arg1);
12369 }
12370
12371 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
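/* E.g. 7 - x == x can never hold, because 2*x is even while 7 is odd,
   so the comparison folds to false (and != to true).  */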
12372 if (TREE_CODE (arg0) == MINUS_EXPR
12373 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12374 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12375 1)),
12376 arg1, 0)
12377 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12378 {
12379 return omit_two_operands_loc (loc, type,
12380 code == NE_EXPR
12381 ? boolean_true_node : boolean_false_node,
12382 TREE_OPERAND (arg0, 1), arg1);
12383 }
12384
12385 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12386 if (TREE_CODE (arg0) == ABS_EXPR
12387 && (integer_zerop (arg1) || real_zerop (arg1)))
12388 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12389
12390 /* If this is an EQ or NE comparison with zero and ARG0 is
12391 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12392 two operations, but the latter can be done in one less insn
12393 on machines that have only two-operand insns or on which a
12394 constant cannot be the first operand. */
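/* E.g. ((1 << n) & flags) != 0 becomes ((flags >> n) & 1) != 0.  */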
12395 if (TREE_CODE (arg0) == BIT_AND_EXPR
12396 && integer_zerop (arg1))
12397 {
12398 tree arg00 = TREE_OPERAND (arg0, 0);
12399 tree arg01 = TREE_OPERAND (arg0, 1);
12400 if (TREE_CODE (arg00) == LSHIFT_EXPR
12401 && integer_onep (TREE_OPERAND (arg00, 0)))
12402 {
12403 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12404 arg01, TREE_OPERAND (arg00, 1));
12405 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12406 build_int_cst (TREE_TYPE (arg0), 1));
12407 return fold_build2_loc (loc, code, type,
12408 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12409 arg1);
12410 }
12411 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12412 && integer_onep (TREE_OPERAND (arg01, 0)))
12413 {
12414 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12415 arg00, TREE_OPERAND (arg01, 1));
12416 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12417 build_int_cst (TREE_TYPE (arg0), 1));
12418 return fold_build2_loc (loc, code, type,
12419 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12420 arg1);
12421 }
12422 }
12423
12424 /* If this is an NE or EQ comparison of zero against the result of a
12425 signed MOD operation whose second operand is a power of 2, make
12426 the MOD operation unsigned since it is simpler and equivalent. */
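/* E.g. for a signed int x, x % 4 == 0 becomes (unsigned int) x % 4 == 0,
   which only looks at the low two bits.  */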
12427 if (integer_zerop (arg1)
12428 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12429 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12430 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12431 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12432 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12433 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12434 {
12435 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12436 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12437 fold_convert_loc (loc, newtype,
12438 TREE_OPERAND (arg0, 0)),
12439 fold_convert_loc (loc, newtype,
12440 TREE_OPERAND (arg0, 1)));
12441
12442 return fold_build2_loc (loc, code, type, newmod,
12443 fold_convert_loc (loc, newtype, arg1));
12444 }
12445
12446 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12447 C1 is a valid shift constant, and C2 is a power of two, i.e.
12448 a single bit. */
12449 if (TREE_CODE (arg0) == BIT_AND_EXPR
12450 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12451 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12452 == INTEGER_CST
12453 && integer_pow2p (TREE_OPERAND (arg0, 1))
12454 && integer_zerop (arg1))
12455 {
12456 tree itype = TREE_TYPE (arg0);
12457 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12458 prec = TYPE_PRECISION (itype);
12459
12460 /* Check for a valid shift count. */
12461 if (wi::ltu_p (arg001, prec))
12462 {
12463 tree arg01 = TREE_OPERAND (arg0, 1);
12464 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12465 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12466 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12467 can be rewritten as (X & (C2 << C1)) != 0. */
12468 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12469 {
12470 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12471 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12472 return fold_build2_loc (loc, code, type, tem,
12473 fold_convert_loc (loc, itype, arg1));
12474 }
12475 /* Otherwise, for signed (arithmetic) shifts,
12476 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12477 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12478 else if (!TYPE_UNSIGNED (itype))
12479 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12480 arg000, build_int_cst (itype, 0));
12481 /* Otherwise, for unsigned (logical) shifts,
12482 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12483 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12484 else
12485 return omit_one_operand_loc (loc, type,
12486 code == EQ_EXPR ? integer_one_node
12487 : integer_zero_node,
12488 arg000);
12489 }
12490 }
12491
12492 /* If we have (A & C) == C where C is a power of 2, convert this into
12493 (A & C) != 0. Similarly for NE_EXPR. */
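/* E.g. (flags & 0x10) == 0x10 becomes (flags & 0x10) != 0.  */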
12494 if (TREE_CODE (arg0) == BIT_AND_EXPR
12495 && integer_pow2p (TREE_OPERAND (arg0, 1))
12496 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12497 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12498 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12499 integer_zero_node));
12500
12501 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12502 bit, then fold the expression into A < 0 or A >= 0. */
12503 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12504 if (tem)
12505 return tem;
12506
12507 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12508 Similarly for NE_EXPR. */
12509 if (TREE_CODE (arg0) == BIT_AND_EXPR
12510 && TREE_CODE (arg1) == INTEGER_CST
12511 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12512 {
12513 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12514 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12515 TREE_OPERAND (arg0, 1));
12516 tree dandnotc
12517 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12518 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12519 notc);
12520 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12521 if (integer_nonzerop (dandnotc))
12522 return omit_one_operand_loc (loc, type, rslt, arg0);
12523 }
12524
12525 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12526 Similarly for NE_EXPR. */
12527 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12528 && TREE_CODE (arg1) == INTEGER_CST
12529 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12530 {
12531 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12532 tree candnotd
12533 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12534 TREE_OPERAND (arg0, 1),
12535 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12536 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12537 if (integer_nonzerop (candnotd))
12538 return omit_one_operand_loc (loc, type, rslt, arg0);
12539 }
12540
12541 /* If this is a comparison of a field, we may be able to simplify it. */
12542 if ((TREE_CODE (arg0) == COMPONENT_REF
12543 || TREE_CODE (arg0) == BIT_FIELD_REF)
12544 /* Handle the constant case even without -O
12545 to make sure the warnings are given. */
12546 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12547 {
12548 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12549 if (t1)
12550 return t1;
12551 }
12552
12553 /* Optimize comparisons of strlen vs zero to a compare of the
12554 first character of the string vs zero. To wit,
12555 strlen(ptr) == 0 => *ptr == 0
12556 strlen(ptr) != 0 => *ptr != 0
12557 Other cases should reduce to one of these two (or a constant)
12558 due to the return value of strlen being unsigned. */
12559 if (TREE_CODE (arg0) == CALL_EXPR
12560 && integer_zerop (arg1))
12561 {
12562 tree fndecl = get_callee_fndecl (arg0);
12563
12564 if (fndecl
12565 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12566 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12567 && call_expr_nargs (arg0) == 1
12568 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12569 {
12570 tree iref = build_fold_indirect_ref_loc (loc,
12571 CALL_EXPR_ARG (arg0, 0));
12572 return fold_build2_loc (loc, code, type, iref,
12573 build_int_cst (TREE_TYPE (iref), 0));
12574 }
12575 }
12576
12577 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12578 of X. Similarly fold (X >> C) == 0 into X >= 0. */
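/* E.g. for a 32-bit int x, (x >> 31) != 0 is exactly x < 0 and
   (x >> 31) == 0 is x >= 0.  */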
12579 if (TREE_CODE (arg0) == RSHIFT_EXPR
12580 && integer_zerop (arg1)
12581 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12582 {
12583 tree arg00 = TREE_OPERAND (arg0, 0);
12584 tree arg01 = TREE_OPERAND (arg0, 1);
12585 tree itype = TREE_TYPE (arg00);
12586 if (wi::eq_p (arg01, element_precision (itype) - 1))
12587 {
12588 if (TYPE_UNSIGNED (itype))
12589 {
12590 itype = signed_type_for (itype);
12591 arg00 = fold_convert_loc (loc, itype, arg00);
12592 }
12593 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12594 type, arg00, build_zero_cst (itype));
12595 }
12596 }
12597
12598 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12599 if (integer_zerop (arg1)
12600 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12601 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12602 TREE_OPERAND (arg0, 1));
12603
12604 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12605 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12606 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12607 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12608 build_zero_cst (TREE_TYPE (arg0)));
12609 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12610 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12611 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12612 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12613 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12614 build_zero_cst (TREE_TYPE (arg0)));
12615
12616 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12617 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12618 && TREE_CODE (arg1) == INTEGER_CST
12619 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12620 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12621 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12622 TREE_OPERAND (arg0, 1), arg1));
12623
12624 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12625 (X & C) == 0 when C is a single bit. */
12626 if (TREE_CODE (arg0) == BIT_AND_EXPR
12627 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12628 && integer_zerop (arg1)
12629 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12630 {
12631 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12632 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12633 TREE_OPERAND (arg0, 1));
12634 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12635 type, tem,
12636 fold_convert_loc (loc, TREE_TYPE (arg0),
12637 arg1));
12638 }
12639
12640 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12641 constant C is a power of two, i.e. a single bit. */
12642 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12643 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12644 && integer_zerop (arg1)
12645 && integer_pow2p (TREE_OPERAND (arg0, 1))
12646 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12647 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12648 {
12649 tree arg00 = TREE_OPERAND (arg0, 0);
12650 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12651 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12652 }
12653
12654 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12655 when C is a power of two, i.e. a single bit. */
12656 if (TREE_CODE (arg0) == BIT_AND_EXPR
12657 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12658 && integer_zerop (arg1)
12659 && integer_pow2p (TREE_OPERAND (arg0, 1))
12660 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12661 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12662 {
12663 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12664 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12665 arg000, TREE_OPERAND (arg0, 1));
12666 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12667 tem, build_int_cst (TREE_TYPE (tem), 0));
12668 }
12669
12670 if (integer_zerop (arg1)
12671 && tree_expr_nonzero_p (arg0))
12672 {
12673 tree res = constant_boolean_node (code==NE_EXPR, type);
12674 return omit_one_operand_loc (loc, type, res, arg0);
12675 }
12676
12677 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12678 if (TREE_CODE (arg0) == NEGATE_EXPR
12679 && TREE_CODE (arg1) == NEGATE_EXPR)
12680 return fold_build2_loc (loc, code, type,
12681 TREE_OPERAND (arg0, 0),
12682 fold_convert_loc (loc, TREE_TYPE (arg0),
12683 TREE_OPERAND (arg1, 0)));
12684
12685 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
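/* E.g. (x & 7) == (y & 7) becomes ((x ^ y) & 7) == 0.  */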
12686 if (TREE_CODE (arg0) == BIT_AND_EXPR
12687 && TREE_CODE (arg1) == BIT_AND_EXPR)
12688 {
12689 tree arg00 = TREE_OPERAND (arg0, 0);
12690 tree arg01 = TREE_OPERAND (arg0, 1);
12691 tree arg10 = TREE_OPERAND (arg1, 0);
12692 tree arg11 = TREE_OPERAND (arg1, 1);
12693 tree itype = TREE_TYPE (arg0);
12694
12695 if (operand_equal_p (arg01, arg11, 0))
12696 return fold_build2_loc (loc, code, type,
12697 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12698 fold_build2_loc (loc,
12699 BIT_XOR_EXPR, itype,
12700 arg00, arg10),
12701 arg01),
12702 build_zero_cst (itype));
12703
12704 if (operand_equal_p (arg01, arg10, 0))
12705 return fold_build2_loc (loc, code, type,
12706 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12707 fold_build2_loc (loc,
12708 BIT_XOR_EXPR, itype,
12709 arg00, arg11),
12710 arg01),
12711 build_zero_cst (itype));
12712
12713 if (operand_equal_p (arg00, arg11, 0))
12714 return fold_build2_loc (loc, code, type,
12715 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12716 fold_build2_loc (loc,
12717 BIT_XOR_EXPR, itype,
12718 arg01, arg10),
12719 arg00),
12720 build_zero_cst (itype));
12721
12722 if (operand_equal_p (arg00, arg10, 0))
12723 return fold_build2_loc (loc, code, type,
12724 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12725 fold_build2_loc (loc,
12726 BIT_XOR_EXPR, itype,
12727 arg01, arg11),
12728 arg00),
12729 build_zero_cst (itype));
12730 }
12731
12732 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12733 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12734 {
12735 tree arg00 = TREE_OPERAND (arg0, 0);
12736 tree arg01 = TREE_OPERAND (arg0, 1);
12737 tree arg10 = TREE_OPERAND (arg1, 0);
12738 tree arg11 = TREE_OPERAND (arg1, 1);
12739 tree itype = TREE_TYPE (arg0);
12740
12741 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12742 operand_equal_p guarantees no side-effects so we don't need
12743 to use omit_one_operand on Z. */
12744 if (operand_equal_p (arg01, arg11, 0))
12745 return fold_build2_loc (loc, code, type, arg00,
12746 fold_convert_loc (loc, TREE_TYPE (arg00),
12747 arg10));
12748 if (operand_equal_p (arg01, arg10, 0))
12749 return fold_build2_loc (loc, code, type, arg00,
12750 fold_convert_loc (loc, TREE_TYPE (arg00),
12751 arg11));
12752 if (operand_equal_p (arg00, arg11, 0))
12753 return fold_build2_loc (loc, code, type, arg01,
12754 fold_convert_loc (loc, TREE_TYPE (arg01),
12755 arg10));
12756 if (operand_equal_p (arg00, arg10, 0))
12757 return fold_build2_loc (loc, code, type, arg01,
12758 fold_convert_loc (loc, TREE_TYPE (arg01),
12759 arg11));
12760
12761 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12762 if (TREE_CODE (arg01) == INTEGER_CST
12763 && TREE_CODE (arg11) == INTEGER_CST)
12764 {
12765 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12766 fold_convert_loc (loc, itype, arg11));
12767 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12768 return fold_build2_loc (loc, code, type, tem,
12769 fold_convert_loc (loc, itype, arg10));
12770 }
12771 }
12772
12773 /* Attempt to simplify equality/inequality comparisons of complex
12774 values. Only lower the comparison if the result is known or
12775 can be simplified to a single scalar comparison. */
12776 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12777 || TREE_CODE (arg0) == COMPLEX_CST)
12778 && (TREE_CODE (arg1) == COMPLEX_EXPR
12779 || TREE_CODE (arg1) == COMPLEX_CST))
12780 {
12781 tree real0, imag0, real1, imag1;
12782 tree rcond, icond;
12783
12784 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12785 {
12786 real0 = TREE_OPERAND (arg0, 0);
12787 imag0 = TREE_OPERAND (arg0, 1);
12788 }
12789 else
12790 {
12791 real0 = TREE_REALPART (arg0);
12792 imag0 = TREE_IMAGPART (arg0);
12793 }
12794
12795 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12796 {
12797 real1 = TREE_OPERAND (arg1, 0);
12798 imag1 = TREE_OPERAND (arg1, 1);
12799 }
12800 else
12801 {
12802 real1 = TREE_REALPART (arg1);
12803 imag1 = TREE_IMAGPART (arg1);
12804 }
12805
12806 rcond = fold_binary_loc (loc, code, type, real0, real1);
12807 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12808 {
12809 if (integer_zerop (rcond))
12810 {
12811 if (code == EQ_EXPR)
12812 return omit_two_operands_loc (loc, type, boolean_false_node,
12813 imag0, imag1);
12814 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12815 }
12816 else
12817 {
12818 if (code == NE_EXPR)
12819 return omit_two_operands_loc (loc, type, boolean_true_node,
12820 imag0, imag1);
12821 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12822 }
12823 }
12824
12825 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12826 if (icond && TREE_CODE (icond) == INTEGER_CST)
12827 {
12828 if (integer_zerop (icond))
12829 {
12830 if (code == EQ_EXPR)
12831 return omit_two_operands_loc (loc, type, boolean_false_node,
12832 real0, real1);
12833 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12834 }
12835 else
12836 {
12837 if (code == NE_EXPR)
12838 return omit_two_operands_loc (loc, type, boolean_true_node,
12839 real0, real1);
12840 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12841 }
12842 }
12843 }
12844
12845 return NULL_TREE;
12846
12847 case LT_EXPR:
12848 case GT_EXPR:
12849 case LE_EXPR:
12850 case GE_EXPR:
12851 tem = fold_comparison (loc, code, type, op0, op1);
12852 if (tem != NULL_TREE)
12853 return tem;
12854
12855 /* Transform comparisons of the form X +- C CMP X. */
12856 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12857 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12858 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12859 && !HONOR_SNANS (arg0))
12860 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12861 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12862 {
12863 tree arg01 = TREE_OPERAND (arg0, 1);
12864 enum tree_code code0 = TREE_CODE (arg0);
12865 int is_positive;
12866
12867 if (TREE_CODE (arg01) == REAL_CST)
12868 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12869 else
12870 is_positive = tree_int_cst_sgn (arg01);
12871
12872 /* (X - c) > X becomes false. */
12873 if (code == GT_EXPR
12874 && ((code0 == MINUS_EXPR && is_positive >= 0)
12875 || (code0 == PLUS_EXPR && is_positive <= 0)))
12876 {
12877 if (TREE_CODE (arg01) == INTEGER_CST
12878 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12879 fold_overflow_warning (("assuming signed overflow does not "
12880 "occur when assuming that (X - c) > X "
12881 "is always false"),
12882 WARN_STRICT_OVERFLOW_ALL);
12883 return constant_boolean_node (0, type);
12884 }
12885
12886 /* Likewise (X + c) < X becomes false. */
12887 if (code == LT_EXPR
12888 && ((code0 == PLUS_EXPR && is_positive >= 0)
12889 || (code0 == MINUS_EXPR && is_positive <= 0)))
12890 {
12891 if (TREE_CODE (arg01) == INTEGER_CST
12892 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12893 fold_overflow_warning (("assuming signed overflow does not "
12894 "occur when assuming that "
12895 "(X + c) < X is always false"),
12896 WARN_STRICT_OVERFLOW_ALL);
12897 return constant_boolean_node (0, type);
12898 }
12899
12900 /* Convert (X - c) <= X to true. */
12901 if (!HONOR_NANS (arg1)
12902 && code == LE_EXPR
12903 && ((code0 == MINUS_EXPR && is_positive >= 0)
12904 || (code0 == PLUS_EXPR && is_positive <= 0)))
12905 {
12906 if (TREE_CODE (arg01) == INTEGER_CST
12907 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12908 fold_overflow_warning (("assuming signed overflow does not "
12909 "occur when assuming that "
12910 "(X - c) <= X is always true"),
12911 WARN_STRICT_OVERFLOW_ALL);
12912 return constant_boolean_node (1, type);
12913 }
12914
12915 /* Convert (X + c) >= X to true. */
12916 if (!HONOR_NANS (arg1)
12917 && code == GE_EXPR
12918 && ((code0 == PLUS_EXPR && is_positive >= 0)
12919 || (code0 == MINUS_EXPR && is_positive <= 0)))
12920 {
12921 if (TREE_CODE (arg01) == INTEGER_CST
12922 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12923 fold_overflow_warning (("assuming signed overflow does not "
12924 "occur when assuming that "
12925 "(X + c) >= X is always true"),
12926 WARN_STRICT_OVERFLOW_ALL);
12927 return constant_boolean_node (1, type);
12928 }
12929
12930 if (TREE_CODE (arg01) == INTEGER_CST)
12931 {
12932 /* Convert X + c > X and X - c < X to true for integers. */
12933 if (code == GT_EXPR
12934 && ((code0 == PLUS_EXPR && is_positive > 0)
12935 || (code0 == MINUS_EXPR && is_positive < 0)))
12936 {
12937 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12938 fold_overflow_warning (("assuming signed overflow does "
12939 "not occur when assuming that "
12940 "(X + c) > X is always true"),
12941 WARN_STRICT_OVERFLOW_ALL);
12942 return constant_boolean_node (1, type);
12943 }
12944
12945 if (code == LT_EXPR
12946 && ((code0 == MINUS_EXPR && is_positive > 0)
12947 || (code0 == PLUS_EXPR && is_positive < 0)))
12948 {
12949 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12950 fold_overflow_warning (("assuming signed overflow does "
12951 "not occur when assuming that "
12952 "(X - c) < X is always true"),
12953 WARN_STRICT_OVERFLOW_ALL);
12954 return constant_boolean_node (1, type);
12955 }
12956
12957 /* Convert X + c <= X and X - c >= X to false for integers. */
12958 if (code == LE_EXPR
12959 && ((code0 == PLUS_EXPR && is_positive > 0)
12960 || (code0 == MINUS_EXPR && is_positive < 0)))
12961 {
12962 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12963 fold_overflow_warning (("assuming signed overflow does "
12964 "not occur when assuming that "
12965 "(X + c) <= X is always false"),
12966 WARN_STRICT_OVERFLOW_ALL);
12967 return constant_boolean_node (0, type);
12968 }
12969
12970 if (code == GE_EXPR
12971 && ((code0 == MINUS_EXPR && is_positive > 0)
12972 || (code0 == PLUS_EXPR && is_positive < 0)))
12973 {
12974 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12975 fold_overflow_warning (("assuming signed overflow does "
12976 "not occur when assuming that "
12977 "(X - c) >= X is always false"),
12978 WARN_STRICT_OVERFLOW_ALL);
12979 return constant_boolean_node (0, type);
12980 }
12981 }
12982 }
12983
12984 /* Comparisons with the highest or lowest possible integer of
12985 the specified precision will have known values. */
12986 {
12987 tree arg1_type = TREE_TYPE (arg1);
12988 unsigned int prec = TYPE_PRECISION (arg1_type);
12989
12990 if (TREE_CODE (arg1) == INTEGER_CST
12991 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12992 {
12993 wide_int max = wi::max_value (arg1_type);
12994 wide_int signed_max = wi::max_value (prec, SIGNED);
12995 wide_int min = wi::min_value (arg1_type);
12996
12997 if (wi::eq_p (arg1, max))
12998 switch (code)
12999 {
13000 case GT_EXPR:
13001 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13002
13003 case GE_EXPR:
13004 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13005
13006 case LE_EXPR:
13007 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13008
13009 case LT_EXPR:
13010 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13011
13012 /* The GE_EXPR and LT_EXPR cases above are not normally
13013 reached because of previous transformations. */
13014
13015 default:
13016 break;
13017 }
13018 else if (wi::eq_p (arg1, max - 1))
13019 switch (code)
13020 {
13021 case GT_EXPR:
13022 arg1 = const_binop (PLUS_EXPR, arg1,
13023 build_int_cst (TREE_TYPE (arg1), 1));
13024 return fold_build2_loc (loc, EQ_EXPR, type,
13025 fold_convert_loc (loc,
13026 TREE_TYPE (arg1), arg0),
13027 arg1);
13028 case LE_EXPR:
13029 arg1 = const_binop (PLUS_EXPR, arg1,
13030 build_int_cst (TREE_TYPE (arg1), 1));
13031 return fold_build2_loc (loc, NE_EXPR, type,
13032 fold_convert_loc (loc, TREE_TYPE (arg1),
13033 arg0),
13034 arg1);
13035 default:
13036 break;
13037 }
13038 else if (wi::eq_p (arg1, min))
13039 switch (code)
13040 {
13041 case LT_EXPR:
13042 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13043
13044 case LE_EXPR:
13045 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13046
13047 case GE_EXPR:
13048 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13049
13050 case GT_EXPR:
13051 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13052
13053 default:
13054 break;
13055 }
13056 else if (wi::eq_p (arg1, min + 1))
13057 switch (code)
13058 {
13059 case GE_EXPR:
13060 arg1 = const_binop (MINUS_EXPR, arg1,
13061 build_int_cst (TREE_TYPE (arg1), 1));
13062 return fold_build2_loc (loc, NE_EXPR, type,
13063 fold_convert_loc (loc,
13064 TREE_TYPE (arg1), arg0),
13065 arg1);
13066 case LT_EXPR:
13067 arg1 = const_binop (MINUS_EXPR, arg1,
13068 build_int_cst (TREE_TYPE (arg1), 1));
13069 return fold_build2_loc (loc, EQ_EXPR, type,
13070 fold_convert_loc (loc, TREE_TYPE (arg1),
13071 arg0),
13072 arg1);
13073 default:
13074 break;
13075 }
13076
13077 else if (wi::eq_p (arg1, signed_max)
13078 && TYPE_UNSIGNED (arg1_type)
13079 /* We will flip the signedness of the comparison operator
13080 associated with the mode of arg1, so the sign bit is
13081 specified by this mode. Check that arg1 is the signed
13082 max associated with this sign bit. */
13083 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13084 /* signed_type does not work on pointer types. */
13085 && INTEGRAL_TYPE_P (arg1_type))
13086 {
13087 /* The following case also applies to X < signed_max+1
13088 and X >= signed_max+1 because of previous transformations. */
13089 if (code == LE_EXPR || code == GT_EXPR)
13090 {
13091 tree st = signed_type_for (arg1_type);
13092 return fold_build2_loc (loc,
13093 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13094 type, fold_convert_loc (loc, st, arg0),
13095 build_int_cst (st, 0));
13096 }
13097 }
13098 }
13099 }
13100
13101 /* If we are comparing an ABS_EXPR with a constant, we can
13102 convert all the cases into explicit comparisons, but they may
13103 well not be faster than doing the ABS and one comparison.
13104 But ABS (X) <= C is a range comparison, which becomes a subtraction
13105 and a comparison, and is probably faster. */
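/* E.g. abs (x) <= 5 becomes x >= -5 && x <= 5.  */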
13106 if (code == LE_EXPR
13107 && TREE_CODE (arg1) == INTEGER_CST
13108 && TREE_CODE (arg0) == ABS_EXPR
13109 && ! TREE_SIDE_EFFECTS (arg0)
13110 && (0 != (tem = negate_expr (arg1)))
13111 && TREE_CODE (tem) == INTEGER_CST
13112 && !TREE_OVERFLOW (tem))
13113 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13114 build2 (GE_EXPR, type,
13115 TREE_OPERAND (arg0, 0), tem),
13116 build2 (LE_EXPR, type,
13117 TREE_OPERAND (arg0, 0), arg1));
13118
13119 /* Convert ABS_EXPR<x> >= 0 to true. */
13120 strict_overflow_p = false;
13121 if (code == GE_EXPR
13122 && (integer_zerop (arg1)
13123 || (! HONOR_NANS (arg0)
13124 && real_zerop (arg1)))
13125 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13126 {
13127 if (strict_overflow_p)
13128 fold_overflow_warning (("assuming signed overflow does not occur "
13129 "when simplifying comparison of "
13130 "absolute value and zero"),
13131 WARN_STRICT_OVERFLOW_CONDITIONAL);
13132 return omit_one_operand_loc (loc, type,
13133 constant_boolean_node (true, type),
13134 arg0);
13135 }
13136
13137 /* Convert ABS_EXPR<x> < 0 to false. */
13138 strict_overflow_p = false;
13139 if (code == LT_EXPR
13140 && (integer_zerop (arg1) || real_zerop (arg1))
13141 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13142 {
13143 if (strict_overflow_p)
13144 fold_overflow_warning (("assuming signed overflow does not occur "
13145 "when simplifying comparison of "
13146 "absolute value and zero"),
13147 WARN_STRICT_OVERFLOW_CONDITIONAL);
13148 return omit_one_operand_loc (loc, type,
13149 constant_boolean_node (false, type),
13150 arg0);
13151 }
13152
13153 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13154 and similarly for >= into !=. */
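/* E.g. for unsigned x, x < (1 << y) becomes (x >> y) == 0 and
   x >= (1 << y) becomes (x >> y) != 0.  */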
13155 if ((code == LT_EXPR || code == GE_EXPR)
13156 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13157 && TREE_CODE (arg1) == LSHIFT_EXPR
13158 && integer_onep (TREE_OPERAND (arg1, 0)))
13159 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13160 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13161 TREE_OPERAND (arg1, 1)),
13162 build_zero_cst (TREE_TYPE (arg0)));
13163
13164 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
13165 otherwise Y might be >= # of bits in X's type and thus e.g.
13166 (unsigned char) (1 << Y) for Y 15 might be 0.
13167 If the cast is widening, then 1 << Y should have unsigned type,
13168 otherwise if Y is number of bits in the signed shift type minus 1,
13169 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
13170 31 might be 0xffffffff80000000. */
13171 if ((code == LT_EXPR || code == GE_EXPR)
13172 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13173 && CONVERT_EXPR_P (arg1)
13174 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13175 && (element_precision (TREE_TYPE (arg1))
13176 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13177 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13178 || (element_precision (TREE_TYPE (arg1))
13179 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13180 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13181 {
13182 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13183 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13184 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13185 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13186 build_zero_cst (TREE_TYPE (arg0)));
13187 }
13188
13189 return NULL_TREE;
13190
13191 case UNORDERED_EXPR:
13192 case ORDERED_EXPR:
13193 case UNLT_EXPR:
13194 case UNLE_EXPR:
13195 case UNGT_EXPR:
13196 case UNGE_EXPR:
13197 case UNEQ_EXPR:
13198 case LTGT_EXPR:
13199 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13200 {
13201 t1 = fold_relational_const (code, type, arg0, arg1);
13202 if (t1 != NULL_TREE)
13203 return t1;
13204 }
13205
13206 /* If the first operand is NaN, the result is constant. */
13207 if (TREE_CODE (arg0) == REAL_CST
13208 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13209 && (code != LTGT_EXPR || ! flag_trapping_math))
13210 {
13211 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13212 ? integer_zero_node
13213 : integer_one_node;
13214 return omit_one_operand_loc (loc, type, t1, arg1);
13215 }
13216
13217 /* If the second operand is NaN, the result is constant. */
13218 if (TREE_CODE (arg1) == REAL_CST
13219 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13220 && (code != LTGT_EXPR || ! flag_trapping_math))
13221 {
13222 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13223 ? integer_zero_node
13224 : integer_one_node;
13225 return omit_one_operand_loc (loc, type, t1, arg0);
13226 }
13227
13228 /* Simplify unordered comparison of something with itself. */
13229 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13230 && operand_equal_p (arg0, arg1, 0))
13231 return constant_boolean_node (1, type);
13232
13233 if (code == LTGT_EXPR
13234 && !flag_trapping_math
13235 && operand_equal_p (arg0, arg1, 0))
13236 return constant_boolean_node (0, type);
13237
13238 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13239 {
13240 tree targ0 = strip_float_extensions (arg0);
13241 tree targ1 = strip_float_extensions (arg1);
13242 tree newtype = TREE_TYPE (targ0);
13243
13244 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13245 newtype = TREE_TYPE (targ1);
13246
13247 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13248 return fold_build2_loc (loc, code, type,
13249 fold_convert_loc (loc, newtype, targ0),
13250 fold_convert_loc (loc, newtype, targ1));
13251 }
13252
13253 return NULL_TREE;
13254
13255 case COMPOUND_EXPR:
13256 /* When pedantic, a compound expression can be neither an lvalue
13257 nor an integer constant expression. */
13258 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13259 return NULL_TREE;
13260 /* Don't let (0, 0) be null pointer constant. */
13261 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13262 : fold_convert_loc (loc, type, arg1);
13263 return pedantic_non_lvalue_loc (loc, tem);
13264
13265 case ASSERT_EXPR:
13266 /* An ASSERT_EXPR should never be passed to fold_binary. */
13267 gcc_unreachable ();
13268
13269 default:
13270 return NULL_TREE;
13271 } /* switch (code) */
13272 }
13273
13274 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13275 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13276 of GOTO_EXPR. */
13277
13278 static tree
13279 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13280 {
13281 switch (TREE_CODE (*tp))
13282 {
13283 case LABEL_EXPR:
13284 return *tp;
13285
13286 case GOTO_EXPR:
13287 *walk_subtrees = 0;
13288
13289 /* ... fall through ... */
13290
13291 default:
13292 return NULL_TREE;
13293 }
13294 }
13295
13296 /* Return whether the sub-tree ST contains a label which is accessible from
13297 outside the sub-tree. */
13298
13299 static bool
13300 contains_label_p (tree st)
13301 {
13302 return
13303 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13304 }
13305
13306 /* Fold a ternary expression of code CODE and type TYPE with operands
13307 OP0, OP1, and OP2. Return the folded expression if folding is
13308 successful. Otherwise, return NULL_TREE. */
13309
13310 tree
13311 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13312 tree op0, tree op1, tree op2)
13313 {
13314 tree tem;
13315 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13316 enum tree_code_class kind = TREE_CODE_CLASS (code);
13317
13318 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13319 && TREE_CODE_LENGTH (code) == 3);
13320
13321 /* If this is a commutative operation, and OP0 is a constant, move it
13322 to OP1 to reduce the number of tests below. */
13323 if (commutative_ternary_tree_code (code)
13324 && tree_swap_operands_p (op0, op1, true))
13325 return fold_build3_loc (loc, code, type, op1, op0, op2);
13326
13327 tem = generic_simplify (loc, code, type, op0, op1, op2);
13328 if (tem)
13329 return tem;
13330
13331 /* Strip any conversions that don't change the mode. This is safe
13332 for every expression, except for a comparison expression because
13333 its signedness is derived from its operands. So, in the latter
13334 case, only strip conversions that don't change the signedness.
13335
13336 Note that this is done as an internal manipulation within the
13337 constant folder, in order to find the simplest representation of
13338 the arguments so that their form can be studied. In any cases,
13339 the appropriate type conversions should be put back in the tree
13340 that will get out of the constant folder. */
13341 if (op0)
13342 {
13343 arg0 = op0;
13344 STRIP_NOPS (arg0);
13345 }
13346
13347 if (op1)
13348 {
13349 arg1 = op1;
13350 STRIP_NOPS (arg1);
13351 }
13352
13353 if (op2)
13354 {
13355 arg2 = op2;
13356 STRIP_NOPS (arg2);
13357 }
13358
13359 switch (code)
13360 {
13361 case COMPONENT_REF:
13362 if (TREE_CODE (arg0) == CONSTRUCTOR
13363 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13364 {
13365 unsigned HOST_WIDE_INT idx;
13366 tree field, value;
13367 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13368 if (field == arg1)
13369 return value;
13370 }
13371 return NULL_TREE;
13372
13373 case COND_EXPR:
13374 case VEC_COND_EXPR:
13375 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13376 so all simple results must be passed through pedantic_non_lvalue. */
13377 if (TREE_CODE (arg0) == INTEGER_CST)
13378 {
13379 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13380 tem = integer_zerop (arg0) ? op2 : op1;
13381 /* Only optimize constant conditions when the selected branch
13382 has the same type as the COND_EXPR. This avoids optimizing
13383 away "c ? x : throw", where the throw has a void type.
13384 Avoid throwing away an operand that contains a label. */
13385 if ((!TREE_SIDE_EFFECTS (unused_op)
13386 || !contains_label_p (unused_op))
13387 && (! VOID_TYPE_P (TREE_TYPE (tem))
13388 || VOID_TYPE_P (type)))
13389 return pedantic_non_lvalue_loc (loc, tem);
13390 return NULL_TREE;
13391 }
13392 else if (TREE_CODE (arg0) == VECTOR_CST)
13393 {
13394 if ((TREE_CODE (arg1) == VECTOR_CST
13395 || TREE_CODE (arg1) == CONSTRUCTOR)
13396 && (TREE_CODE (arg2) == VECTOR_CST
13397 || TREE_CODE (arg2) == CONSTRUCTOR))
13398 {
13399 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13400 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13401 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13402 for (i = 0; i < nelts; i++)
13403 {
13404 tree val = VECTOR_CST_ELT (arg0, i);
13405 if (integer_all_onesp (val))
13406 sel[i] = i;
13407 else if (integer_zerop (val))
13408 sel[i] = nelts + i;
13409 else /* Currently unreachable. */
13410 return NULL_TREE;
13411 }
13412 tree t = fold_vec_perm (type, arg1, arg2, sel);
13413 if (t != NULL_TREE)
13414 return t;
13415 }
13416 }
13417
13418 /* If we have A op B ? A : C, we may be able to convert this to a
13419 simpler expression, depending on the operation and the values
13420 of B and C. Signed zeros prevent all of these transformations,
13421 for reasons given above each one.
13422
13423 Also try swapping the arguments and inverting the conditional. */
13424 if (COMPARISON_CLASS_P (arg0)
13425 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13426 arg1, TREE_OPERAND (arg0, 1))
13427 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13428 {
13429 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13430 if (tem)
13431 return tem;
13432 }
13433
13434 if (COMPARISON_CLASS_P (arg0)
13435 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13436 op2,
13437 TREE_OPERAND (arg0, 1))
13438 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13439 {
13440 location_t loc0 = expr_location_or (arg0, loc);
13441 tem = fold_invert_truthvalue (loc0, arg0);
13442 if (tem && COMPARISON_CLASS_P (tem))
13443 {
13444 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13445 if (tem)
13446 return tem;
13447 }
13448 }
13449
13450 /* If the second operand is simpler than the third, swap them
13451 since that produces better jump optimization results. */
13452 if (truth_value_p (TREE_CODE (arg0))
13453 && tree_swap_operands_p (op1, op2, false))
13454 {
13455 location_t loc0 = expr_location_or (arg0, loc);
13456 /* See if this can be inverted. If it can't, possibly because
13457 it was a floating-point inequality comparison, don't do
13458 anything. */
13459 tem = fold_invert_truthvalue (loc0, arg0);
13460 if (tem)
13461 return fold_build3_loc (loc, code, type, tem, op2, op1);
13462 }
13463
13464 /* Convert A ? 1 : 0 to simply A. */
13465 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13466 : (integer_onep (op1)
13467 && !VECTOR_TYPE_P (type)))
13468 && integer_zerop (op2)
13469 /* If we try to convert OP0 to our type, the
13470 call to fold will try to move the conversion inside
13471 a COND, which will recurse. In that case, the COND_EXPR
13472 is probably the best choice, so leave it alone. */
13473 && type == TREE_TYPE (arg0))
13474 return pedantic_non_lvalue_loc (loc, arg0);
13475
13476 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13477 over COND_EXPR in cases such as floating point comparisons. */
13478 if (integer_zerop (op1)
13479 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13480 : (integer_onep (op2)
13481 && !VECTOR_TYPE_P (type)))
13482 && truth_value_p (TREE_CODE (arg0)))
13483 return pedantic_non_lvalue_loc (loc,
13484 fold_convert_loc (loc, type,
13485 invert_truthvalue_loc (loc,
13486 arg0)));
13487
13488 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
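 /* E.g. for 32-bit int X, "x < 0 ? INT_MIN : 0" becomes
 "x & INT_MIN", converted to the result type. */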
13489 if (TREE_CODE (arg0) == LT_EXPR
13490 && integer_zerop (TREE_OPERAND (arg0, 1))
13491 && integer_zerop (op2)
13492 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13493 {
13494 /* sign_bit_p looks through both zero and sign extensions,
13495 but for this optimization only sign extensions are
13496 usable. */
13497 tree tem2 = TREE_OPERAND (arg0, 0);
13498 while (tem != tem2)
13499 {
13500 if (TREE_CODE (tem2) != NOP_EXPR
13501 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13502 {
13503 tem = NULL_TREE;
13504 break;
13505 }
13506 tem2 = TREE_OPERAND (tem2, 0);
13507 }
13508 /* sign_bit_p only checks ARG1 bits within A's precision.
13509 If <sign bit of A> has wider type than A, bits outside
13510 of A's precision in <sign bit of A> need to be checked.
13511 If they are all 0, this optimization needs to be done
13512 in unsigned A's type, if they are all 1 in signed A's type,
13513 otherwise this can't be done. */
13514 if (tem
13515 && TYPE_PRECISION (TREE_TYPE (tem))
13516 < TYPE_PRECISION (TREE_TYPE (arg1))
13517 && TYPE_PRECISION (TREE_TYPE (tem))
13518 < TYPE_PRECISION (type))
13519 {
13520 int inner_width, outer_width;
13521 tree tem_type;
13522
13523 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13524 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13525 if (outer_width > TYPE_PRECISION (type))
13526 outer_width = TYPE_PRECISION (type);
13527
13528 wide_int mask = wi::shifted_mask
13529 (inner_width, outer_width - inner_width, false,
13530 TYPE_PRECISION (TREE_TYPE (arg1)));
13531
13532 wide_int common = mask & arg1;
13533 if (common == mask)
13534 {
13535 tem_type = signed_type_for (TREE_TYPE (tem));
13536 tem = fold_convert_loc (loc, tem_type, tem);
13537 }
13538 else if (common == 0)
13539 {
13540 tem_type = unsigned_type_for (TREE_TYPE (tem));
13541 tem = fold_convert_loc (loc, tem_type, tem);
13542 }
13543 else
13544 tem = NULL_TREE;
13545 }
13546
13547 if (tem)
13548 return
13549 fold_convert_loc (loc, type,
13550 fold_build2_loc (loc, BIT_AND_EXPR,
13551 TREE_TYPE (tem), tem,
13552 fold_convert_loc (loc,
13553 TREE_TYPE (tem),
13554 arg1)));
13555 }
13556
13557 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13558 already handled above. */
13559 if (TREE_CODE (arg0) == BIT_AND_EXPR
13560 && integer_onep (TREE_OPERAND (arg0, 1))
13561 && integer_zerop (op2)
13562 && integer_pow2p (arg1))
13563 {
13564 tree tem = TREE_OPERAND (arg0, 0);
13565 STRIP_NOPS (tem);
13566 if (TREE_CODE (tem) == RSHIFT_EXPR
13567 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13568 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
13569 tree_to_uhwi (TREE_OPERAND (tem, 1)))
13570 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13571 TREE_OPERAND (tem, 0), arg1);
13572 }
13573
13574 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13575 is probably obsolete because the first operand should be a
13576 truth value (that's why we have the two cases above), but let's
13577 leave it in until we can confirm this for all front-ends. */
13578 if (integer_zerop (op2)
13579 && TREE_CODE (arg0) == NE_EXPR
13580 && integer_zerop (TREE_OPERAND (arg0, 1))
13581 && integer_pow2p (arg1)
13582 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13583 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13584 arg1, OEP_ONLY_CONST))
13585 return pedantic_non_lvalue_loc (loc,
13586 fold_convert_loc (loc, type,
13587 TREE_OPERAND (arg0, 0)));
13588
13589 /* Disable the transformations below for vectors, since
13590 fold_binary_op_with_conditional_arg may undo them immediately,
13591 yielding an infinite loop. */
13592 if (code == VEC_COND_EXPR)
13593 return NULL_TREE;
13594
13595 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13596 if (integer_zerop (op2)
13597 && truth_value_p (TREE_CODE (arg0))
13598 && truth_value_p (TREE_CODE (arg1))
13599 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13600 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13601 : TRUTH_ANDIF_EXPR,
13602 type, fold_convert_loc (loc, type, arg0), arg1);
13603
13604 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13605 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13606 && truth_value_p (TREE_CODE (arg0))
13607 && truth_value_p (TREE_CODE (arg1))
13608 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13609 {
13610 location_t loc0 = expr_location_or (arg0, loc);
13611 /* Only perform transformation if ARG0 is easily inverted. */
13612 tem = fold_invert_truthvalue (loc0, arg0);
13613 if (tem)
13614 return fold_build2_loc (loc, code == VEC_COND_EXPR
13615 ? BIT_IOR_EXPR
13616 : TRUTH_ORIF_EXPR,
13617 type, fold_convert_loc (loc, type, tem),
13618 arg1);
13619 }
13620
13621 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13622 if (integer_zerop (arg1)
13623 && truth_value_p (TREE_CODE (arg0))
13624 && truth_value_p (TREE_CODE (op2))
13625 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13626 {
13627 location_t loc0 = expr_location_or (arg0, loc);
13628 /* Only perform transformation if ARG0 is easily inverted. */
13629 tem = fold_invert_truthvalue (loc0, arg0);
13630 if (tem)
13631 return fold_build2_loc (loc, code == VEC_COND_EXPR
13632 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13633 type, fold_convert_loc (loc, type, tem),
13634 op2);
13635 }
13636
13637 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13638 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13639 && truth_value_p (TREE_CODE (arg0))
13640 && truth_value_p (TREE_CODE (op2))
13641 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13642 return fold_build2_loc (loc, code == VEC_COND_EXPR
13643 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13644 type, fold_convert_loc (loc, type, arg0), op2);
13645
13646 return NULL_TREE;
13647
13648 case CALL_EXPR:
13649 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13650 of fold_ternary on them. */
13651 gcc_unreachable ();
13652
13653 case BIT_FIELD_REF:
13654 if ((TREE_CODE (arg0) == VECTOR_CST
13655 || (TREE_CODE (arg0) == CONSTRUCTOR
13656 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13657 && (type == TREE_TYPE (TREE_TYPE (arg0))
13658 || (TREE_CODE (type) == VECTOR_TYPE
13659 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13660 {
13661 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13662 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13663 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13664 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13665
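 /* OP1 gives the width of the reference in bits and OP2 its bit
 offset; the folds below require both to be whole multiples of
 the vector element size. */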
13666 if (n != 0
13667 && (idx % width) == 0
13668 && (n % width) == 0
13669 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13670 {
13671 idx = idx / width;
13672 n = n / width;
13673
13674 if (TREE_CODE (arg0) == VECTOR_CST)
13675 {
13676 if (n == 1)
13677 return VECTOR_CST_ELT (arg0, idx);
13678
13679 tree *vals = XALLOCAVEC (tree, n);
13680 for (unsigned i = 0; i < n; ++i)
13681 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13682 return build_vector (type, vals);
13683 }
13684
13685 /* Constructor elements can be subvectors. */
13686 unsigned HOST_WIDE_INT k = 1;
13687 if (CONSTRUCTOR_NELTS (arg0) != 0)
13688 {
13689 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13690 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13691 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13692 }
13693
13694 /* We keep an exact subset of the constructor elements. */
13695 if ((idx % k) == 0 && (n % k) == 0)
13696 {
13697 if (CONSTRUCTOR_NELTS (arg0) == 0)
13698 return build_constructor (type, NULL);
13699 idx /= k;
13700 n /= k;
13701 if (n == 1)
13702 {
13703 if (idx < CONSTRUCTOR_NELTS (arg0))
13704 return CONSTRUCTOR_ELT (arg0, idx)->value;
13705 return build_zero_cst (type);
13706 }
13707
13708 vec<constructor_elt, va_gc> *vals;
13709 vec_alloc (vals, n);
13710 for (unsigned i = 0;
13711 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13712 ++i)
13713 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13714 CONSTRUCTOR_ELT
13715 (arg0, idx + i)->value);
13716 return build_constructor (type, vals);
13717 }
13718 /* The bitfield references a single constructor element. */
13719 else if (idx + n <= (idx / k + 1) * k)
13720 {
13721 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13722 return build_zero_cst (type);
13723 else if (n == k)
13724 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13725 else
13726 return fold_build3_loc (loc, code, type,
13727 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13728 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13729 }
13730 }
13731 }
13732
13733 /* A bit-field-ref that referenced the full argument can be stripped. */
13734 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13735 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13736 && integer_zerop (op2))
13737 return fold_convert_loc (loc, type, arg0);
13738
13739 /* On constants we can use native encode/interpret to constant
13740 fold (nearly) all BIT_FIELD_REFs. */
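 /* native_encode_expr writes the constant's byte image in target
 order into a buffer; native_interpret_expr then reads a constant
 of TYPE back out of the selected bytes. */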
13741 if (CONSTANT_CLASS_P (arg0)
13742 && can_native_interpret_type_p (type)
13743 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13744 /* This limitation should not be necessary, we just need to
13745 round this up to mode size. */
13746 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13747 /* Need bit-shifting of the buffer to relax the following. */
13748 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13749 {
13750 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13751 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13752 unsigned HOST_WIDE_INT clen;
13753 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13754 /* ??? We cannot tell native_encode_expr to start at
13755 some random byte only. So limit us to a reasonable amount
13756 of work. */
13757 if (clen <= 4096)
13758 {
13759 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13760 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13761 if (len > 0
13762 && len * BITS_PER_UNIT >= bitpos + bitsize)
13763 {
13764 tree v = native_interpret_expr (type,
13765 b + bitpos / BITS_PER_UNIT,
13766 bitsize / BITS_PER_UNIT);
13767 if (v)
13768 return v;
13769 }
13770 }
13771 }
13772
13773 return NULL_TREE;
13774
13775 case FMA_EXPR:
13776 /* For integers we can decompose the FMA if possible. */
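 /* FMA_EXPR <A, B, C> computes A * B + C, so with constant A and B
 the multiplication folds away, and with C == 0 it degenerates
 into a plain multiplication. */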
13777 if (TREE_CODE (arg0) == INTEGER_CST
13778 && TREE_CODE (arg1) == INTEGER_CST)
13779 return fold_build2_loc (loc, PLUS_EXPR, type,
13780 const_binop (MULT_EXPR, arg0, arg1), arg2);
13781 if (integer_zerop (arg2))
13782 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13783
13784 return fold_fma (loc, type, arg0, arg1, arg2);
13785
13786 case VEC_PERM_EXPR:
13787 if (TREE_CODE (arg2) == VECTOR_CST)
13788 {
13789 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13790 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13791 unsigned char *sel2 = sel + nelts;
13792 bool need_mask_canon = false;
13793 bool need_mask_canon2 = false;
13794 bool all_in_vec0 = true;
13795 bool all_in_vec1 = true;
13796 bool maybe_identity = true;
13797 bool single_arg = (op0 == op1);
13798 bool changed = false;
13799
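 /* MASK wraps out-of-range selector values: into 0 .. NELTS-1 when
 both inputs are the same vector, and into 0 .. 2*NELTS-1
 otherwise. NEED_MASK_CANON records that some value was wrapped. */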
13800 mask2 = 2 * nelts - 1;
13801 mask = single_arg ? (nelts - 1) : mask2;
13802 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13803 for (i = 0; i < nelts; i++)
13804 {
13805 tree val = VECTOR_CST_ELT (arg2, i);
13806 if (TREE_CODE (val) != INTEGER_CST)
13807 return NULL_TREE;
13808
13809 /* Make sure that the perm value is in an acceptable
13810 range. */
13811 wide_int t = val;
13812 need_mask_canon |= wi::gtu_p (t, mask);
13813 need_mask_canon2 |= wi::gtu_p (t, mask2);
13814 sel[i] = t.to_uhwi () & mask;
13815 sel2[i] = t.to_uhwi () & mask2;
13816
13817 if (sel[i] < nelts)
13818 all_in_vec1 = false;
13819 else
13820 all_in_vec0 = false;
13821
13822 if ((sel[i] & (nelts-1)) != i)
13823 maybe_identity = false;
13824 }
13825
13826 if (maybe_identity)
13827 {
13828 if (all_in_vec0)
13829 return op0;
13830 if (all_in_vec1)
13831 return op1;
13832 }
13833
13834 if (all_in_vec0)
13835 op1 = op0;
13836 else if (all_in_vec1)
13837 {
13838 op0 = op1;
13839 for (i = 0; i < nelts; i++)
13840 sel[i] -= nelts;
13841 need_mask_canon = true;
13842 }
13843
13844 if ((TREE_CODE (op0) == VECTOR_CST
13845 || TREE_CODE (op0) == CONSTRUCTOR)
13846 && (TREE_CODE (op1) == VECTOR_CST
13847 || TREE_CODE (op1) == CONSTRUCTOR))
13848 {
13849 tree t = fold_vec_perm (type, op0, op1, sel);
13850 if (t != NULL_TREE)
13851 return t;
13852 }
13853
13854 if (op0 == op1 && !single_arg)
13855 changed = true;
13856
13857 /* Some targets are deficient and fail to expand a single
13858 argument permutation while still allowing an equivalent
13859 2-argument version. */
13860 if (need_mask_canon && arg2 == op2
13861 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13862 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13863 {
13864 need_mask_canon = need_mask_canon2;
13865 sel = sel2;
13866 }
13867
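 /* Rebuild the selector as a VECTOR_CST of the canonicalized,
 in-range indices computed above. */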
13868 if (need_mask_canon && arg2 == op2)
13869 {
13870 tree *tsel = XALLOCAVEC (tree, nelts);
13871 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13872 for (i = 0; i < nelts; i++)
13873 tsel[i] = build_int_cst (eltype, sel[i]);
13874 op2 = build_vector (TREE_TYPE (arg2), tsel);
13875 changed = true;
13876 }
13877
13878 if (changed)
13879 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13880 }
13881 return NULL_TREE;
13882
13883 default:
13884 return NULL_TREE;
13885 } /* switch (code) */
13886 }
13887
13888 /* Perform constant folding and related simplification of EXPR.
13889 The related simplifications include x*1 => x, x*0 => 0, etc.,
13890 and application of the associative law.
13891 NOP_EXPR conversions may be removed freely (as long as we
13892 are careful not to change the type of the overall expression).
13893 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13894 but we can constant-fold them if they have constant operands. */
13895
13896 #ifdef ENABLE_FOLD_CHECKING
13897 # define fold(x) fold_1 (x)
13898 static tree fold_1 (tree);
13899 static
13900 #endif
13901 tree
13902 fold (tree expr)
13903 {
13904 const tree t = expr;
13905 enum tree_code code = TREE_CODE (t);
13906 enum tree_code_class kind = TREE_CODE_CLASS (code);
13907 tree tem;
13908 location_t loc = EXPR_LOCATION (expr);
13909
13910 /* Return right away if a constant. */
13911 if (kind == tcc_constant)
13912 return t;
13913
13914 /* CALL_EXPR-like objects with variable numbers of operands are
13915 treated specially. */
13916 if (kind == tcc_vl_exp)
13917 {
13918 if (code == CALL_EXPR)
13919 {
13920 tem = fold_call_expr (loc, expr, false);
13921 return tem ? tem : expr;
13922 }
13923 return expr;
13924 }
13925
13926 if (IS_EXPR_CODE_CLASS (kind))
13927 {
13928 tree type = TREE_TYPE (t);
13929 tree op0, op1, op2;
13930
13931 switch (TREE_CODE_LENGTH (code))
13932 {
13933 case 1:
13934 op0 = TREE_OPERAND (t, 0);
13935 tem = fold_unary_loc (loc, code, type, op0);
13936 return tem ? tem : expr;
13937 case 2:
13938 op0 = TREE_OPERAND (t, 0);
13939 op1 = TREE_OPERAND (t, 1);
13940 tem = fold_binary_loc (loc, code, type, op0, op1);
13941 return tem ? tem : expr;
13942 case 3:
13943 op0 = TREE_OPERAND (t, 0);
13944 op1 = TREE_OPERAND (t, 1);
13945 op2 = TREE_OPERAND (t, 2);
13946 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13947 return tem ? tem : expr;
13948 default:
13949 break;
13950 }
13951 }
13952
13953 switch (code)
13954 {
13955 case ARRAY_REF:
13956 {
13957 tree op0 = TREE_OPERAND (t, 0);
13958 tree op1 = TREE_OPERAND (t, 1);
13959
13960 if (TREE_CODE (op1) == INTEGER_CST
13961 && TREE_CODE (op0) == CONSTRUCTOR
13962 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13963 {
13964 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13965 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13966 unsigned HOST_WIDE_INT begin = 0;
13967
13968 /* Find a matching index by means of a binary search. */
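 /* The constructor's indices are sorted, so a binary search is
 valid; a RANGE_EXPR index covers the inclusive range from its
 first to its second operand. */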
13969 while (begin != end)
13970 {
13971 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13972 tree index = (*elts)[middle].index;
13973
13974 if (TREE_CODE (index) == INTEGER_CST
13975 && tree_int_cst_lt (index, op1))
13976 begin = middle + 1;
13977 else if (TREE_CODE (index) == INTEGER_CST
13978 && tree_int_cst_lt (op1, index))
13979 end = middle;
13980 else if (TREE_CODE (index) == RANGE_EXPR
13981 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13982 begin = middle + 1;
13983 else if (TREE_CODE (index) == RANGE_EXPR
13984 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13985 end = middle;
13986 else
13987 return (*elts)[middle].value;
13988 }
13989 }
13990
13991 return t;
13992 }
13993
13994 /* Return a VECTOR_CST if possible. */
13995 case CONSTRUCTOR:
13996 {
13997 tree type = TREE_TYPE (t);
13998 if (TREE_CODE (type) != VECTOR_TYPE)
13999 return t;
14000
14001 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
14002 unsigned HOST_WIDE_INT idx, pos = 0;
14003 tree value;
14004
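 /* Flatten the constructor: scalar elements are copied directly,
 vector elements are expanded, and trailing elements the
 constructor omits are implicitly zero. */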
14005 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
14006 {
14007 if (!CONSTANT_CLASS_P (value))
14008 return t;
14009 if (TREE_CODE (value) == VECTOR_CST)
14010 {
14011 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14012 vec[pos++] = VECTOR_CST_ELT (value, i);
14013 }
14014 else
14015 vec[pos++] = value;
14016 }
14017 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14018 vec[pos] = build_zero_cst (TREE_TYPE (type));
14019
14020 return build_vector (type, vec);
14021 }
14022
14023 case CONST_DECL:
14024 return fold (DECL_INITIAL (t));
14025
14026 default:
14027 return t;
14028 } /* switch (code) */
14029 }
14030
14031 #ifdef ENABLE_FOLD_CHECKING
14032 #undef fold
14033
14034 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14035 hash_table<pointer_hash<const tree_node> > *);
14036 static void fold_check_failed (const_tree, const_tree);
14037 void print_fold_checksum (const_tree);
14038
14039 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14040 before and after the actual fold call, to verify that fold did not
14041 accidentally change the original expression. */
14042
14043 tree
14044 fold (tree expr)
14045 {
14046 tree ret;
14047 struct md5_ctx ctx;
14048 unsigned char checksum_before[16], checksum_after[16];
14049 hash_table<pointer_hash<const tree_node> > ht (32);
14050
14051 md5_init_ctx (&ctx);
14052 fold_checksum_tree (expr, &ctx, &ht);
14053 md5_finish_ctx (&ctx, checksum_before);
14054 ht.empty ();
14055
14056 ret = fold_1 (expr);
14057
14058 md5_init_ctx (&ctx);
14059 fold_checksum_tree (expr, &ctx, &ht);
14060 md5_finish_ctx (&ctx, checksum_after);
14061
14062 if (memcmp (checksum_before, checksum_after, 16))
14063 fold_check_failed (expr, ret);
14064
14065 return ret;
14066 }
14067
14068 void
14069 print_fold_checksum (const_tree expr)
14070 {
14071 struct md5_ctx ctx;
14072 unsigned char checksum[16], cnt;
14073 hash_table<pointer_hash<const tree_node> > ht (32);
14074
14075 md5_init_ctx (&ctx);
14076 fold_checksum_tree (expr, &ctx, &ht);
14077 md5_finish_ctx (&ctx, checksum);
14078 for (cnt = 0; cnt < 16; ++cnt)
14079 fprintf (stderr, "%02x", checksum[cnt]);
14080 putc ('\n', stderr);
14081 }
14082
14083 static void
14084 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14085 {
14086 internal_error ("fold check: original tree changed by fold");
14087 }
14088
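 /* Hash into CTX every tree node reachable from EXPR. HT records the
 nodes already visited so that shared subtrees are processed only
 once. */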
14089 static void
14090 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14091 hash_table<pointer_hash <const tree_node> > *ht)
14092 {
14093 const tree_node **slot;
14094 enum tree_code code;
14095 union tree_node buf;
14096 int i, len;
14097
14098 recursive_label:
14099 if (expr == NULL)
14100 return;
14101 slot = ht->find_slot (expr, INSERT);
14102 if (*slot != NULL)
14103 return;
14104 *slot = expr;
14105 code = TREE_CODE (expr);
14106 if (TREE_CODE_CLASS (code) == tcc_declaration
14107 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14108 {
14109 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14110 memcpy ((char *) &buf, expr, tree_size (expr));
14111 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14112 buf.decl_with_vis.symtab_node = NULL;
14113 expr = (tree) &buf;
14114 }
14115 else if (TREE_CODE_CLASS (code) == tcc_type
14116 && (TYPE_POINTER_TO (expr)
14117 || TYPE_REFERENCE_TO (expr)
14118 || TYPE_CACHED_VALUES_P (expr)
14119 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14120 || TYPE_NEXT_VARIANT (expr)))
14121 {
14122 /* Allow these fields to be modified. */
14123 tree tmp;
14124 memcpy ((char *) &buf, expr, tree_size (expr));
14125 expr = tmp = (tree) &buf;
14126 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14127 TYPE_POINTER_TO (tmp) = NULL;
14128 TYPE_REFERENCE_TO (tmp) = NULL;
14129 TYPE_NEXT_VARIANT (tmp) = NULL;
14130 if (TYPE_CACHED_VALUES_P (tmp))
14131 {
14132 TYPE_CACHED_VALUES_P (tmp) = 0;
14133 TYPE_CACHED_VALUES (tmp) = NULL;
14134 }
14135 }
14136 md5_process_bytes (expr, tree_size (expr), ctx);
14137 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14138 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14139 if (TREE_CODE_CLASS (code) != tcc_type
14140 && TREE_CODE_CLASS (code) != tcc_declaration
14141 && code != TREE_LIST
14142 && code != SSA_NAME
14143 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14144 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14145 switch (TREE_CODE_CLASS (code))
14146 {
14147 case tcc_constant:
14148 switch (code)
14149 {
14150 case STRING_CST:
14151 md5_process_bytes (TREE_STRING_POINTER (expr),
14152 TREE_STRING_LENGTH (expr), ctx);
14153 break;
14154 case COMPLEX_CST:
14155 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14156 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14157 break;
14158 case VECTOR_CST:
14159 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14160 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14161 break;
14162 default:
14163 break;
14164 }
14165 break;
14166 case tcc_exceptional:
14167 switch (code)
14168 {
14169 case TREE_LIST:
14170 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14171 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14172 expr = TREE_CHAIN (expr);
14173 goto recursive_label;
14174 break;
14175 case TREE_VEC:
14176 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14177 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14178 break;
14179 default:
14180 break;
14181 }
14182 break;
14183 case tcc_expression:
14184 case tcc_reference:
14185 case tcc_comparison:
14186 case tcc_unary:
14187 case tcc_binary:
14188 case tcc_statement:
14189 case tcc_vl_exp:
14190 len = TREE_OPERAND_LENGTH (expr);
14191 for (i = 0; i < len; ++i)
14192 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14193 break;
14194 case tcc_declaration:
14195 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14196 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14197 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14198 {
14199 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14200 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14201 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14202 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14203 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14204 }
14205
14206 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14207 {
14208 if (TREE_CODE (expr) == FUNCTION_DECL)
14209 {
14210 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14211 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14212 }
14213 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14214 }
14215 break;
14216 case tcc_type:
14217 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14218 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14219 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14220 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14221 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14222 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14223 if (INTEGRAL_TYPE_P (expr)
14224 || SCALAR_FLOAT_TYPE_P (expr))
14225 {
14226 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14227 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14228 }
14229 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14230 if (TREE_CODE (expr) == RECORD_TYPE
14231 || TREE_CODE (expr) == UNION_TYPE
14232 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14233 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14234 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14235 break;
14236 default:
14237 break;
14238 }
14239 }
14240
14241 /* Helper function for outputting the checksum of a tree T. When
14242 debugging with gdb, you can "define mynext" to be "next" followed
14243 by "call debug_fold_checksum (op0)", then just trace down till the
14244 outputs differ. */
14245
14246 DEBUG_FUNCTION void
14247 debug_fold_checksum (const_tree t)
14248 {
14249 int i;
14250 unsigned char checksum[16];
14251 struct md5_ctx ctx;
14252 hash_table<pointer_hash<const tree_node> > ht (32);
14253
14254 md5_init_ctx (&ctx);
14255 fold_checksum_tree (t, &ctx, &ht);
14256 md5_finish_ctx (&ctx, checksum);
14257 ht.empty ();
14258
14259 for (i = 0; i < 16; i++)
14260 fprintf (stderr, "%d ", checksum[i]);
14261
14262 fprintf (stderr, "\n");
14263 }
14264
14265 #endif
14266
14267 /* Fold a unary tree expression with code CODE of type TYPE with an
14268 operand OP0. LOC is the location of the resulting expression.
14269 Return a folded expression if successful. Otherwise, return a tree
14270 expression with code CODE of type TYPE with an operand OP0. */
14271
14272 tree
14273 fold_build1_stat_loc (location_t loc,
14274 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14275 {
14276 tree tem;
14277 #ifdef ENABLE_FOLD_CHECKING
14278 unsigned char checksum_before[16], checksum_after[16];
14279 struct md5_ctx ctx;
14280 hash_table<pointer_hash<const tree_node> > ht (32);
14281
14282 md5_init_ctx (&ctx);
14283 fold_checksum_tree (op0, &ctx, &ht);
14284 md5_finish_ctx (&ctx, checksum_before);
14285 ht.empty ();
14286 #endif
14287
14288 tem = fold_unary_loc (loc, code, type, op0);
14289 if (!tem)
14290 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14291
14292 #ifdef ENABLE_FOLD_CHECKING
14293 md5_init_ctx (&ctx);
14294 fold_checksum_tree (op0, &ctx, &ht);
14295 md5_finish_ctx (&ctx, checksum_after);
14296
14297 if (memcmp (checksum_before, checksum_after, 16))
14298 fold_check_failed (op0, tem);
14299 #endif
14300 return tem;
14301 }
14302
14303 /* Fold a binary tree expression with code CODE of type TYPE with
14304 operands OP0 and OP1. LOC is the location of the resulting
14305 expression. Return a folded expression if successful. Otherwise,
14306 return a tree expression with code CODE of type TYPE with operands
14307 OP0 and OP1. */
14308
14309 tree
14310 fold_build2_stat_loc (location_t loc,
14311 enum tree_code code, tree type, tree op0, tree op1
14312 MEM_STAT_DECL)
14313 {
14314 tree tem;
14315 #ifdef ENABLE_FOLD_CHECKING
14316 unsigned char checksum_before_op0[16],
14317 checksum_before_op1[16],
14318 checksum_after_op0[16],
14319 checksum_after_op1[16];
14320 struct md5_ctx ctx;
14321 hash_table<pointer_hash<const tree_node> > ht (32);
14322
14323 md5_init_ctx (&ctx);
14324 fold_checksum_tree (op0, &ctx, &ht);
14325 md5_finish_ctx (&ctx, checksum_before_op0);
14326 ht.empty ();
14327
14328 md5_init_ctx (&ctx);
14329 fold_checksum_tree (op1, &ctx, &ht);
14330 md5_finish_ctx (&ctx, checksum_before_op1);
14331 ht.empty ();
14332 #endif
14333
14334 tem = fold_binary_loc (loc, code, type, op0, op1);
14335 if (!tem)
14336 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14337
14338 #ifdef ENABLE_FOLD_CHECKING
14339 md5_init_ctx (&ctx);
14340 fold_checksum_tree (op0, &ctx, &ht);
14341 md5_finish_ctx (&ctx, checksum_after_op0);
14342 ht.empty ();
14343
14344 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14345 fold_check_failed (op0, tem);
14346
14347 md5_init_ctx (&ctx);
14348 fold_checksum_tree (op1, &ctx, &ht);
14349 md5_finish_ctx (&ctx, checksum_after_op1);
14350
14351 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14352 fold_check_failed (op1, tem);
14353 #endif
14354 return tem;
14355 }
14356
14357 /* Fold a ternary tree expression with code CODE of type TYPE with
14358 operands OP0, OP1, and OP2. Return a folded expression if
14359 successful. Otherwise, return a tree expression with code CODE of
14360 type TYPE with operands OP0, OP1, and OP2. */
14361
14362 tree
14363 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14364 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14365 {
14366 tree tem;
14367 #ifdef ENABLE_FOLD_CHECKING
14368 unsigned char checksum_before_op0[16],
14369 checksum_before_op1[16],
14370 checksum_before_op2[16],
14371 checksum_after_op0[16],
14372 checksum_after_op1[16],
14373 checksum_after_op2[16];
14374 struct md5_ctx ctx;
14375 hash_table<pointer_hash<const tree_node> > ht (32);
14376
14377 md5_init_ctx (&ctx);
14378 fold_checksum_tree (op0, &ctx, &ht);
14379 md5_finish_ctx (&ctx, checksum_before_op0);
14380 ht.empty ();
14381
14382 md5_init_ctx (&ctx);
14383 fold_checksum_tree (op1, &ctx, &ht);
14384 md5_finish_ctx (&ctx, checksum_before_op1);
14385 ht.empty ();
14386
14387 md5_init_ctx (&ctx);
14388 fold_checksum_tree (op2, &ctx, &ht);
14389 md5_finish_ctx (&ctx, checksum_before_op2);
14390 ht.empty ();
14391 #endif
14392
14393 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14394 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14395 if (!tem)
14396 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14397
14398 #ifdef ENABLE_FOLD_CHECKING
14399 md5_init_ctx (&ctx);
14400 fold_checksum_tree (op0, &ctx, &ht);
14401 md5_finish_ctx (&ctx, checksum_after_op0);
14402 ht.empty ();
14403
14404 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14405 fold_check_failed (op0, tem);
14406
14407 md5_init_ctx (&ctx);
14408 fold_checksum_tree (op1, &ctx, &ht);
14409 md5_finish_ctx (&ctx, checksum_after_op1);
14410 ht.empty ();
14411
14412 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14413 fold_check_failed (op1, tem);
14414
14415 md5_init_ctx (&ctx);
14416 fold_checksum_tree (op2, &ctx, &ht);
14417 md5_finish_ctx (&ctx, checksum_after_op2);
14418
14419 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14420 fold_check_failed (op2, tem);
14421 #endif
14422 return tem;
14423 }
14424
14425 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14426 arguments in ARGARRAY, and a null static chain.
14427 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14428 of type TYPE from the given operands as constructed by build_call_array. */
14429
14430 tree
14431 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14432 int nargs, tree *argarray)
14433 {
14434 tree tem;
14435 #ifdef ENABLE_FOLD_CHECKING
14436 unsigned char checksum_before_fn[16],
14437 checksum_before_arglist[16],
14438 checksum_after_fn[16],
14439 checksum_after_arglist[16];
14440 struct md5_ctx ctx;
14441 hash_table<pointer_hash<const tree_node> > ht (32);
14442 int i;
14443
14444 md5_init_ctx (&ctx);
14445 fold_checksum_tree (fn, &ctx, &ht);
14446 md5_finish_ctx (&ctx, checksum_before_fn);
14447 ht.empty ();
14448
14449 md5_init_ctx (&ctx);
14450 for (i = 0; i < nargs; i++)
14451 fold_checksum_tree (argarray[i], &ctx, &ht);
14452 md5_finish_ctx (&ctx, checksum_before_arglist);
14453 ht.empty ();
14454 #endif
14455
14456 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14457 if (!tem)
14458 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14459
14460 #ifdef ENABLE_FOLD_CHECKING
14461 md5_init_ctx (&ctx);
14462 fold_checksum_tree (fn, &ctx, &ht);
14463 md5_finish_ctx (&ctx, checksum_after_fn);
14464 ht.empty ();
14465
14466 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14467 fold_check_failed (fn, tem);
14468
14469 md5_init_ctx (&ctx);
14470 for (i = 0; i < nargs; i++)
14471 fold_checksum_tree (argarray[i], &ctx, &ht);
14472 md5_finish_ctx (&ctx, checksum_after_arglist);
14473
14474 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14475 fold_check_failed (NULL_TREE, tem);
14476 #endif
14477 return tem;
14478 }
14479
14480 /* Perform constant folding and related simplification of initializer
14481 expression EXPR. These behave identically to "fold_buildN" but ignore
14482 potential run-time traps and exceptions that fold must preserve. */
14483
14484 #define START_FOLD_INIT \
14485 int saved_signaling_nans = flag_signaling_nans;\
14486 int saved_trapping_math = flag_trapping_math;\
14487 int saved_rounding_math = flag_rounding_math;\
14488 int saved_trapv = flag_trapv;\
14489 int saved_folding_initializer = folding_initializer;\
14490 flag_signaling_nans = 0;\
14491 flag_trapping_math = 0;\
14492 flag_rounding_math = 0;\
14493 flag_trapv = 0;\
14494 folding_initializer = 1;
14495
14496 #define END_FOLD_INIT \
14497 flag_signaling_nans = saved_signaling_nans;\
14498 flag_trapping_math = saved_trapping_math;\
14499 flag_rounding_math = saved_rounding_math;\
14500 flag_trapv = saved_trapv;\
14501 folding_initializer = saved_folding_initializer;
14502
14503 tree
14504 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14505 tree type, tree op)
14506 {
14507 tree result;
14508 START_FOLD_INIT;
14509
14510 result = fold_build1_loc (loc, code, type, op);
14511
14512 END_FOLD_INIT;
14513 return result;
14514 }
14515
14516 tree
14517 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14518 tree type, tree op0, tree op1)
14519 {
14520 tree result;
14521 START_FOLD_INIT;
14522
14523 result = fold_build2_loc (loc, code, type, op0, op1);
14524
14525 END_FOLD_INIT;
14526 return result;
14527 }
14528
14529 tree
14530 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14531 int nargs, tree *argarray)
14532 {
14533 tree result;
14534 START_FOLD_INIT;
14535
14536 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14537
14538 END_FOLD_INIT;
14539 return result;
14540 }
14541
14542 #undef START_FOLD_INIT
14543 #undef END_FOLD_INIT
14544
14545 /* Determine if first argument is a multiple of second argument. Return 0 if
14546 it is not, or we cannot easily determine that it is.
14547
14548 An example of the sort of thing we care about (at this point; this routine
14549 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14550 fold cases do now) is discovering that
14551
14552 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14553
14554 is a multiple of
14555
14556 SAVE_EXPR (J * 8)
14557
14558 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14559
14560 This code also handles discovering that
14561
14562 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14563
14564 is a multiple of 8 so we don't have to worry about dealing with a
14565 possible remainder.
14566
14567 Note that we *look* inside a SAVE_EXPR only to determine how it was
14568 calculated; it is not safe for fold to do much of anything else with the
14569 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14570 at run time. For example, the latter example above *cannot* be implemented
14571 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14572 evaluation time of the original SAVE_EXPR is not necessarily the same at
14573 the time the new expression is evaluated. The only optimization of this
14574 sort that would be valid is changing
14575
14576 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14577
14578 divided by 8 to
14579
14580 SAVE_EXPR (I) * SAVE_EXPR (J)
14581
14582 (where the same SAVE_EXPR (J) is used in the original and the
14583 transformed version). */
14584
14585 int
14586 multiple_of_p (tree type, const_tree top, const_tree bottom)
14587 {
14588 if (operand_equal_p (top, bottom, 0))
14589 return 1;
14590
14591 if (TREE_CODE (type) != INTEGER_TYPE)
14592 return 0;
14593
14594 switch (TREE_CODE (top))
14595 {
14596 case BIT_AND_EXPR:
14597 /* Bitwise and provides a power of two multiple. If the mask is
14598 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14599 if (!integer_pow2p (bottom))
14600 return 0;
14601 /* FALLTHRU */
14602
14603 case MULT_EXPR:
14604 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14605 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14606
14607 case PLUS_EXPR:
14608 case MINUS_EXPR:
14609 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14610 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14611
14612 case LSHIFT_EXPR:
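 /* OP0 << N equals OP0 * (1 << N), so TOP is a multiple of BOTTOM
 whenever 1 << N is. */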
14613 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14614 {
14615 tree op1, t1;
14616
14617 op1 = TREE_OPERAND (top, 1);
14618 /* const_binop may not detect overflow correctly,
14619 so check for it explicitly here. */
14620 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14621 && 0 != (t1 = fold_convert (type,
14622 const_binop (LSHIFT_EXPR,
14623 size_one_node,
14624 op1)))
14625 && !TREE_OVERFLOW (t1))
14626 return multiple_of_p (type, t1, bottom);
14627 }
14628 return 0;
14629
14630 case NOP_EXPR:
14631 /* Can't handle conversions from non-integral or wider integral type. */
14632 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14633 || (TYPE_PRECISION (type)
14634 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14635 return 0;
14636
14637 /* .. fall through ... */
14638
14639 case SAVE_EXPR:
14640 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14641
14642 case COND_EXPR:
14643 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14644 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14645
14646 case INTEGER_CST:
14647 if (TREE_CODE (bottom) != INTEGER_CST
14648 || integer_zerop (bottom)
14649 || (TYPE_UNSIGNED (type)
14650 && (tree_int_cst_sgn (top) < 0
14651 || tree_int_cst_sgn (bottom) < 0)))
14652 return 0;
14653 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14654 SIGNED);
14655
14656 default:
14657 return 0;
14658 }
14659 }
14660
14661 /* Return true if CODE or TYPE is known to be non-negative. */
14662
14663 static bool
14664 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14665 {
14666 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14667 && truth_value_p (code))
14668 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14669 have a signed:1 type (where the values are -1 and 0). */
14670 return true;
14671 return false;
14672 }
14673
14674 /* Return true if (CODE OP0) is known to be non-negative. If the return
14675 value is based on the assumption that signed overflow is undefined,
14676 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14677 *STRICT_OVERFLOW_P. */
14678
14679 bool
14680 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14681 bool *strict_overflow_p)
14682 {
14683 if (TYPE_UNSIGNED (type))
14684 return true;
14685
14686 switch (code)
14687 {
14688 case ABS_EXPR:
14689 /* We can't return 1 if flag_wrapv is set because
14690 ABS_EXPR<INT_MIN> = INT_MIN. */
14691 if (!INTEGRAL_TYPE_P (type))
14692 return true;
14693 if (TYPE_OVERFLOW_UNDEFINED (type))
14694 {
14695 *strict_overflow_p = true;
14696 return true;
14697 }
14698 break;
14699
14700 case NON_LVALUE_EXPR:
14701 case FLOAT_EXPR:
14702 case FIX_TRUNC_EXPR:
14703 return tree_expr_nonnegative_warnv_p (op0,
14704 strict_overflow_p);
14705
14706 CASE_CONVERT:
14707 {
14708 tree inner_type = TREE_TYPE (op0);
14709 tree outer_type = type;
14710
14711 if (TREE_CODE (outer_type) == REAL_TYPE)
14712 {
14713 if (TREE_CODE (inner_type) == REAL_TYPE)
14714 return tree_expr_nonnegative_warnv_p (op0,
14715 strict_overflow_p);
14716 if (INTEGRAL_TYPE_P (inner_type))
14717 {
14718 if (TYPE_UNSIGNED (inner_type))
14719 return true;
14720 return tree_expr_nonnegative_warnv_p (op0,
14721 strict_overflow_p);
14722 }
14723 }
14724 else if (INTEGRAL_TYPE_P (outer_type))
14725 {
14726 if (TREE_CODE (inner_type) == REAL_TYPE)
14727 return tree_expr_nonnegative_warnv_p (op0,
14728 strict_overflow_p);
14729 if (INTEGRAL_TYPE_P (inner_type))
14730 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14731 && TYPE_UNSIGNED (inner_type);
14732 }
14733 }
14734 break;
14735
14736 default:
14737 return tree_simple_nonnegative_warnv_p (code, type);
14738 }
14739
14740 /* We don't know sign of `t', so be conservative and return false. */
14741 return false;
14742 }
14743
14744 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14745 value is based on the assumption that signed overflow is undefined,
14746 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14747 *STRICT_OVERFLOW_P. */
14748
14749 bool
14750 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14751 tree op1, bool *strict_overflow_p)
14752 {
14753 if (TYPE_UNSIGNED (type))
14754 return true;
14755
14756 switch (code)
14757 {
14758 case POINTER_PLUS_EXPR:
14759 case PLUS_EXPR:
14760 if (FLOAT_TYPE_P (type))
14761 return (tree_expr_nonnegative_warnv_p (op0,
14762 strict_overflow_p)
14763 && tree_expr_nonnegative_warnv_p (op1,
14764 strict_overflow_p));
14765
14766 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14767 both unsigned and at least 2 bits shorter than the result. */
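 /* E.g. the sum of two zero-extended 16-bit values needs at most
 17 bits, so in a 32-bit signed result it can never be negative. */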
14768 if (TREE_CODE (type) == INTEGER_TYPE
14769 && TREE_CODE (op0) == NOP_EXPR
14770 && TREE_CODE (op1) == NOP_EXPR)
14771 {
14772 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14773 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14774 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14775 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14776 {
14777 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14778 TYPE_PRECISION (inner2)) + 1;
14779 return prec < TYPE_PRECISION (type);
14780 }
14781 }
14782 break;
14783
14784 case MULT_EXPR:
14785 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14786 {
14787 /* x * x is always non-negative for floating point x
14788 or without overflow. */
14789 if (operand_equal_p (op0, op1, 0)
14790 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14791 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14792 {
14793 if (ANY_INTEGRAL_TYPE_P (type)
14794 && TYPE_OVERFLOW_UNDEFINED (type))
14795 *strict_overflow_p = true;
14796 return true;
14797 }
14798 }
14799
14800 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14801 both unsigned and the sum of their precisions is less than that of the result. */
14802 if (TREE_CODE (type) == INTEGER_TYPE
14803 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14804 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14805 {
14806 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14807 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14808 : TREE_TYPE (op0);
14809 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14810 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14811 : TREE_TYPE (op1);
14812
14813 bool unsigned0 = TYPE_UNSIGNED (inner0);
14814 bool unsigned1 = TYPE_UNSIGNED (inner1);
14815
14816 if (TREE_CODE (op0) == INTEGER_CST)
14817 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14818
14819 if (TREE_CODE (op1) == INTEGER_CST)
14820 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14821
14822 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14823 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14824 {
14825 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14826 ? tree_int_cst_min_precision (op0, UNSIGNED)
14827 : TYPE_PRECISION (inner0);
14828
14829 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14830 ? tree_int_cst_min_precision (op1, UNSIGNED)
14831 : TYPE_PRECISION (inner1);
14832
14833 return precision0 + precision1 < TYPE_PRECISION (type);
14834 }
14835 }
14836 return false;
14837
14838 case BIT_AND_EXPR:
14839 case MAX_EXPR:
14840 return (tree_expr_nonnegative_warnv_p (op0,
14841 strict_overflow_p)
14842 || tree_expr_nonnegative_warnv_p (op1,
14843 strict_overflow_p));
14844
14845 case BIT_IOR_EXPR:
14846 case BIT_XOR_EXPR:
14847 case MIN_EXPR:
14848 case RDIV_EXPR:
14849 case TRUNC_DIV_EXPR:
14850 case CEIL_DIV_EXPR:
14851 case FLOOR_DIV_EXPR:
14852 case ROUND_DIV_EXPR:
14853 return (tree_expr_nonnegative_warnv_p (op0,
14854 strict_overflow_p)
14855 && tree_expr_nonnegative_warnv_p (op1,
14856 strict_overflow_p));
14857
14858 case TRUNC_MOD_EXPR:
14859 case CEIL_MOD_EXPR:
14860 case FLOOR_MOD_EXPR:
14861 case ROUND_MOD_EXPR:
14862 return tree_expr_nonnegative_warnv_p (op0,
14863 strict_overflow_p);
14864 default:
14865 return tree_simple_nonnegative_warnv_p (code, type);
14866 }
14867
14868 /* We don't know sign of `t', so be conservative and return false. */
14869 return false;
14870 }
14871
14872 /* Return true if T is known to be non-negative. If the return
14873 value is based on the assumption that signed overflow is undefined,
14874 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14875 *STRICT_OVERFLOW_P. */
14876
14877 bool
14878 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14879 {
14880 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14881 return true;
14882
14883 switch (TREE_CODE (t))
14884 {
14885 case INTEGER_CST:
14886 return tree_int_cst_sgn (t) >= 0;
14887
14888 case REAL_CST:
14889 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14890
14891 case FIXED_CST:
14892 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14893
14894 case COND_EXPR:
14895 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14896 strict_overflow_p)
14897 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14898 strict_overflow_p));
14899 default:
14900 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14901 TREE_TYPE (t));
14902 }
14903 /* We don't know sign of `t', so be conservative and return false. */
14904 return false;
14905 }
14906
14907 /* Return true if T is known to be non-negative. If the return
14908 value is based on the assumption that signed overflow is undefined,
14909 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14910 *STRICT_OVERFLOW_P. */
14911
14912 bool
14913 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14914 tree arg0, tree arg1, bool *strict_overflow_p)
14915 {
14916 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14917 switch (DECL_FUNCTION_CODE (fndecl))
14918 {
14919 CASE_FLT_FN (BUILT_IN_ACOS):
14920 CASE_FLT_FN (BUILT_IN_ACOSH):
14921 CASE_FLT_FN (BUILT_IN_CABS):
14922 CASE_FLT_FN (BUILT_IN_COSH):
14923 CASE_FLT_FN (BUILT_IN_ERFC):
14924 CASE_FLT_FN (BUILT_IN_EXP):
14925 CASE_FLT_FN (BUILT_IN_EXP10):
14926 CASE_FLT_FN (BUILT_IN_EXP2):
14927 CASE_FLT_FN (BUILT_IN_FABS):
14928 CASE_FLT_FN (BUILT_IN_FDIM):
14929 CASE_FLT_FN (BUILT_IN_HYPOT):
14930 CASE_FLT_FN (BUILT_IN_POW10):
14931 CASE_INT_FN (BUILT_IN_FFS):
14932 CASE_INT_FN (BUILT_IN_PARITY):
14933 CASE_INT_FN (BUILT_IN_POPCOUNT):
14934 CASE_INT_FN (BUILT_IN_CLZ):
14935 CASE_INT_FN (BUILT_IN_CLRSB):
14936 case BUILT_IN_BSWAP32:
14937 case BUILT_IN_BSWAP64:
14938 /* Always true. */
14939 return true;
14940
14941 CASE_FLT_FN (BUILT_IN_SQRT):
14942 /* sqrt(-0.0) is -0.0. */
14943 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14944 return true;
14945 return tree_expr_nonnegative_warnv_p (arg0,
14946 strict_overflow_p);
14947
14948 CASE_FLT_FN (BUILT_IN_ASINH):
14949 CASE_FLT_FN (BUILT_IN_ATAN):
14950 CASE_FLT_FN (BUILT_IN_ATANH):
14951 CASE_FLT_FN (BUILT_IN_CBRT):
14952 CASE_FLT_FN (BUILT_IN_CEIL):
14953 CASE_FLT_FN (BUILT_IN_ERF):
14954 CASE_FLT_FN (BUILT_IN_EXPM1):
14955 CASE_FLT_FN (BUILT_IN_FLOOR):
14956 CASE_FLT_FN (BUILT_IN_FMOD):
14957 CASE_FLT_FN (BUILT_IN_FREXP):
14958 CASE_FLT_FN (BUILT_IN_ICEIL):
14959 CASE_FLT_FN (BUILT_IN_IFLOOR):
14960 CASE_FLT_FN (BUILT_IN_IRINT):
14961 CASE_FLT_FN (BUILT_IN_IROUND):
14962 CASE_FLT_FN (BUILT_IN_LCEIL):
14963 CASE_FLT_FN (BUILT_IN_LDEXP):
14964 CASE_FLT_FN (BUILT_IN_LFLOOR):
14965 CASE_FLT_FN (BUILT_IN_LLCEIL):
14966 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14967 CASE_FLT_FN (BUILT_IN_LLRINT):
14968 CASE_FLT_FN (BUILT_IN_LLROUND):
14969 CASE_FLT_FN (BUILT_IN_LRINT):
14970 CASE_FLT_FN (BUILT_IN_LROUND):
14971 CASE_FLT_FN (BUILT_IN_MODF):
14972 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14973 CASE_FLT_FN (BUILT_IN_RINT):
14974 CASE_FLT_FN (BUILT_IN_ROUND):
14975 CASE_FLT_FN (BUILT_IN_SCALB):
14976 CASE_FLT_FN (BUILT_IN_SCALBLN):
14977 CASE_FLT_FN (BUILT_IN_SCALBN):
14978 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14979 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14980 CASE_FLT_FN (BUILT_IN_SINH):
14981 CASE_FLT_FN (BUILT_IN_TANH):
14982 CASE_FLT_FN (BUILT_IN_TRUNC):
14983 /* True if the 1st argument is nonnegative. */
14984 return tree_expr_nonnegative_warnv_p (arg0,
14985 strict_overflow_p);
14986
14987 CASE_FLT_FN (BUILT_IN_FMAX):
14988 /* True if the 1st OR 2nd arguments are nonnegative. */
14989 return (tree_expr_nonnegative_warnv_p (arg0,
14990 strict_overflow_p)
14991 || (tree_expr_nonnegative_warnv_p (arg1,
14992 strict_overflow_p)));
14993
14994 CASE_FLT_FN (BUILT_IN_FMIN):
14995 /* True if the 1st AND 2nd arguments are nonnegative. */
14996 return (tree_expr_nonnegative_warnv_p (arg0,
14997 strict_overflow_p)
14998 && (tree_expr_nonnegative_warnv_p (arg1,
14999 strict_overflow_p)));
15000
15001 CASE_FLT_FN (BUILT_IN_COPYSIGN):
15002 /* True if the 2nd argument is nonnegative. */
15003 return tree_expr_nonnegative_warnv_p (arg1,
15004 strict_overflow_p);
15005
15006 CASE_FLT_FN (BUILT_IN_POWI):
15007 /* True if the 1st argument is nonnegative or the second
15008 argument is an even integer. */
15009 if (TREE_CODE (arg1) == INTEGER_CST
15010 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15011 return true;
15012 return tree_expr_nonnegative_warnv_p (arg0,
15013 strict_overflow_p);
15014
15015 CASE_FLT_FN (BUILT_IN_POW):
15016 /* True if the 1st argument is nonnegative or the second
15017 argument is an even integer valued real. */
15018 if (TREE_CODE (arg1) == REAL_CST)
15019 {
15020 REAL_VALUE_TYPE c;
15021 HOST_WIDE_INT n;
15022
15023 c = TREE_REAL_CST (arg1);
15024 n = real_to_integer (&c);
15025 if ((n & 1) == 0)
15026 {
15027 REAL_VALUE_TYPE cint;
15028 real_from_integer (&cint, VOIDmode, n, SIGNED);
15029 if (real_identical (&c, &cint))
15030 return true;
15031 }
15032 }
15033 return tree_expr_nonnegative_warnv_p (arg0,
15034 strict_overflow_p);
15035
15036 default:
15037 break;
15038 }
15039 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15040 type);
15041 }
15042
15043 /* Return true if T is known to be non-negative. If the return
15044 value is based on the assumption that signed overflow is undefined,
15045 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15046 *STRICT_OVERFLOW_P. */
15047
15048 static bool
15049 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15050 {
15051 enum tree_code code = TREE_CODE (t);
15052 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15053 return true;
15054
15055 switch (code)
15056 {
15057 case TARGET_EXPR:
15058 {
15059 tree temp = TARGET_EXPR_SLOT (t);
15060 t = TARGET_EXPR_INITIAL (t);
15061
15062 /* If the initializer is non-void, then it's a normal expression
15063 that will be assigned to the slot. */
15064 if (!VOID_TYPE_P (t))
15065 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15066
15067 /* Otherwise, the initializer sets the slot in some way. One common
15068 way is an assignment statement at the end of the initializer. */
15069 while (1)
15070 {
15071 if (TREE_CODE (t) == BIND_EXPR)
15072 t = expr_last (BIND_EXPR_BODY (t));
15073 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15074 || TREE_CODE (t) == TRY_CATCH_EXPR)
15075 t = expr_last (TREE_OPERAND (t, 0));
15076 else if (TREE_CODE (t) == STATEMENT_LIST)
15077 t = expr_last (t);
15078 else
15079 break;
15080 }
15081 if (TREE_CODE (t) == MODIFY_EXPR
15082 && TREE_OPERAND (t, 0) == temp)
15083 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15084 strict_overflow_p);
15085
15086 return false;
15087 }
15088
15089 case CALL_EXPR:
15090 {
15091 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15092 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15093
15094 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15095 get_callee_fndecl (t),
15096 arg0,
15097 arg1,
15098 strict_overflow_p);
15099 }
15100 case COMPOUND_EXPR:
15101 case MODIFY_EXPR:
15102 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15103 strict_overflow_p);
15104 case BIND_EXPR:
15105 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15106 strict_overflow_p);
15107 case SAVE_EXPR:
15108 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15109 strict_overflow_p);
15110
15111 default:
15112 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15113 TREE_TYPE (t));
15114 }
15115
15116 /* We don't know sign of `t', so be conservative and return false. */
15117 return false;
15118 }
15119
15120 /* Return true if T is known to be non-negative. If the return
15121 value is based on the assumption that signed overflow is undefined,
15122 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15123 *STRICT_OVERFLOW_P. */
15124
15125 bool
15126 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15127 {
15128 enum tree_code code;
15129 if (t == error_mark_node)
15130 return false;
15131
15132 code = TREE_CODE (t);
15133 switch (TREE_CODE_CLASS (code))
15134 {
15135 case tcc_binary:
15136 case tcc_comparison:
15137 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15138 TREE_TYPE (t),
15139 TREE_OPERAND (t, 0),
15140 TREE_OPERAND (t, 1),
15141 strict_overflow_p);
15142
15143 case tcc_unary:
15144 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15145 TREE_TYPE (t),
15146 TREE_OPERAND (t, 0),
15147 strict_overflow_p);
15148
15149 case tcc_constant:
15150 case tcc_declaration:
15151 case tcc_reference:
15152 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15153
15154 default:
15155 break;
15156 }
15157
15158 switch (code)
15159 {
15160 case TRUTH_AND_EXPR:
15161 case TRUTH_OR_EXPR:
15162 case TRUTH_XOR_EXPR:
15163 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15164 TREE_TYPE (t),
15165 TREE_OPERAND (t, 0),
15166 TREE_OPERAND (t, 1),
15167 strict_overflow_p);
15168 case TRUTH_NOT_EXPR:
15169 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15170 TREE_TYPE (t),
15171 TREE_OPERAND (t, 0),
15172 strict_overflow_p);
15173
15174 case COND_EXPR:
15175 case CONSTRUCTOR:
15176 case OBJ_TYPE_REF:
15177 case ASSERT_EXPR:
15178 case ADDR_EXPR:
15179 case WITH_SIZE_EXPR:
15180 case SSA_NAME:
15181 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15182
15183 default:
15184 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15185 }
15186 }
15187
15188 /* Return true if `t' is known to be non-negative. Handle warnings
15189 about undefined signed overflow. */
15190
15191 bool
15192 tree_expr_nonnegative_p (tree t)
15193 {
15194 bool ret, strict_overflow_p;
15195
15196 strict_overflow_p = false;
15197 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15198 if (strict_overflow_p)
15199 fold_overflow_warning (("assuming signed overflow does not occur when "
15200 "determining that expression is always "
15201 "non-negative"),
15202 WARN_STRICT_OVERFLOW_MISC);
15203 return ret;
15204 }
15205
15206
15207 	/* Return true when (CODE OP0) is an address and is known to be nonzero.
15208 	   For floating point we further ensure that the value is not denormal.
15209 	   Similar logic is present in nonzero_address in rtlanal.c.
15210
15211 If the return value is based on the assumption that signed overflow
15212 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15213 change *STRICT_OVERFLOW_P. */
15214
15215 bool
15216 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15217 bool *strict_overflow_p)
15218 {
15219 switch (code)
15220 {
15221 case ABS_EXPR:
15222 return tree_expr_nonzero_warnv_p (op0,
15223 strict_overflow_p);
15224
15225 case NOP_EXPR:
15226 {
15227 tree inner_type = TREE_TYPE (op0);
15228 tree outer_type = type;
15229
15230 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15231 && tree_expr_nonzero_warnv_p (op0,
15232 strict_overflow_p));
15233 }
15234 break;
15235
15236 case NON_LVALUE_EXPR:
15237 return tree_expr_nonzero_warnv_p (op0,
15238 strict_overflow_p);
15239
15240 default:
15241 break;
15242 }
15243
15244 return false;
15245 }
15246
15247 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15248 	   For floating point we further ensure that the value is not denormal.
15249 	   Similar logic is present in nonzero_address in rtlanal.c.
15250
15251 If the return value is based on the assumption that signed overflow
15252 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15253 change *STRICT_OVERFLOW_P. */
15254
15255 bool
15256 tree_binary_nonzero_warnv_p (enum tree_code code,
15257 tree type,
15258 tree op0,
15259 tree op1, bool *strict_overflow_p)
15260 {
15261 bool sub_strict_overflow_p;
15262 switch (code)
15263 {
15264 case POINTER_PLUS_EXPR:
15265 case PLUS_EXPR:
15266 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15267 {
15268 	  /* In the presence of negative values it is hard
15269 	     to say anything definite.  */
15270 sub_strict_overflow_p = false;
15271 if (!tree_expr_nonnegative_warnv_p (op0,
15272 &sub_strict_overflow_p)
15273 || !tree_expr_nonnegative_warnv_p (op1,
15274 &sub_strict_overflow_p))
15275 return false;
15276 	  /* One of the operands must be positive and the other non-negative.  */
15277 	  /* We don't set *STRICT_OVERFLOW_P here: even if this value
15278 	     overflows, on a two's-complement machine the sum of two
15279 	     nonnegative numbers can never be zero.  */
15280 return (tree_expr_nonzero_warnv_p (op0,
15281 strict_overflow_p)
15282 || tree_expr_nonzero_warnv_p (op1,
15283 strict_overflow_p));
15284 }
15285 break;
15286
15287 case MULT_EXPR:
15288 if (TYPE_OVERFLOW_UNDEFINED (type))
15289 {
15290 if (tree_expr_nonzero_warnv_p (op0,
15291 strict_overflow_p)
15292 && tree_expr_nonzero_warnv_p (op1,
15293 strict_overflow_p))
15294 {
15295 *strict_overflow_p = true;
15296 return true;
15297 }
15298 }
15299 break;
15300
15301 case MIN_EXPR:
15302 sub_strict_overflow_p = false;
15303 if (tree_expr_nonzero_warnv_p (op0,
15304 &sub_strict_overflow_p)
15305 && tree_expr_nonzero_warnv_p (op1,
15306 &sub_strict_overflow_p))
15307 {
15308 if (sub_strict_overflow_p)
15309 *strict_overflow_p = true;
15310 }
15311 break;
15312
15313 case MAX_EXPR:
15314 sub_strict_overflow_p = false;
15315 if (tree_expr_nonzero_warnv_p (op0,
15316 &sub_strict_overflow_p))
15317 {
15318 if (sub_strict_overflow_p)
15319 *strict_overflow_p = true;
15320
15321 	      /* When both operands are nonzero, MAX must be too.  */
15322 if (tree_expr_nonzero_warnv_p (op1,
15323 strict_overflow_p))
15324 return true;
15325
15326 /* MAX where operand 0 is positive is positive. */
15327 return tree_expr_nonnegative_warnv_p (op0,
15328 strict_overflow_p);
15329 }
15330 /* MAX where operand 1 is positive is positive. */
15331 else if (tree_expr_nonzero_warnv_p (op1,
15332 &sub_strict_overflow_p)
15333 && tree_expr_nonnegative_warnv_p (op1,
15334 &sub_strict_overflow_p))
15335 {
15336 if (sub_strict_overflow_p)
15337 *strict_overflow_p = true;
15338 return true;
15339 }
15340 break;
15341
15342 case BIT_IOR_EXPR:
15343 return (tree_expr_nonzero_warnv_p (op1,
15344 strict_overflow_p)
15345 || tree_expr_nonzero_warnv_p (op0,
15346 strict_overflow_p));
15347
15348 default:
15349 break;
15350 }
15351
15352 return false;
15353 }
15354
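/* Illustrative sketch (hypothetical caller, kept out of the build with
   #if 0): the *_warnv_p protocol accumulates the strict-overflow flag in a
   local and only warns if the answer is actually used, mirroring
   tree_expr_nonnegative_p above.  fold_overflow_warning and
   WARN_STRICT_OVERFLOW_MISC are existing interfaces; the helper itself is
   invented.  */
#if 0
static bool
product_known_nonzero_p (tree op0, tree op1)
{
  bool strict_overflow_p = false;
  bool ret = tree_binary_nonzero_warnv_p (MULT_EXPR, TREE_TYPE (op0),
					  op0, op1, &strict_overflow_p);
  if (ret && strict_overflow_p)
    fold_overflow_warning (("assuming signed overflow does not occur when "
			    "determining that expression is nonzero"),
			   WARN_STRICT_OVERFLOW_MISC);
  return ret;
}
#endif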
15355 /* Return true when T is an address and is known to be nonzero.
15356 For floating point we further ensure that T is not denormal.
15357 	   Similar logic is present in nonzero_address in rtlanal.c.
15358
15359 If the return value is based on the assumption that signed overflow
15360 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15361 change *STRICT_OVERFLOW_P. */
15362
15363 bool
15364 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15365 {
15366 bool sub_strict_overflow_p;
15367 switch (TREE_CODE (t))
15368 {
15369 case INTEGER_CST:
15370 return !integer_zerop (t);
15371
15372 case ADDR_EXPR:
15373 {
15374 tree base = TREE_OPERAND (t, 0);
15375
15376 if (!DECL_P (base))
15377 base = get_base_address (base);
15378
15379 if (!base)
15380 return false;
15381
15382 	  /* For objects in the symbol table, check whether we know they are non-zero.
15383 	     Don't do anything for variables and functions before the symtab is built;
15384 	     it is quite possible that they will be declared weak later.  */
15385 if (DECL_P (base) && decl_in_symtab_p (base))
15386 {
15387 struct symtab_node *symbol;
15388
15389 symbol = symtab_node::get_create (base);
15390 if (symbol)
15391 return symbol->nonzero_address ();
15392 else
15393 return false;
15394 }
15395
15396 /* Function local objects are never NULL. */
15397 if (DECL_P (base)
15398 && (DECL_CONTEXT (base)
15399 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15400 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15401 return true;
15402
15403 /* Constants are never weak. */
15404 if (CONSTANT_CLASS_P (base))
15405 return true;
15406
15407 return false;
15408 }
15409
15410 case COND_EXPR:
15411 sub_strict_overflow_p = false;
15412 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15413 &sub_strict_overflow_p)
15414 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15415 &sub_strict_overflow_p))
15416 {
15417 if (sub_strict_overflow_p)
15418 *strict_overflow_p = true;
15419 return true;
15420 }
15421 break;
15422
15423 default:
15424 break;
15425 }
15426 return false;
15427 }
15428
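/* Illustrative sketch (hypothetical helper, kept out of the build with
   #if 0): tree_single_nonzero_warnv_p lets a caller fold "&decl != 0" to
   true, e.g. for function-local objects or constants.  build_fold_addr_expr
   and boolean_true_node are existing interfaces.  */
#if 0
static tree
fold_addr_ne_null (tree decl)
{
  bool strict_overflow_p = false;
  tree addr = build_fold_addr_expr (decl);
  if (tree_single_nonzero_warnv_p (addr, &strict_overflow_p))
    /* The address is provably nonzero, so the comparison is true.  */
    return boolean_true_node;
  return NULL_TREE;
}
#endif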
15429 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15430 attempt to fold the expression to a constant without modifying TYPE,
15431 OP0 or OP1.
15432
15433 If the expression could be simplified to a constant, then return
15434 the constant. If the expression would not be simplified to a
15435 constant, then return NULL_TREE. */
15436
15437 tree
15438 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15439 {
15440 tree tem = fold_binary (code, type, op0, op1);
15441 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15442 }
15443
15444 /* Given the components of a unary expression CODE, TYPE and OP0,
15445 attempt to fold the expression to a constant without modifying
15446 TYPE or OP0.
15447
15448 If the expression could be simplified to a constant, then return
15449 the constant. If the expression would not be simplified to a
15450 constant, then return NULL_TREE. */
15451
15452 tree
15453 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15454 {
15455 tree tem = fold_unary (code, type, op0);
15456 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15457 }
15458
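/* Illustrative sketch (kept out of the build with #if 0): these helpers
   either produce a constant or NULL_TREE, never a partially simplified
   tree.  build_int_cst, integer_type_node and gcc_assert are existing
   interfaces; the function itself is only an example.  */
#if 0
static void
fold_to_constant_examples (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  /* 2 + 3 folds to the INTEGER_CST 5.  */
  tree five = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
				       two, three);
  /* -(2) folds to the INTEGER_CST -2.  */
  tree minus_two = fold_unary_to_constant (NEGATE_EXPR, integer_type_node,
					   two);
  gcc_assert (five != NULL_TREE && minus_two != NULL_TREE);
}
#endif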
15459 /* If EXP represents referencing an element in a constant string
15460 (either via pointer arithmetic or array indexing), return the
15461 tree representing the value accessed, otherwise return NULL. */
15462
15463 tree
15464 fold_read_from_constant_string (tree exp)
15465 {
15466 if ((TREE_CODE (exp) == INDIRECT_REF
15467 || TREE_CODE (exp) == ARRAY_REF)
15468 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15469 {
15470 tree exp1 = TREE_OPERAND (exp, 0);
15471 tree index;
15472 tree string;
15473 location_t loc = EXPR_LOCATION (exp);
15474
15475 if (TREE_CODE (exp) == INDIRECT_REF)
15476 string = string_constant (exp1, &index);
15477 else
15478 {
15479 tree low_bound = array_ref_low_bound (exp);
15480 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15481
15482 /* Optimize the special-case of a zero lower bound.
15483
15484 We convert the low_bound to sizetype to avoid some problems
15485 with constant folding. (E.g. suppose the lower bound is 1,
15486 	     and its mode is QI.  Without the conversion, (ARRAY
15487 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15488 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15489 if (! integer_zerop (low_bound))
15490 index = size_diffop_loc (loc, index,
15491 fold_convert_loc (loc, sizetype, low_bound));
15492
15493 string = exp1;
15494 }
15495
15496 if (string
15497 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15498 && TREE_CODE (string) == STRING_CST
15499 && TREE_CODE (index) == INTEGER_CST
15500 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15501 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15502 == MODE_INT)
15503 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15504 return build_int_cst_type (TREE_TYPE (exp),
15505 (TREE_STRING_POINTER (string)
15506 [TREE_INT_CST_LOW (index)]));
15507 }
15508 return NULL;
15509 }
15510
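/* Illustrative sketch (kept out of the build with #if 0): for an ARRAY_REF
   or INDIRECT_REF of a STRING_CST with a constant, in-range index, the
   routine above returns the addressed character as an INTEGER_CST; anything
   else yields NULL.  The helper name is hypothetical.  */
#if 0
static bool
reads_constant_char_p (tree exp)
{
  tree c = fold_read_from_constant_string (exp);
  return c != NULL_TREE && TREE_CODE (c) == INTEGER_CST;
}
#endif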
15511 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15512 an integer constant, real, or fixed-point constant.
15513
15514 TYPE is the type of the result. */
15515
15516 static tree
15517 fold_negate_const (tree arg0, tree type)
15518 {
15519 tree t = NULL_TREE;
15520
15521 switch (TREE_CODE (arg0))
15522 {
15523 case INTEGER_CST:
15524 {
15525 bool overflow;
15526 wide_int val = wi::neg (arg0, &overflow);
15527 t = force_fit_type (type, val, 1,
15528 (overflow | TREE_OVERFLOW (arg0))
15529 && !TYPE_UNSIGNED (type));
15530 break;
15531 }
15532
15533 case REAL_CST:
15534 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15535 break;
15536
15537 case FIXED_CST:
15538 {
15539 FIXED_VALUE_TYPE f;
15540 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15541 &(TREE_FIXED_CST (arg0)), NULL,
15542 TYPE_SATURATING (type));
15543 t = build_fixed (type, f);
15544 /* Propagate overflow flags. */
15545 if (overflow_p | TREE_OVERFLOW (arg0))
15546 TREE_OVERFLOW (t) = 1;
15547 break;
15548 }
15549
15550 default:
15551 gcc_unreachable ();
15552 }
15553
15554 return t;
15555 }
15556
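/* Illustrative sketch (kept out of the build with #if 0): negating the most
   negative INTEGER_CST of a signed type wraps back to itself, and
   force_fit_type records that via TREE_OVERFLOW on the result.
   TYPE_MIN_VALUE, TREE_OVERFLOW and integer_type_node are existing
   interfaces; the function is only an example.  */
#if 0
static void
fold_negate_const_example (void)
{
  tree min = TYPE_MIN_VALUE (integer_type_node);
  tree neg = fold_negate_const (min, integer_type_node);
  gcc_assert (TREE_OVERFLOW (neg));
}
#endif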
15557 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15558 an integer constant or real constant.
15559
15560 TYPE is the type of the result. */
15561
15562 tree
15563 fold_abs_const (tree arg0, tree type)
15564 {
15565 tree t = NULL_TREE;
15566
15567 switch (TREE_CODE (arg0))
15568 {
15569 case INTEGER_CST:
15570 {
15571 /* If the value is unsigned or non-negative, then the absolute value
15572 is the same as the ordinary value. */
15573 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15574 t = arg0;
15575
15576 /* If the value is negative, then the absolute value is
15577 its negation. */
15578 else
15579 {
15580 bool overflow;
15581 wide_int val = wi::neg (arg0, &overflow);
15582 t = force_fit_type (type, val, -1,
15583 overflow | TREE_OVERFLOW (arg0));
15584 }
15585 }
15586 break;
15587
15588 case REAL_CST:
15589 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15590 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15591 else
15592 t = arg0;
15593 break;
15594
15595 default:
15596 gcc_unreachable ();
15597 }
15598
15599 return t;
15600 }
15601
15602 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15603 constant. TYPE is the type of the result. */
15604
15605 static tree
15606 fold_not_const (const_tree arg0, tree type)
15607 {
15608 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15609
15610 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15611 }
15612
15613 /* Given CODE, a relational operator, the target type, TYPE and two
15614 constant operands OP0 and OP1, return the result of the
15615 relational operation. If the result is not a compile time
15616 constant, then return NULL_TREE. */
15617
15618 static tree
15619 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15620 {
15621 int result, invert;
15622
15623 /* From here on, the only cases we handle are when the result is
15624 known to be a constant. */
15625
15626 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15627 {
15628 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15629 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15630
15631 /* Handle the cases where either operand is a NaN. */
15632 if (real_isnan (c0) || real_isnan (c1))
15633 {
15634 switch (code)
15635 {
15636 case EQ_EXPR:
15637 case ORDERED_EXPR:
15638 result = 0;
15639 break;
15640
15641 case NE_EXPR:
15642 case UNORDERED_EXPR:
15643 case UNLT_EXPR:
15644 case UNLE_EXPR:
15645 case UNGT_EXPR:
15646 case UNGE_EXPR:
15647 case UNEQ_EXPR:
15648 result = 1;
15649 break;
15650
15651 case LT_EXPR:
15652 case LE_EXPR:
15653 case GT_EXPR:
15654 case GE_EXPR:
15655 case LTGT_EXPR:
15656 if (flag_trapping_math)
15657 return NULL_TREE;
15658 result = 0;
15659 break;
15660
15661 default:
15662 gcc_unreachable ();
15663 }
15664
15665 return constant_boolean_node (result, type);
15666 }
15667
15668 return constant_boolean_node (real_compare (code, c0, c1), type);
15669 }
15670
15671 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15672 {
15673 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15674 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15675 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15676 }
15677
15678 /* Handle equality/inequality of complex constants. */
15679 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15680 {
15681 tree rcond = fold_relational_const (code, type,
15682 TREE_REALPART (op0),
15683 TREE_REALPART (op1));
15684 tree icond = fold_relational_const (code, type,
15685 TREE_IMAGPART (op0),
15686 TREE_IMAGPART (op1));
15687 if (code == EQ_EXPR)
15688 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15689 else if (code == NE_EXPR)
15690 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15691 else
15692 return NULL_TREE;
15693 }
15694
15695 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15696 {
15697 unsigned count = VECTOR_CST_NELTS (op0);
15698 tree *elts = XALLOCAVEC (tree, count);
15699 gcc_assert (VECTOR_CST_NELTS (op1) == count
15700 && TYPE_VECTOR_SUBPARTS (type) == count);
15701
15702 for (unsigned i = 0; i < count; i++)
15703 {
15704 tree elem_type = TREE_TYPE (type);
15705 tree elem0 = VECTOR_CST_ELT (op0, i);
15706 tree elem1 = VECTOR_CST_ELT (op1, i);
15707
15708 tree tem = fold_relational_const (code, elem_type,
15709 elem0, elem1);
15710
15711 if (tem == NULL_TREE)
15712 return NULL_TREE;
15713
15714 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15715 }
15716
15717 return build_vector (type, elts);
15718 }
15719
15720 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15721
15722 To compute GT, swap the arguments and do LT.
15723 To compute GE, do LT and invert the result.
15724 To compute LE, swap the arguments, do LT and invert the result.
15725 To compute NE, do EQ and invert the result.
15726
15727 Therefore, the code below must handle only EQ and LT. */
15728
15729 if (code == LE_EXPR || code == GT_EXPR)
15730 {
15731 	      std::swap (op0, op1);
15734 code = swap_tree_comparison (code);
15735 }
15736
15737 /* Note that it is safe to invert for real values here because we
15738 have already handled the one case that it matters. */
15739
15740 invert = 0;
15741 if (code == NE_EXPR || code == GE_EXPR)
15742 {
15743 invert = 1;
15744 code = invert_tree_comparison (code, false);
15745 }
15746
15747 	  /* Compute a result for LT or EQ if args permit;
15748 	     otherwise return NULL_TREE.  */
15749 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15750 {
15751 if (code == EQ_EXPR)
15752 result = tree_int_cst_equal (op0, op1);
15753 else
15754 result = tree_int_cst_lt (op0, op1);
15755 }
15756 else
15757 return NULL_TREE;
15758
15759 if (invert)
15760 result ^= 1;
15761 return constant_boolean_node (result, type);
15762 }
15763
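/* Illustrative sketch (kept out of the build with #if 0): constant
   relational folding.  2 < 3 and 3 >= 2 both yield boolean_true_node, the
   latter via the swap/invert reduction to LT described above.
   build_int_cst, boolean_type_node and integer_onep are existing
   interfaces.  */
#if 0
static void
fold_relational_const_examples (void)
{
  tree two = build_int_cst (integer_type_node, 2);
  tree three = build_int_cst (integer_type_node, 3);
  gcc_assert (integer_onep (fold_relational_const (LT_EXPR,
						   boolean_type_node,
						   two, three)));
  gcc_assert (integer_onep (fold_relational_const (GE_EXPR,
						   boolean_type_node,
						   three, two)));
}
#endif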
15764 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15765 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15766 itself. */
15767
15768 tree
15769 fold_build_cleanup_point_expr (tree type, tree expr)
15770 {
15771 /* If the expression does not have side effects then we don't have to wrap
15772 it with a cleanup point expression. */
15773 if (!TREE_SIDE_EFFECTS (expr))
15774 return expr;
15775
15776 	  /* If the expression is a return, check whether the expression inside the
15777 	     return (or, for a MODIFY_EXPR inside the return, its right-hand side)
15778 	     has side effects.  If it does not, we don't need to wrap the expression
15779 	     in a cleanup point expression.  Note we don't check the left-hand side
15780 	     of the modify because it should always be a return decl.  */
15781 if (TREE_CODE (expr) == RETURN_EXPR)
15782 {
15783 tree op = TREE_OPERAND (expr, 0);
15784 if (!op || !TREE_SIDE_EFFECTS (op))
15785 return expr;
15786 op = TREE_OPERAND (op, 1);
15787 if (!TREE_SIDE_EFFECTS (op))
15788 return expr;
15789 }
15790
15791 return build1 (CLEANUP_POINT_EXPR, type, expr);
15792 }
15793
15794 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15795 of an indirection through OP0, or NULL_TREE if no simplification is
15796 possible. */
15797
15798 tree
15799 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15800 {
15801 tree sub = op0;
15802 tree subtype;
15803
15804 STRIP_NOPS (sub);
15805 subtype = TREE_TYPE (sub);
15806 if (!POINTER_TYPE_P (subtype))
15807 return NULL_TREE;
15808
15809 if (TREE_CODE (sub) == ADDR_EXPR)
15810 {
15811 tree op = TREE_OPERAND (sub, 0);
15812 tree optype = TREE_TYPE (op);
15813 /* *&CONST_DECL -> to the value of the const decl. */
15814 if (TREE_CODE (op) == CONST_DECL)
15815 return DECL_INITIAL (op);
15816 /* *&p => p; make sure to handle *&"str"[cst] here. */
15817 if (type == optype)
15818 {
15819 tree fop = fold_read_from_constant_string (op);
15820 if (fop)
15821 return fop;
15822 else
15823 return op;
15824 }
15825 /* *(foo *)&fooarray => fooarray[0] */
15826 else if (TREE_CODE (optype) == ARRAY_TYPE
15827 && type == TREE_TYPE (optype)
15828 && (!in_gimple_form
15829 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15830 {
15831 tree type_domain = TYPE_DOMAIN (optype);
15832 tree min_val = size_zero_node;
15833 if (type_domain && TYPE_MIN_VALUE (type_domain))
15834 min_val = TYPE_MIN_VALUE (type_domain);
15835 if (in_gimple_form
15836 && TREE_CODE (min_val) != INTEGER_CST)
15837 return NULL_TREE;
15838 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15839 NULL_TREE, NULL_TREE);
15840 }
15841 /* *(foo *)&complexfoo => __real__ complexfoo */
15842 else if (TREE_CODE (optype) == COMPLEX_TYPE
15843 && type == TREE_TYPE (optype))
15844 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15845 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15846 else if (TREE_CODE (optype) == VECTOR_TYPE
15847 && type == TREE_TYPE (optype))
15848 {
15849 tree part_width = TYPE_SIZE (type);
15850 tree index = bitsize_int (0);
15851 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15852 }
15853 }
15854
15855 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15856 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15857 {
15858 tree op00 = TREE_OPERAND (sub, 0);
15859 tree op01 = TREE_OPERAND (sub, 1);
15860
15861 STRIP_NOPS (op00);
15862 if (TREE_CODE (op00) == ADDR_EXPR)
15863 {
15864 tree op00type;
15865 op00 = TREE_OPERAND (op00, 0);
15866 op00type = TREE_TYPE (op00);
15867
15868 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15869 if (TREE_CODE (op00type) == VECTOR_TYPE
15870 && type == TREE_TYPE (op00type))
15871 {
15872 HOST_WIDE_INT offset = tree_to_shwi (op01);
15873 tree part_width = TYPE_SIZE (type);
15874 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15875 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15876 tree index = bitsize_int (indexi);
15877
15878 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15879 return fold_build3_loc (loc,
15880 BIT_FIELD_REF, type, op00,
15881 part_width, index);
15882
15883 }
15884 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15885 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15886 && type == TREE_TYPE (op00type))
15887 {
15888 tree size = TYPE_SIZE_UNIT (type);
15889 if (tree_int_cst_equal (size, op01))
15890 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15891 }
15892 /* ((foo *)&fooarray)[1] => fooarray[1] */
15893 else if (TREE_CODE (op00type) == ARRAY_TYPE
15894 && type == TREE_TYPE (op00type))
15895 {
15896 tree type_domain = TYPE_DOMAIN (op00type);
15897 tree min_val = size_zero_node;
15898 if (type_domain && TYPE_MIN_VALUE (type_domain))
15899 min_val = TYPE_MIN_VALUE (type_domain);
15900 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15901 TYPE_SIZE_UNIT (type));
15902 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15903 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15904 NULL_TREE, NULL_TREE);
15905 }
15906 }
15907 }
15908
15909 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15910 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15911 && type == TREE_TYPE (TREE_TYPE (subtype))
15912 && (!in_gimple_form
15913 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15914 {
15915 tree type_domain;
15916 tree min_val = size_zero_node;
15917 sub = build_fold_indirect_ref_loc (loc, sub);
15918 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15919 if (type_domain && TYPE_MIN_VALUE (type_domain))
15920 min_val = TYPE_MIN_VALUE (type_domain);
15921 if (in_gimple_form
15922 && TREE_CODE (min_val) != INTEGER_CST)
15923 return NULL_TREE;
15924 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15925 NULL_TREE);
15926 }
15927
15928 return NULL_TREE;
15929 }
15930
15931 /* Builds an expression for an indirection through T, simplifying some
15932 cases. */
15933
15934 tree
15935 build_fold_indirect_ref_loc (location_t loc, tree t)
15936 {
15937 tree type = TREE_TYPE (TREE_TYPE (t));
15938 tree sub = fold_indirect_ref_1 (loc, type, t);
15939
15940 if (sub)
15941 return sub;
15942
15943 return build1_loc (loc, INDIRECT_REF, type, t);
15944 }
15945
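/* Illustrative sketch (kept out of the build with #if 0): the common
   "*&decl" case handled by fold_indirect_ref_1, driven through
   build_fold_indirect_ref_loc defined above.  build_fold_addr_expr and
   UNKNOWN_LOCATION are existing interfaces; the helper is invented.  */
#if 0
static tree
indirect_ref_of_address_example (tree decl)
{
  tree addr = build_fold_addr_expr (decl);
  /* *&decl simplifies straight back to decl.  */
  return build_fold_indirect_ref_loc (UNKNOWN_LOCATION, addr);
}
#endif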
15946 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15947
15948 tree
15949 fold_indirect_ref_loc (location_t loc, tree t)
15950 {
15951 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15952
15953 if (sub)
15954 return sub;
15955 else
15956 return t;
15957 }
15958
15959 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15960 whose result is ignored. The type of the returned tree need not be
15961 the same as the original expression. */
15962
15963 tree
15964 fold_ignored_result (tree t)
15965 {
15966 if (!TREE_SIDE_EFFECTS (t))
15967 return integer_zero_node;
15968
15969 for (;;)
15970 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15971 {
15972 case tcc_unary:
15973 t = TREE_OPERAND (t, 0);
15974 break;
15975
15976 case tcc_binary:
15977 case tcc_comparison:
15978 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15979 t = TREE_OPERAND (t, 0);
15980 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15981 t = TREE_OPERAND (t, 1);
15982 else
15983 return t;
15984 break;
15985
15986 case tcc_expression:
15987 switch (TREE_CODE (t))
15988 {
15989 case COMPOUND_EXPR:
15990 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15991 return t;
15992 t = TREE_OPERAND (t, 0);
15993 break;
15994
15995 case COND_EXPR:
15996 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15997 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15998 return t;
15999 t = TREE_OPERAND (t, 0);
16000 break;
16001
16002 default:
16003 return t;
16004 }
16005 break;
16006
16007 default:
16008 return t;
16009 }
16010 }
16011
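/* Illustrative sketch (kept out of the build with #if 0): when the result
   of "x + foo ()" is ignored and X is a plain variable while CALL_FOO is a
   call with side effects, only the call matters, so fold_ignored_result
   reduces the whole expression to the CALL_EXPR.  build2 is an existing
   interface; the helper is invented.  */
#if 0
static tree
ignored_result_example (tree x, tree call_foo)
{
  tree sum = build2 (PLUS_EXPR, TREE_TYPE (x), x, call_foo);
  /* Yields CALL_FOO, since X contributes no side effects.  */
  return fold_ignored_result (sum);
}
#endif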
16012 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16013
16014 tree
16015 round_up_loc (location_t loc, tree value, unsigned int divisor)
16016 {
16017 tree div = NULL_TREE;
16018
16019 if (divisor == 1)
16020 return value;
16021
16022 	  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16023 	     have to do anything.  Only do this when we are not given a
16024 	     constant, because in that case this check is more expensive
16025 	     than just doing the rounding.  */
16026 if (TREE_CODE (value) != INTEGER_CST)
16027 {
16028 div = build_int_cst (TREE_TYPE (value), divisor);
16029
16030 if (multiple_of_p (TREE_TYPE (value), value, div))
16031 return value;
16032 }
16033
16034 /* If divisor is a power of two, simplify this to bit manipulation. */
16035 if (divisor == (divisor & -divisor))
16036 {
16037 if (TREE_CODE (value) == INTEGER_CST)
16038 {
16039 wide_int val = value;
16040 bool overflow_p;
16041
16042 if ((val & (divisor - 1)) == 0)
16043 return value;
16044
16045 overflow_p = TREE_OVERFLOW (value);
16046 val += divisor - 1;
16047 val &= - (int) divisor;
16048 if (val == 0)
16049 overflow_p = true;
16050
16051 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16052 }
16053 else
16054 {
16055 tree t;
16056
16057 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16058 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16059 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16060 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16061 }
16062 }
16063 else
16064 {
16065 if (!div)
16066 div = build_int_cst (TREE_TYPE (value), divisor);
16067 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16068 value = size_binop_loc (loc, MULT_EXPR, value, div);
16069 }
16070
16071 return value;
16072 }
16073
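/* Illustrative sketch (kept out of the build with #if 0): for a
   power-of-two divisor the rounding above is plain bit manipulation, e.g.
   rounding up to a multiple of 8 computes (value + 7) & -8, so 13 becomes
   16.  size_int, compare_tree_int and UNKNOWN_LOCATION are existing
   interfaces; the function is only an example.  */
#if 0
static void
round_up_example (void)
{
  tree thirteen = size_int (13);
  tree rounded = round_up_loc (UNKNOWN_LOCATION, thirteen, 8);
  gcc_assert (compare_tree_int (rounded, 16) == 0);
}
#endif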
16074 /* Likewise, but round down. */
16075
16076 tree
16077 round_down_loc (location_t loc, tree value, int divisor)
16078 {
16079 tree div = NULL_TREE;
16080
16081 gcc_assert (divisor > 0);
16082 if (divisor == 1)
16083 return value;
16084
16085 	  /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
16086 	     have to do anything.  Only do this when we are not given a
16087 	     constant, because in that case this check is more expensive
16088 	     than just doing the rounding.  */
16089 if (TREE_CODE (value) != INTEGER_CST)
16090 {
16091 div = build_int_cst (TREE_TYPE (value), divisor);
16092
16093 if (multiple_of_p (TREE_TYPE (value), value, div))
16094 return value;
16095 }
16096
16097 /* If divisor is a power of two, simplify this to bit manipulation. */
16098 if (divisor == (divisor & -divisor))
16099 {
16100 tree t;
16101
16102 t = build_int_cst (TREE_TYPE (value), -divisor);
16103 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16104 }
16105 else
16106 {
16107 if (!div)
16108 div = build_int_cst (TREE_TYPE (value), divisor);
16109 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16110 value = size_binop_loc (loc, MULT_EXPR, value, div);
16111 }
16112
16113 return value;
16114 }
16115
16116 	/* Returns a pointer to the base of the object addressed by EXP and
16117 	   extracts the offset information of the access, storing it in
16118 	   *PBITPOS and *POFFSET.  */
16119
16120 static tree
16121 split_address_to_core_and_offset (tree exp,
16122 HOST_WIDE_INT *pbitpos, tree *poffset)
16123 {
16124 tree core;
16125 machine_mode mode;
16126 int unsignedp, volatilep;
16127 HOST_WIDE_INT bitsize;
16128 location_t loc = EXPR_LOCATION (exp);
16129
16130 if (TREE_CODE (exp) == ADDR_EXPR)
16131 {
16132 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16133 poffset, &mode, &unsignedp, &volatilep,
16134 false);
16135 core = build_fold_addr_expr_loc (loc, core);
16136 }
16137 else
16138 {
16139 core = exp;
16140 *pbitpos = 0;
16141 *poffset = NULL_TREE;
16142 }
16143
16144 return core;
16145 }
16146
16147 /* Returns true if addresses of E1 and E2 differ by a constant, false
16148 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16149
16150 bool
16151 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16152 {
16153 tree core1, core2;
16154 HOST_WIDE_INT bitpos1, bitpos2;
16155 tree toffset1, toffset2, tdiff, type;
16156
16157 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16158 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16159
16160 if (bitpos1 % BITS_PER_UNIT != 0
16161 || bitpos2 % BITS_PER_UNIT != 0
16162 || !operand_equal_p (core1, core2, 0))
16163 return false;
16164
16165 if (toffset1 && toffset2)
16166 {
16167 type = TREE_TYPE (toffset1);
16168 if (type != TREE_TYPE (toffset2))
16169 toffset2 = fold_convert (type, toffset2);
16170
16171 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16172 if (!cst_and_fits_in_hwi (tdiff))
16173 return false;
16174
16175 *diff = int_cst_value (tdiff);
16176 }
16177 else if (toffset1 || toffset2)
16178 {
16179 /* If only one of the offsets is non-constant, the difference cannot
16180 be a constant. */
16181 return false;
16182 }
16183 else
16184 *diff = 0;
16185
16186 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16187 return true;
16188 }
16189
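/* Illustrative sketch (kept out of the build with #if 0): ptr_difference_const
   recovers the constant byte distance between two addresses off the same
   base, e.g. &a[3] - &a[1] is twice the element size; it returns false when
   either offset is non-constant.  ARRAY_DECL is assumed to be an ordinary
   C array with a zero lower bound; build4, size_int, build_fold_addr_expr,
   TYPE_SIZE_UNIT and tree_to_shwi are existing interfaces.  */
#if 0
static void
ptr_difference_example (tree array_decl)
{
  tree elt_type = TREE_TYPE (TREE_TYPE (array_decl));
  tree ref1 = build4 (ARRAY_REF, elt_type, array_decl, size_int (1),
		      NULL_TREE, NULL_TREE);
  tree ref3 = build4 (ARRAY_REF, elt_type, array_decl, size_int (3),
		      NULL_TREE, NULL_TREE);
  HOST_WIDE_INT diff;
  if (ptr_difference_const (build_fold_addr_expr (ref3),
			    build_fold_addr_expr (ref1), &diff))
    /* DIFF is two element sizes, in bytes.  */
    gcc_assert (diff == 2 * tree_to_shwi (TYPE_SIZE_UNIT (elt_type)));
}
#endif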
16190 /* Simplify the floating point expression EXP when the sign of the
16191 result is not significant. Return NULL_TREE if no simplification
16192 is possible. */
16193
16194 tree
16195 fold_strip_sign_ops (tree exp)
16196 {
16197 tree arg0, arg1;
16198 location_t loc = EXPR_LOCATION (exp);
16199
16200 switch (TREE_CODE (exp))
16201 {
16202 case ABS_EXPR:
16203 case NEGATE_EXPR:
16204 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16205 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16206
16207 case MULT_EXPR:
16208 case RDIV_EXPR:
16209 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16210 return NULL_TREE;
16211 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16212 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16213 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16214 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16215 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16216 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16217 break;
16218
16219 case COMPOUND_EXPR:
16220 arg0 = TREE_OPERAND (exp, 0);
16221 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16222 if (arg1)
16223 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16224 break;
16225
16226 case COND_EXPR:
16227 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16228 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16229 if (arg0 || arg1)
16230 return fold_build3_loc (loc,
16231 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16232 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16233 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16234 break;
16235
16236 case CALL_EXPR:
16237 {
16238 const enum built_in_function fcode = builtin_mathfn_code (exp);
16239 switch (fcode)
16240 {
16241 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16242 /* Strip copysign function call, return the 1st argument. */
16243 arg0 = CALL_EXPR_ARG (exp, 0);
16244 arg1 = CALL_EXPR_ARG (exp, 1);
16245 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16246
16247 default:
16248 /* Strip sign ops from the argument of "odd" math functions. */
16249 if (negate_mathfn_p (fcode))
16250 {
16251 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16252 if (arg0)
16253 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16254 }
16255 break;
16256 }
16257 }
16258 break;
16259
16260 default:
16261 break;
16262 }
16263 return NULL_TREE;
16264 }
16265
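/* Illustrative sketch (kept out of the build with #if 0): when only the
   magnitude of a floating point result matters (say, under fabs), callers
   strip sign operations with fold_strip_sign_ops and fall back to the
   original expression when nothing was stripped.  The helper name is
   hypothetical.  */
#if 0
static tree
strip_signs_or_original (tree exp)
{
  tree stripped = fold_strip_sign_ops (exp);
  return stripped ? stripped : exp;
}
#endif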
16266 /* Return OFF converted to a pointer offset type suitable as offset for
16267 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16268 tree
16269 convert_to_ptrofftype_loc (location_t loc, tree off)
16270 {
16271 return fold_convert_loc (loc, sizetype, off);
16272 }
16273
16274 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16275 tree
16276 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16277 {
16278 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16279 ptr, convert_to_ptrofftype_loc (loc, off));
16280 }
16281
16282 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16283 tree
16284 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16285 {
16286 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16287 ptr, size_int (off));
16288 }