gcc/fold-const.c: re PR ipa/65270 (issues with merging memory accesses from different code paths)
/* Fold a constant sub-tree into a single node for C-compiler
   Copyright (C) 1987-2015 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/*@@ This file should be rewritten to use an arbitrary precision
  @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
  @@ Perhaps the routines could also be used for bc/dc, and made a lib.
  @@ The routines that translate from the ap rep should
  @@ warn if precision et al. is lost.
  @@ This would also make life easier when this technology is used
  @@ for cross-compilers.  */

/* The entry points in this file are fold, size_int_wide and size_binop.

   fold takes a tree as argument and returns a simplified tree.

   size_binop takes a tree code for an arithmetic operation
   and two operands that are trees, and produces a tree for the
   result, assuming the type comes from `sizetype'.

   size_int takes an integer value, and creates a tree constant
   with type from `sizetype'.

   Note: Since the folders get called on non-gimple code as well as
   gimple code, we need to handle GIMPLE tuples as well as their
   corresponding tree equivalents.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "tm.h"
#include "flags.h"
#include "hash-set.h"
#include "machmode.h"
#include "vec.h"
#include "double-int.h"
#include "input.h"
#include "alias.h"
#include "symtab.h"
#include "wide-int.h"
#include "inchash.h"
#include "tree.h"
#include "fold-const.h"
#include "stor-layout.h"
#include "calls.h"
#include "tree-iterator.h"
#include "realmpfr.h"
#include "rtl.h"
#include "hashtab.h"
#include "hard-reg-set.h"
#include "function.h"
#include "statistics.h"
#include "real.h"
#include "fixed-value.h"
#include "insn-config.h"
#include "expmed.h"
#include "dojump.h"
#include "explow.h"
#include "emit-rtl.h"
#include "varasm.h"
#include "stmt.h"
#include "expr.h"
#include "tm_p.h"
#include "target.h"
#include "diagnostic-core.h"
#include "intl.h"
#include "langhooks.h"
#include "md5.h"
#include "predict.h"
#include "basic-block.h"
#include "tree-ssa-alias.h"
#include "internal-fn.h"
#include "tree-eh.h"
#include "gimple-expr.h"
#include "is-a.h"
#include "gimple.h"
#include "gimplify.h"
#include "tree-dfa.h"
#include "hash-table.h"  /* Required for ENABLE_FOLD_CHECKING.  */
#include "builtins.h"
#include "hash-map.h"
#include "plugin-api.h"
#include "ipa-ref.h"
#include "cgraph.h"
#include "generic-match.h"
#include "optabs.h"

/* Nonzero if we are folding constants inside an initializer; zero
   otherwise.  */
int folding_initializer = 0;

/* The following constants represent a bit based encoding of GCC's
   comparison operators.  This encoding simplifies transformations
   on relational comparison operators, such as AND and OR.  */
enum comparison_code {
  COMPCODE_FALSE = 0,
  COMPCODE_LT = 1,
  COMPCODE_EQ = 2,
  COMPCODE_LE = 3,
  COMPCODE_GT = 4,
  COMPCODE_LTGT = 5,
  COMPCODE_GE = 6,
  COMPCODE_ORD = 7,
  COMPCODE_UNORD = 8,
  COMPCODE_UNLT = 9,
  COMPCODE_UNEQ = 10,
  COMPCODE_UNLE = 11,
  COMPCODE_UNGT = 12,
  COMPCODE_NE = 13,
  COMPCODE_UNGE = 14,
  COMPCODE_TRUE = 15
};
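
/* For illustration: the encoding assigns one bit to each primitive
   outcome -- bit 0 for "less than", bit 1 for "equal", bit 2 for
   "greater than", bit 3 for "unordered" -- so composite codes are
   bitwise ORs of primitive ones:
     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)    (1 | 2 == 3)
     COMPCODE_GE == (COMPCODE_GT | COMPCODE_EQ)    (4 | 2 == 6)
     COMPCODE_NE == (COMPCODE_UNORD | COMPCODE_LT | COMPCODE_GT)
   This is what lets AND and OR of comparisons be computed as bitwise
   AND and OR of their comparison codes.  */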
128
129 static bool negate_mathfn_p (enum built_in_function);
130 static bool negate_expr_p (tree);
131 static tree negate_expr (tree);
132 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
133 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
134 static enum comparison_code comparison_to_compcode (enum tree_code);
135 static enum tree_code compcode_to_comparison (enum comparison_code);
136 static int operand_equal_for_comparison_p (tree, tree, tree);
137 static int twoval_comparison_p (tree, tree *, tree *, int *);
138 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
139 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
140 static tree make_bit_field_ref (location_t, tree, tree,
141 HOST_WIDE_INT, HOST_WIDE_INT, int);
142 static tree optimize_bit_field_compare (location_t, enum tree_code,
143 tree, tree, tree);
144 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
145 HOST_WIDE_INT *,
146 machine_mode *, int *, int *,
147 tree *, tree *);
148 static int simple_operand_p (const_tree);
149 static bool simple_operand_p_2 (tree);
150 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
151 static tree range_predecessor (tree);
152 static tree range_successor (tree);
153 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
154 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
155 static tree unextend (tree, int, int, tree);
156 static tree optimize_minmax_comparison (location_t, enum tree_code,
157 tree, tree, tree);
158 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
159 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
160 static tree fold_binary_op_with_conditional_arg (location_t,
161 enum tree_code, tree,
162 tree, tree,
163 tree, tree, int);
164 static tree fold_mathfn_compare (location_t,
165 enum built_in_function, enum tree_code,
166 tree, tree, tree);
167 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
168 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
169 static bool reorder_operands_p (const_tree, const_tree);
170 static tree fold_negate_const (tree, tree);
171 static tree fold_not_const (const_tree, tree);
172 static tree fold_relational_const (enum tree_code, tree, tree, tree);
173 static tree fold_convert_const (enum tree_code, tree, tree);
174 static tree fold_view_convert_expr (tree, tree);
175 static bool vec_cst_ctor_to_array (tree, tree *);
176
177
178 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
179 Otherwise, return LOC. */
180
181 static location_t
182 expr_location_or (tree t, location_t loc)
183 {
184 location_t tloc = EXPR_LOCATION (t);
185 return tloc == UNKNOWN_LOCATION ? loc : tloc;
186 }
187
188 /* Similar to protected_set_expr_location, but never modify x in place,
189 if location can and needs to be set, unshare it. */
190
191 static inline tree
192 protected_set_expr_location_unshare (tree x, location_t loc)
193 {
194 if (CAN_HAVE_LOCATION_P (x)
195 && EXPR_LOCATION (x) != loc
196 && !(TREE_CODE (x) == SAVE_EXPR
197 || TREE_CODE (x) == TARGET_EXPR
198 || TREE_CODE (x) == BIND_EXPR))
199 {
200 x = copy_node (x);
201 SET_EXPR_LOCATION (x, loc);
202 }
203 return x;
204 }
205 \f
206 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
207 division and returns the quotient. Otherwise returns
208 NULL_TREE. */
209
210 tree
211 div_if_zero_remainder (const_tree arg1, const_tree arg2)
212 {
213 widest_int quo;
214
215 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
216 SIGNED, &quo))
217 return wide_int_to_tree (TREE_TYPE (arg1), quo);
218
219 return NULL_TREE;
220 }
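
/* Illustrative example: for integer constants ARG1 == 12 and ARG2 == 4
   this returns the constant 3, while ARG1 == 12 and ARG2 == 5 returns
   NULL_TREE because the division leaves a remainder.  */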
\f
/* This is nonzero if we should defer warnings about undefined
   overflow.  This facility exists because these warnings are a
   special case.  The code to estimate loop iterations does not want
   to issue any warnings, since it works with expressions which do not
   occur in user code.  Various bits of cleanup code call fold(), but
   only use the result if it has certain characteristics (e.g., is a
   constant); that code only wants to issue a warning if the result is
   used.  */

static int fold_deferring_overflow_warnings;

/* If a warning about undefined overflow is deferred, this is the
   warning.  Note that this may cause us to turn two warnings into
   one, but that is fine since it is sufficient to only give one
   warning per expression.  */

static const char* fold_deferred_overflow_warning;

/* If a warning about undefined overflow is deferred, this is the
   level at which the warning should be emitted.  */

static enum warn_strict_overflow_code fold_deferred_overflow_code;

/* Start deferring overflow warnings.  We could use a stack here to
   permit nested calls, but at present it is not necessary.  */

void
fold_defer_overflow_warnings (void)
{
  ++fold_deferring_overflow_warnings;
}

/* Stop deferring overflow warnings.  If there is a pending warning,
   and ISSUE is true, then issue the warning if appropriate.  STMT is
   the statement with which the warning should be associated (used for
   location information); STMT may be NULL.  CODE is the level of the
   warning--a warn_strict_overflow_code value.  This function will use
   the smaller of CODE and the deferred code when deciding whether to
   issue the warning.  CODE may be zero to mean to always use the
   deferred code.  */

void
fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
{
  const char *warnmsg;
  location_t locus;

  gcc_assert (fold_deferring_overflow_warnings > 0);
  --fold_deferring_overflow_warnings;
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning != NULL
	  && code != 0
	  && code < (int) fold_deferred_overflow_code)
	fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
      return;
    }

  warnmsg = fold_deferred_overflow_warning;
  fold_deferred_overflow_warning = NULL;

  if (!issue || warnmsg == NULL)
    return;

  if (gimple_no_warning_p (stmt))
    return;

  /* Use the smallest code level when deciding to issue the
     warning.  */
  if (code == 0 || code > (int) fold_deferred_overflow_code)
    code = fold_deferred_overflow_code;

  if (!issue_strict_overflow_warning (code))
    return;

  if (stmt == NULL)
    locus = input_location;
  else
    locus = gimple_location (stmt);
  warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
}

/* Stop deferring overflow warnings, ignoring any deferred
   warnings.  */

void
fold_undefer_and_ignore_overflow_warnings (void)
{
  fold_undefer_overflow_warnings (false, NULL, 0);
}

/* Whether we are deferring overflow warnings.  */

bool
fold_deferring_overflow_warnings_p (void)
{
  return fold_deferring_overflow_warnings > 0;
}
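
/* Illustrative usage of the deferral API above (a sketch, not code from
   this file; USED_P and STMT stand for caller-specific state):

     fold_defer_overflow_warnings ();
     tree res = fold (expr);
     fold_undefer_overflow_warnings (used_p, stmt, 0);

   A deferred -Wstrict-overflow warning is emitted, at STMT's location,
   only when the caller passes true for the first argument.  */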

/* This is called when we fold something based on the fact that signed
   overflow is undefined.  */

static void
fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
{
  if (fold_deferring_overflow_warnings > 0)
    {
      if (fold_deferred_overflow_warning == NULL
	  || wc < fold_deferred_overflow_code)
	{
	  fold_deferred_overflow_warning = gmsgid;
	  fold_deferred_overflow_code = wc;
	}
    }
  else if (issue_strict_overflow_warning (wc))
    warning (OPT_Wstrict_overflow, gmsgid);
}
\f
/* Return true if the built-in mathematical function specified by CODE
   is odd, i.e. -f(x) == f(-x).  */

static bool
negate_mathfn_p (enum built_in_function code)
{
  switch (code)
    {
    CASE_FLT_FN (BUILT_IN_ASIN):
    CASE_FLT_FN (BUILT_IN_ASINH):
    CASE_FLT_FN (BUILT_IN_ATAN):
    CASE_FLT_FN (BUILT_IN_ATANH):
    CASE_FLT_FN (BUILT_IN_CASIN):
    CASE_FLT_FN (BUILT_IN_CASINH):
    CASE_FLT_FN (BUILT_IN_CATAN):
    CASE_FLT_FN (BUILT_IN_CATANH):
    CASE_FLT_FN (BUILT_IN_CBRT):
    CASE_FLT_FN (BUILT_IN_CPROJ):
    CASE_FLT_FN (BUILT_IN_CSIN):
    CASE_FLT_FN (BUILT_IN_CSINH):
    CASE_FLT_FN (BUILT_IN_CTAN):
    CASE_FLT_FN (BUILT_IN_CTANH):
    CASE_FLT_FN (BUILT_IN_ERF):
    CASE_FLT_FN (BUILT_IN_LLROUND):
    CASE_FLT_FN (BUILT_IN_LROUND):
    CASE_FLT_FN (BUILT_IN_ROUND):
    CASE_FLT_FN (BUILT_IN_SIN):
    CASE_FLT_FN (BUILT_IN_SINH):
    CASE_FLT_FN (BUILT_IN_TAN):
    CASE_FLT_FN (BUILT_IN_TANH):
    CASE_FLT_FN (BUILT_IN_TRUNC):
      return true;

    CASE_FLT_FN (BUILT_IN_LLRINT):
    CASE_FLT_FN (BUILT_IN_LRINT):
    CASE_FLT_FN (BUILT_IN_NEARBYINT):
    CASE_FLT_FN (BUILT_IN_RINT):
      return !flag_rounding_math;

    default:
      break;
    }
  return false;
}

/* Check whether we may negate an integer constant T without causing
   overflow.  */

bool
may_negate_without_overflow_p (const_tree t)
{
  tree type;

  gcc_assert (TREE_CODE (t) == INTEGER_CST);

  type = TREE_TYPE (t);
  if (TYPE_UNSIGNED (type))
    return false;

  return !wi::only_sign_bit_p (t);
}

/* Determine whether an expression T can be cheaply negated using
   the function negate_expr without introducing undefined overflow.  */

static bool
negate_expr_p (tree t)
{
  tree type;

  if (t == 0)
    return false;

  type = TREE_TYPE (t);

  STRIP_SIGN_NOPS (t);
  switch (TREE_CODE (t))
    {
    case INTEGER_CST:
      if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
	return true;

      /* Check that -CST will not overflow type.  */
      return may_negate_without_overflow_p (t);
    case BIT_NOT_EXPR:
      return (INTEGRAL_TYPE_P (type)
	      && TYPE_OVERFLOW_WRAPS (type));

    case FIXED_CST:
      return true;

    case NEGATE_EXPR:
      return !TYPE_OVERFLOW_SANITIZED (type);

    case REAL_CST:
      /* We want to canonicalize to positive real constants.  Pretend
	 that only negative ones can be easily negated.  */
      return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));

    case COMPLEX_CST:
      return negate_expr_p (TREE_REALPART (t))
	     && negate_expr_p (TREE_IMAGPART (t));

    case VECTOR_CST:
      {
	if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
	  return true;

	int count = TYPE_VECTOR_SUBPARTS (type), i;

	for (i = 0; i < count; i++)
	  if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
	    return false;

	return true;
      }

    case COMPLEX_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0))
	     && negate_expr_p (TREE_OPERAND (t, 1));

    case CONJ_EXPR:
      return negate_expr_p (TREE_OPERAND (t, 0));

    case PLUS_EXPR:
      if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  || HONOR_SIGNED_ZEROS (element_mode (type)))
	return false;
      /* -(A + B) -> (-B) - A.  */
      if (negate_expr_p (TREE_OPERAND (t, 1))
	  && reorder_operands_p (TREE_OPERAND (t, 0),
				 TREE_OPERAND (t, 1)))
	return true;
      /* -(A + B) -> (-A) - B.  */
      return negate_expr_p (TREE_OPERAND (t, 0));

    case MINUS_EXPR:
      /* We can't turn -(A-B) into B-A when we honor signed zeros.  */
      return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	     && !HONOR_SIGNED_ZEROS (element_mode (type))
	     && reorder_operands_p (TREE_OPERAND (t, 0),
				    TREE_OPERAND (t, 1));

    case MULT_EXPR:
      if (TYPE_UNSIGNED (TREE_TYPE (t)))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
	return negate_expr_p (TREE_OPERAND (t, 1))
	       || negate_expr_p (TREE_OPERAND (t, 0));
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
	{
	  if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
	    break;
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
	      || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
		  && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
	    return true;
	}
      else if (negate_expr_p (TREE_OPERAND (t, 0)))
	return true;
      return negate_expr_p (TREE_OPERAND (t, 1));

    case NOP_EXPR:
      /* Negate -((double)float) as (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tree tem = strip_float_extensions (t);
	  if (tem != t)
	    return negate_expr_p (tem);
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t)))
	return negate_expr_p (CALL_EXPR_ARG (t, 0));
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    return true;
	}
      break;

    default:
      break;
    }
  return false;
}
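
/* Illustrative examples: for a signed int with undefined overflow,
   negate_expr_p is true for the constant 5, since -5 is representable,
   but false for INT_MIN, whose negation would overflow (see
   may_negate_without_overflow_p above).  With wrapping overflow the
   INTEGER_CST case returns true unconditionally.  */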

/* Given T, an expression, return a folded tree for -T, or NULL_TREE if
   no simplification is possible.
   If negate_expr_p would return true for T, NULL_TREE will never be
   returned.  */

static tree
fold_negate_expr (location_t loc, tree t)
{
  tree type = TREE_TYPE (t);
  tree tem;

  switch (TREE_CODE (t))
    {
    /* Convert - (~A) to A + 1.  */
    case BIT_NOT_EXPR:
      if (INTEGRAL_TYPE_P (type))
	return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
				build_one_cst (type));
      break;

    case INTEGER_CST:
      tem = fold_negate_const (t, type);
      if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
	  || (ANY_INTEGRAL_TYPE_P (type)
	      && !TYPE_OVERFLOW_TRAPS (type)
	      && TYPE_OVERFLOW_WRAPS (type))
	  || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
	return tem;
      break;

    case REAL_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case FIXED_CST:
      tem = fold_negate_const (t, type);
      return tem;

    case COMPLEX_CST:
      {
	tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
	tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
	if (rpart && ipart)
	  return build_complex (type, rpart, ipart);
      }
      break;

    case VECTOR_CST:
      {
	int count = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts = XALLOCAVEC (tree, count);

	for (i = 0; i < count; i++)
	  {
	    elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
	    if (elts[i] == NULL_TREE)
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case COMPLEX_EXPR:
      if (negate_expr_p (t))
	return fold_build2_loc (loc, COMPLEX_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)),
				fold_negate_expr (loc, TREE_OPERAND (t, 1)));
      break;

    case CONJ_EXPR:
      if (negate_expr_p (t))
	return fold_build1_loc (loc, CONJ_EXPR, type,
				fold_negate_expr (loc, TREE_OPERAND (t, 0)));
      break;

    case NEGATE_EXPR:
      if (!TYPE_OVERFLOW_SANITIZED (type))
	return TREE_OPERAND (t, 0);
      break;

    case PLUS_EXPR:
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type)))
	{
	  /* -(A + B) -> (-B) - A.  */
	  if (negate_expr_p (TREE_OPERAND (t, 1))
	      && reorder_operands_p (TREE_OPERAND (t, 0),
				     TREE_OPERAND (t, 1)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 1));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 0));
	    }

	  /* -(A + B) -> (-A) - B.  */
	  if (negate_expr_p (TREE_OPERAND (t, 0)))
	    {
	      tem = negate_expr (TREE_OPERAND (t, 0));
	      return fold_build2_loc (loc, MINUS_EXPR, type,
				      tem, TREE_OPERAND (t, 1));
	    }
	}
      break;

    case MINUS_EXPR:
      /* - (A - B) -> B - A  */
      if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
	  && !HONOR_SIGNED_ZEROS (element_mode (type))
	  && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
	return fold_build2_loc (loc, MINUS_EXPR, type,
				TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
      break;

    case MULT_EXPR:
      if (TYPE_UNSIGNED (type))
	break;

      /* Fall through.  */

    case RDIV_EXPR:
      if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
	{
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    TREE_OPERAND (t, 0), negate_expr (tem));
	  tem = TREE_OPERAND (t, 0);
	  if (negate_expr_p (tem))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case TRUNC_DIV_EXPR:
    case ROUND_DIV_EXPR:
    case EXACT_DIV_EXPR:
      /* In general we can't negate A / B, because if A is INT_MIN and
	 B is 1, we may turn this into INT_MIN / -1 which is undefined
	 and actually traps on some architectures.  But if overflow is
	 undefined, we can negate, because - (INT_MIN / 1) is an
	 overflow.  */
      if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
	{
	  const char * const warnmsg = G_("assuming signed overflow does not "
					  "occur when negating a division");
	  tem = TREE_OPERAND (t, 1);
	  if (negate_expr_p (tem))
	    {
	      if (INTEGRAL_TYPE_P (type)
		  && (TREE_CODE (tem) != INTEGER_CST
		      || integer_onep (tem)))
		fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
	      return fold_build2_loc (loc, TREE_CODE (t), type,
				      TREE_OPERAND (t, 0), negate_expr (tem));
	    }
	  /* If overflow is undefined then we have to be careful because
	     we ask whether it's ok to associate the negate with the
	     division which is not ok for example for
	     -((a - b) / c) where (-(a - b)) / c may invoke undefined
	     overflow because of negating INT_MIN.  So do not use
	     negate_expr_p here but open-code the two important cases.  */
	  tem = TREE_OPERAND (t, 0);
	  if ((INTEGRAL_TYPE_P (type)
	       && (TREE_CODE (tem) == NEGATE_EXPR
		   || (TREE_CODE (tem) == INTEGER_CST
		       && may_negate_without_overflow_p (tem))))
	      || !INTEGRAL_TYPE_P (type))
	    return fold_build2_loc (loc, TREE_CODE (t), type,
				    negate_expr (tem), TREE_OPERAND (t, 1));
	}
      break;

    case NOP_EXPR:
      /* Convert -((double)float) into (double)(-float).  */
      if (TREE_CODE (type) == REAL_TYPE)
	{
	  tem = strip_float_extensions (t);
	  if (tem != t && negate_expr_p (tem))
	    return fold_convert_loc (loc, type, negate_expr (tem));
	}
      break;

    case CALL_EXPR:
      /* Negate -f(x) as f(-x).  */
      if (negate_mathfn_p (builtin_mathfn_code (t))
	  && negate_expr_p (CALL_EXPR_ARG (t, 0)))
	{
	  tree fndecl, arg;

	  fndecl = get_callee_fndecl (t);
	  arg = negate_expr (CALL_EXPR_ARG (t, 0));
	  return build_call_expr_loc (loc, fndecl, 1, arg);
	}
      break;

    case RSHIFT_EXPR:
      /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int.  */
      if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
	{
	  tree op1 = TREE_OPERAND (t, 1);
	  if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
	    {
	      tree ntype = TYPE_UNSIGNED (type)
			   ? signed_type_for (type)
			   : unsigned_type_for (type);
	      tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
	      temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
	      return fold_convert_loc (loc, type, temp);
	    }
	}
      break;

    default:
      break;
    }

  return NULL_TREE;
}

/* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
   negated in a simpler way.  Also allow T to be NULL_TREE, in which
   case return NULL_TREE.  */

static tree
negate_expr (tree t)
{
  tree type, tem;
  location_t loc;

  if (t == NULL_TREE)
    return NULL_TREE;

  loc = EXPR_LOCATION (t);
  type = TREE_TYPE (t);
  STRIP_SIGN_NOPS (t);

  tem = fold_negate_expr (loc, t);
  if (!tem)
    tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
  return fold_convert_loc (loc, type, tem);
}
\f
/* Split a tree IN into a constant, literal and variable parts that could be
   combined with CODE to make IN.  "constant" means an expression with
   TREE_CONSTANT but that isn't an actual constant.  CODE must be a
   commutative arithmetic operation.  Store the constant part into *CONP,
   the literal in *LITP and return the variable part.  If a part isn't
   present, set it to null.  If the tree does not decompose in this way,
   return the entire tree as the variable part and the other parts as null.

   If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR.  In that
   case, we negate an operand that was subtracted, except for a literal,
   for which we use *MINUS_LITP instead.

   If NEGATE_P is true, we are negating all of IN, again except a literal
   for which we use *MINUS_LITP instead.

   If IN is itself a literal or constant, return it as appropriate.

   Note that we do not guarantee that any of the three values will be the
   same type as IN, but they will have the same signedness and mode.  */

static tree
split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
	    tree *minus_litp, int negate_p)
{
  tree var = 0;

  *conp = 0;
  *litp = 0;
  *minus_litp = 0;

  /* Strip any conversions that don't change the machine mode or signedness.  */
  STRIP_SIGN_NOPS (in);

  if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
      || TREE_CODE (in) == FIXED_CST)
    *litp = in;
  else if (TREE_CODE (in) == code
	   || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
	       && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
	       /* We can associate addition and subtraction together (even
		  though the C standard doesn't say so) for integers because
		  the value is not affected.  For reals, the value might be
		  affected, so we can't.  */
	       && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
		   || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
    {
      tree op0 = TREE_OPERAND (in, 0);
      tree op1 = TREE_OPERAND (in, 1);
      int neg1_p = TREE_CODE (in) == MINUS_EXPR;
      int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;

      /* First see if either of the operands is a literal, then a constant.  */
      if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
	  || TREE_CODE (op0) == FIXED_CST)
	*litp = op0, op0 = 0;
      else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
	       || TREE_CODE (op1) == FIXED_CST)
	*litp = op1, neg_litp_p = neg1_p, op1 = 0;

      if (op0 != 0 && TREE_CONSTANT (op0))
	*conp = op0, op0 = 0;
      else if (op1 != 0 && TREE_CONSTANT (op1))
	*conp = op1, neg_conp_p = neg1_p, op1 = 0;

      /* If we haven't dealt with either operand, this is not a case we can
	 decompose.  Otherwise, VAR is either of the ones remaining, if any.  */
      if (op0 != 0 && op1 != 0)
	var = in;
      else if (op0 != 0)
	var = op0;
      else
	var = op1, neg_var_p = neg1_p;

      /* Now do any needed negations.  */
      if (neg_litp_p)
	*minus_litp = *litp, *litp = 0;
      if (neg_conp_p)
	*conp = negate_expr (*conp);
      if (neg_var_p)
	var = negate_expr (var);
    }
  else if (TREE_CODE (in) == BIT_NOT_EXPR
	   && code == PLUS_EXPR)
    {
      /* -X - 1 is folded to ~X, undo that here.  */
      *minus_litp = build_one_cst (TREE_TYPE (in));
      var = negate_expr (TREE_OPERAND (in, 0));
    }
  else if (TREE_CONSTANT (in))
    *conp = in;
  else
    var = in;

  if (negate_p)
    {
      if (*litp)
	*minus_litp = *litp, *litp = 0;
      else if (*minus_litp)
	*litp = *minus_litp, *minus_litp = 0;
      *conp = negate_expr (*conp);
      var = negate_expr (var);
    }

  return var;
}
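
/* Illustrative example: splitting IN = a + 5 with CODE == PLUS_EXPR
   stores 5 in *LITP, leaves *CONP null and returns the variable part a;
   for IN = a - 5 the literal was subtracted, so it is stored in
   *MINUS_LITP instead.  */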

/* Re-associate trees split by the above function.  T1 and T2 are
   either expressions to associate or null.  Return the new
   expression, if any.  LOC is the location of the new expression.  If
   we build an operation, do it in TYPE and with CODE.  */

static tree
associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
{
  if (t1 == 0)
    return t2;
  else if (t2 == 0)
    return t1;

  /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
     try to fold this since we will have infinite recursion.  But do
     deal with any NEGATE_EXPRs.  */
  if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
      || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
    {
      if (code == PLUS_EXPR)
	{
	  if (TREE_CODE (t1) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t2),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t1, 0)));
	  else if (TREE_CODE (t2) == NEGATE_EXPR)
	    return build2_loc (loc, MINUS_EXPR, type,
			       fold_convert_loc (loc, type, t1),
			       fold_convert_loc (loc, type,
						 TREE_OPERAND (t2, 0)));
	  else if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (t2))
	    return fold_convert_loc (loc, type, t1);
	}

      return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			 fold_convert_loc (loc, type, t2));
    }

  return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
			  fold_convert_loc (loc, type, t2));
}
\f
/* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
   for use in int_const_binop, size_binop and size_diffop.  */

static bool
int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
{
  if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
    return false;
  if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
    return false;

  switch (code)
    {
    case LSHIFT_EXPR:
    case RSHIFT_EXPR:
    case LROTATE_EXPR:
    case RROTATE_EXPR:
      return true;

    default:
      break;
    }

  return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
	 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
	 && TYPE_MODE (type1) == TYPE_MODE (type2);
}


/* Combine two integer constants ARG1 and ARG2 under operation CODE
   to produce a new constant.  Return NULL_TREE if we don't know how
   to evaluate CODE at compile-time.  */

static tree
int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
		   int overflowable)
{
  wide_int res;
  tree t;
  tree type = TREE_TYPE (arg1);
  signop sign = TYPE_SIGN (type);
  bool overflow = false;

  wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
				  TYPE_SIGN (TREE_TYPE (parg2)));

  switch (code)
    {
    case BIT_IOR_EXPR:
      res = wi::bit_or (arg1, arg2);
      break;

    case BIT_XOR_EXPR:
      res = wi::bit_xor (arg1, arg2);
      break;

    case BIT_AND_EXPR:
      res = wi::bit_and (arg1, arg2);
      break;

    case RSHIFT_EXPR:
    case LSHIFT_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RSHIFT_EXPR)
	    code = LSHIFT_EXPR;
	  else
	    code = RSHIFT_EXPR;
	}

      if (code == RSHIFT_EXPR)
	/* It's unclear from the C standard whether shifts can overflow.
	   The following code ignores overflow; perhaps a C standard
	   interpretation ruling is needed.  */
	res = wi::rshift (arg1, arg2, sign);
      else
	res = wi::lshift (arg1, arg2);
      break;

    case RROTATE_EXPR:
    case LROTATE_EXPR:
      if (wi::neg_p (arg2))
	{
	  arg2 = -arg2;
	  if (code == RROTATE_EXPR)
	    code = LROTATE_EXPR;
	  else
	    code = RROTATE_EXPR;
	}

      if (code == RROTATE_EXPR)
	res = wi::rrotate (arg1, arg2);
      else
	res = wi::lrotate (arg1, arg2);
      break;

    case PLUS_EXPR:
      res = wi::add (arg1, arg2, sign, &overflow);
      break;

    case MINUS_EXPR:
      res = wi::sub (arg1, arg2, sign, &overflow);
      break;

    case MULT_EXPR:
      res = wi::mul (arg1, arg2, sign, &overflow);
      break;

    case MULT_HIGHPART_EXPR:
      res = wi::mul_high (arg1, arg2, sign);
      break;

    case TRUNC_DIV_EXPR:
    case EXACT_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_DIV_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::div_round (arg1, arg2, sign, &overflow);
      break;

    case TRUNC_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_trunc (arg1, arg2, sign, &overflow);
      break;

    case FLOOR_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_floor (arg1, arg2, sign, &overflow);
      break;

    case CEIL_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_ceil (arg1, arg2, sign, &overflow);
      break;

    case ROUND_MOD_EXPR:
      if (arg2 == 0)
	return NULL_TREE;
      res = wi::mod_round (arg1, arg2, sign, &overflow);
      break;

    case MIN_EXPR:
      res = wi::min (arg1, arg2, sign);
      break;

    case MAX_EXPR:
      res = wi::max (arg1, arg2, sign);
      break;

    default:
      return NULL_TREE;
    }

  t = force_fit_type (type, res, overflowable,
		      (((sign == SIGNED || overflowable == -1)
			&& overflow)
		       | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));

  return t;
}

tree
int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
{
  return int_const_binop_1 (code, arg1, arg2, 1);
}
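
/* Illustrative example: folding the constant expression 3 + 4 boils
   down to
     int_const_binop (PLUS_EXPR,
		      build_int_cst (integer_type_node, 3),
		      build_int_cst (integer_type_node, 4))
   which returns the INTEGER_CST 7; any division or modulus with a zero
   second operand returns NULL_TREE instead of folding.  */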

/* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
   constant.  We assume ARG1 and ARG2 have the same data type, or at least
   are the same kind of constant and the same machine mode.  Return zero if
   combining the constants is not allowed in the current operating mode.  */

static tree
const_binop (enum tree_code code, tree arg1, tree arg2)
{
  /* Sanity check for the recursive cases.  */
  if (!arg1 || !arg2)
    return NULL_TREE;

  STRIP_NOPS (arg1);
  STRIP_NOPS (arg2);

  if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
    {
      if (code == POINTER_PLUS_EXPR)
	return int_const_binop (PLUS_EXPR,
				arg1, fold_convert (TREE_TYPE (arg1), arg2));

      return int_const_binop (code, arg1, arg2);
    }

  if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
    {
      machine_mode mode;
      REAL_VALUE_TYPE d1;
      REAL_VALUE_TYPE d2;
      REAL_VALUE_TYPE value;
      REAL_VALUE_TYPE result;
      bool inexact;
      tree t, type;

      /* The following codes are handled by real_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case RDIV_EXPR:
	case MIN_EXPR:
	case MAX_EXPR:
	  break;

	default:
	  return NULL_TREE;
	}

      d1 = TREE_REAL_CST (arg1);
      d2 = TREE_REAL_CST (arg2);

      type = TREE_TYPE (arg1);
      mode = TYPE_MODE (type);

      /* Don't perform operation if we honor signaling NaNs and
	 either operand is a NaN.  */
      if (HONOR_SNANS (mode)
	  && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
	return NULL_TREE;

      /* Don't perform operation if it would raise a division
	 by zero exception.  */
      if (code == RDIV_EXPR
	  && REAL_VALUES_EQUAL (d2, dconst0)
	  && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
	return NULL_TREE;

      /* If either operand is a NaN, just return it.  Otherwise, set up
	 for floating-point trap; we return an overflow.  */
      if (REAL_VALUE_ISNAN (d1))
	return arg1;
      else if (REAL_VALUE_ISNAN (d2))
	return arg2;

      inexact = real_arithmetic (&value, code, &d1, &d2);
      real_convert (&result, mode, &value);

      /* Don't constant fold this floating point operation if
	 the result has overflowed and flag_trapping_math.  */
      if (flag_trapping_math
	  && MODE_HAS_INFINITIES (mode)
	  && REAL_VALUE_ISINF (result)
	  && !REAL_VALUE_ISINF (d1)
	  && !REAL_VALUE_ISINF (d2))
	return NULL_TREE;

      /* Don't constant fold this floating point operation if the
	 result may depend upon the run-time rounding mode and
	 flag_rounding_math is set, or if GCC's software emulation
	 is unable to accurately represent the result.  */
      if ((flag_rounding_math
	   || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
	  && (inexact || !real_identical (&result, &value)))
	return NULL_TREE;

      t = build_real (type, result);

      TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
      return t;
    }

  if (TREE_CODE (arg1) == FIXED_CST)
    {
      FIXED_VALUE_TYPE f1;
      FIXED_VALUE_TYPE f2;
      FIXED_VALUE_TYPE result;
      tree t, type;
      int sat_p;
      bool overflow_p;

      /* The following codes are handled by fixed_arithmetic.  */
      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	case MULT_EXPR:
	case TRUNC_DIV_EXPR:
	  if (TREE_CODE (arg2) != FIXED_CST)
	    return NULL_TREE;
	  f2 = TREE_FIXED_CST (arg2);
	  break;

	case LSHIFT_EXPR:
	case RSHIFT_EXPR:
	  {
	    if (TREE_CODE (arg2) != INTEGER_CST)
	      return NULL_TREE;
	    wide_int w2 = arg2;
	    f2.data.high = w2.elt (1);
	    f2.data.low = w2.elt (0);
	    f2.mode = SImode;
	  }
	  break;

	default:
	  return NULL_TREE;
	}

      f1 = TREE_FIXED_CST (arg1);
      type = TREE_TYPE (arg1);
      sat_p = TYPE_SATURATING (type);
      overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
      t = build_fixed (type, result);
      /* Propagate overflow flags.  */
      if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
	TREE_OVERFLOW (t) = 1;
      return t;
    }

  if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
    {
      tree type = TREE_TYPE (arg1);
      tree r1 = TREE_REALPART (arg1);
      tree i1 = TREE_IMAGPART (arg1);
      tree r2 = TREE_REALPART (arg2);
      tree i2 = TREE_IMAGPART (arg2);
      tree real, imag;

      switch (code)
	{
	case PLUS_EXPR:
	case MINUS_EXPR:
	  real = const_binop (code, r1, r2);
	  imag = const_binop (code, i1, i2);
	  break;

	case MULT_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_mul);

	  real = const_binop (MINUS_EXPR,
			      const_binop (MULT_EXPR, r1, r2),
			      const_binop (MULT_EXPR, i1, i2));
	  imag = const_binop (PLUS_EXPR,
			      const_binop (MULT_EXPR, r1, i2),
			      const_binop (MULT_EXPR, i1, r2));
	  break;

	case RDIV_EXPR:
	  if (COMPLEX_FLOAT_TYPE_P (type))
	    return do_mpc_arg2 (arg1, arg2, type,
				/* do_nonfinite= */ folding_initializer,
				mpc_div);
	  /* Fallthru ...  */
	case TRUNC_DIV_EXPR:
	case CEIL_DIV_EXPR:
	case FLOOR_DIV_EXPR:
	case ROUND_DIV_EXPR:
	  if (flag_complex_method == 0)
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_straight().

		 Expand complex division to scalars, straightforward algorithm.
		 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
		 t = br*br + bi*bi  */
	      tree magsquared
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r2, r2),
			       const_binop (MULT_EXPR, i2, i2));
	      tree t1
		= const_binop (PLUS_EXPR,
			       const_binop (MULT_EXPR, r1, r2),
			       const_binop (MULT_EXPR, i1, i2));
	      tree t2
		= const_binop (MINUS_EXPR,
			       const_binop (MULT_EXPR, i1, r2),
			       const_binop (MULT_EXPR, r1, i2));

	      real = const_binop (code, t1, magsquared);
	      imag = const_binop (code, t2, magsquared);
	    }
	  else
	    {
	      /* Keep this algorithm in sync with
		 tree-complex.c:expand_complex_div_wide().

		 Expand complex division to scalars, modified algorithm to
		 minimize overflow with wide input ranges.  */
	      tree compare = fold_build2 (LT_EXPR, boolean_type_node,
					  fold_abs_const (r2, TREE_TYPE (type)),
					  fold_abs_const (i2, TREE_TYPE (type)));

	      if (integer_nonzerop (compare))
		{
		  /* In the TRUE branch, we compute
		     ratio = br/bi;
		     div = (br * ratio) + bi;
		     tr = (ar * ratio) + ai;
		     ti = (ai * ratio) - ar;
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, r2, i2);
		  tree div = const_binop (PLUS_EXPR, i2,
					  const_binop (MULT_EXPR, r2, ratio));
		  real = const_binop (MULT_EXPR, r1, ratio);
		  real = const_binop (PLUS_EXPR, real, i1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, i1, ratio);
		  imag = const_binop (MINUS_EXPR, imag, r1);
		  imag = const_binop (code, imag, div);
		}
	      else
		{
		  /* In the FALSE branch, we compute
		     ratio = d/c;
		     divisor = (d * ratio) + c;
		     tr = (b * ratio) + a;
		     ti = b - (a * ratio);
		     tr = tr / div;
		     ti = ti / div;  */
		  tree ratio = const_binop (code, i2, r2);
		  tree div = const_binop (PLUS_EXPR, r2,
					  const_binop (MULT_EXPR, i2, ratio));

		  real = const_binop (MULT_EXPR, i1, ratio);
		  real = const_binop (PLUS_EXPR, real, r1);
		  real = const_binop (code, real, div);

		  imag = const_binop (MULT_EXPR, r1, ratio);
		  imag = const_binop (MINUS_EXPR, i1, imag);
		  imag = const_binop (code, imag, div);
		}
	    }
	  break;

	default:
	  return NULL_TREE;
	}

      if (real && imag)
	return build_complex (type, real, imag);
    }

  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == VECTOR_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);
	  tree elem2 = VECTOR_CST_ELT (arg2, i);

	  elts[i] = const_binop (code, elem1, elem2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }

  /* Shifts allow a scalar offset for a vector.  */
  if (TREE_CODE (arg1) == VECTOR_CST
      && TREE_CODE (arg2) == INTEGER_CST)
    {
      tree type = TREE_TYPE (arg1);
      int count = TYPE_VECTOR_SUBPARTS (type), i;
      tree *elts = XALLOCAVEC (tree, count);

      for (i = 0; i < count; i++)
	{
	  tree elem1 = VECTOR_CST_ELT (arg1, i);

	  elts[i] = const_binop (code, elem1, arg2);

	  /* It is possible that const_binop cannot handle the given
	     code and returns NULL_TREE.  */
	  if (elts[i] == NULL_TREE)
	    return NULL_TREE;
	}

      return build_vector (type, elts);
    }
  return NULL_TREE;
}

/* Overload that adds a TYPE parameter to be able to dispatch
   to fold_relational_const.  */

tree
const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
{
  if (TREE_CODE_CLASS (code) == tcc_comparison)
    return fold_relational_const (code, type, arg1, arg2);

  /* ??? Until we make the const_binop worker take the type of the
     result as argument, put those cases that need it here.  */
  switch (code)
    {
    case COMPLEX_EXPR:
      if ((TREE_CODE (arg1) == REAL_CST
	   && TREE_CODE (arg2) == REAL_CST)
	  || (TREE_CODE (arg1) == INTEGER_CST
	      && TREE_CODE (arg2) == INTEGER_CST))
	return build_complex (type, arg1, arg2);
      return NULL_TREE;

    case VEC_PACK_TRUNC_EXPR:
    case VEC_PACK_FIX_TRUNC_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
	if (TREE_CODE (arg1) != VECTOR_CST
	    || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
	  return NULL_TREE;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
					  ? NOP_EXPR : FIX_TRUNC_EXPR,
					  TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case VEC_WIDEN_MULT_LO_EXPR:
    case VEC_WIDEN_MULT_HI_EXPR:
    case VEC_WIDEN_MULT_EVEN_EXPR:
    case VEC_WIDEN_MULT_ODD_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
	unsigned int out, ofs, scale;
	tree *elts;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
		    && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
	if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 4);
	if (!vec_cst_ctor_to_array (arg1, elts)
	    || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
	  return NULL_TREE;

	if (code == VEC_WIDEN_MULT_LO_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
	else if (code == VEC_WIDEN_MULT_HI_EXPR)
	  scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
	else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
	  scale = 1, ofs = 0;
	else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
	  scale = 1, ofs = 1;

	for (out = 0; out < nelts; out++)
	  {
	    unsigned int in1 = (out << scale) + ofs;
	    unsigned int in2 = in1 + nelts * 2;
	    tree t1, t2;

	    t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
	    t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);

	    if (t1 == NULL_TREE || t2 == NULL_TREE)
	      return NULL_TREE;
	    elts[out] = const_binop (MULT_EXPR, t1, t2);
	    if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    default:;
    }

  if (TREE_CODE_CLASS (code) != tcc_binary)
    return NULL_TREE;

  /* Make sure type and arg0 have the same saturating flag.  */
  gcc_checking_assert (TYPE_SATURATING (type)
		       == TYPE_SATURATING (TREE_TYPE (arg1)));

  return const_binop (code, arg1, arg2);
}

/* Compute CODE ARG0 with resulting type TYPE, with ARG0 being constant.
   Return zero if computing the constant is not possible.  */

tree
const_unop (enum tree_code code, tree type, tree arg0)
{
  switch (code)
    {
    CASE_CONVERT:
    case FLOAT_EXPR:
    case FIX_TRUNC_EXPR:
    case FIXED_CONVERT_EXPR:
      return fold_convert_const (code, type, arg0);

    case ADDR_SPACE_CONVERT_EXPR:
      if (integer_zerop (arg0))
	return fold_convert_const (code, type, arg0);
      break;

    case VIEW_CONVERT_EXPR:
      return fold_view_convert_expr (type, arg0);

    case NEGATE_EXPR:
      {
	/* Can't call fold_negate_const directly here as that doesn't
	   handle all cases and we might not be able to negate some
	   constants.  */
	tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
	if (tem && CONSTANT_CLASS_P (tem))
	  return tem;
	break;
      }

    case ABS_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
	return fold_abs_const (arg0, type);
      break;

    case CONJ_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	{
	  tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
					  TREE_TYPE (type));
	  return build_complex (type, TREE_REALPART (arg0), ipart);
	}
      break;

    case BIT_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return fold_not_const (arg0, type);
      /* Perform BIT_NOT_EXPR on each element individually.  */
      else if (TREE_CODE (arg0) == VECTOR_CST)
	{
	  tree *elements;
	  tree elem;
	  unsigned count = VECTOR_CST_NELTS (arg0), i;

	  elements = XALLOCAVEC (tree, count);
	  for (i = 0; i < count; i++)
	    {
	      elem = VECTOR_CST_ELT (arg0, i);
	      elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
	      if (elem == NULL_TREE)
		break;
	      elements[i] = elem;
	    }
	  if (i == count)
	    return build_vector (type, elements);
	}
      break;

    case TRUTH_NOT_EXPR:
      if (TREE_CODE (arg0) == INTEGER_CST)
	return constant_boolean_node (integer_zerop (arg0), type);
      break;

    case REALPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_REALPART (arg0));
      break;

    case IMAGPART_EXPR:
      if (TREE_CODE (arg0) == COMPLEX_CST)
	return fold_convert (type, TREE_IMAGPART (arg0));
      break;

    case VEC_UNPACK_LO_EXPR:
    case VEC_UNPACK_HI_EXPR:
    case VEC_UNPACK_FLOAT_LO_EXPR:
    case VEC_UNPACK_FLOAT_HI_EXPR:
      {
	unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
	tree *elts;
	enum tree_code subcode;

	gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;

	elts = XALLOCAVEC (tree, nelts * 2);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
				   || code == VEC_UNPACK_FLOAT_LO_EXPR))
	  elts += nelts;

	if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
	  subcode = NOP_EXPR;
	else
	  subcode = FLOAT_EXPR;

	for (i = 0; i < nelts; i++)
	  {
	    elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
	    if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
	      return NULL_TREE;
	  }

	return build_vector (type, elts);
      }

    case REDUC_MIN_EXPR:
    case REDUC_MAX_EXPR:
    case REDUC_PLUS_EXPR:
      {
	unsigned int nelts, i;
	tree *elts;
	enum tree_code subcode;

	if (TREE_CODE (arg0) != VECTOR_CST)
	  return NULL_TREE;
	nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));

	elts = XALLOCAVEC (tree, nelts);
	if (!vec_cst_ctor_to_array (arg0, elts))
	  return NULL_TREE;

	switch (code)
	  {
	  case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
	  case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
	  case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
	  default: gcc_unreachable ();
	  }

	for (i = 1; i < nelts; i++)
	  {
	    elts[0] = const_binop (subcode, elts[0], elts[i]);
	    if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
	      return NULL_TREE;
	  }

	return elts[0];
      }

    default:
      break;
    }

  return NULL_TREE;
}

/* Create a sizetype INT_CST node with NUMBER sign extended.  KIND
   indicates which particular sizetype to create.  */

tree
size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
{
  return build_int_cst (sizetype_tab[(int) kind], number);
}
\f
/* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
   is a tree code.  The type of the result is taken from the operands.
   Both must be equivalent integer types, ala int_binop_types_match_p.
   If the operands are constant, so is the result.  */

tree
size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);

  if (arg0 == error_mark_node || arg1 == error_mark_node)
    return error_mark_node;

  gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* Handle the special case of two integer constants faster.  */
  if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
    {
      /* And some specific cases even faster than that.  */
      if (code == PLUS_EXPR)
	{
	  if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MINUS_EXPR)
	{
	  if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
	    return arg0;
	}
      else if (code == MULT_EXPR)
	{
	  if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
	    return arg1;
	}

      /* Handle general case of two integer constants.  For sizetype
	 constant calculations we always want to know about overflow,
	 even in the unsigned case.  */
      return int_const_binop_1 (code, arg0, arg1, -1);
    }

  return fold_build2_loc (loc, code, type, arg0, arg1);
}
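
/* Illustrative usage (a sketch, assuming the usual size_binop wrapper
   that supplies UNKNOWN_LOCATION):
     size_binop (PLUS_EXPR, size_int (4), size_int (8))
   folds immediately to size_int (12) through the constant fast path
   above, while non-constant operands fall through to fold_build2_loc.  */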

/* Given two values, either both of sizetype or both of bitsizetype,
   compute the difference between the two values.  Return the value
   in signed type corresponding to the type of the operands.  */

tree
size_diffop_loc (location_t loc, tree arg0, tree arg1)
{
  tree type = TREE_TYPE (arg0);
  tree ctype;

  gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
				       TREE_TYPE (arg1)));

  /* If the type is already signed, just do the simple thing.  */
  if (!TYPE_UNSIGNED (type))
    return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);

  if (type == sizetype)
    ctype = ssizetype;
  else if (type == bitsizetype)
    ctype = sbitsizetype;
  else
    ctype = signed_type_for (type);

  /* If either operand is not a constant, do the conversions to the signed
     type and subtract.  The hardware will do the right thing with any
     overflow in the subtraction.  */
  if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
    return size_binop_loc (loc, MINUS_EXPR,
			   fold_convert_loc (loc, ctype, arg0),
			   fold_convert_loc (loc, ctype, arg1));

  /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
     Otherwise, subtract the other way, convert to CTYPE (we know that can't
     overflow) and negate (which can't either).  Special-case a result
     of zero while we're here.  */
  if (tree_int_cst_equal (arg0, arg1))
    return build_int_cst (ctype, 0);
  else if (tree_int_cst_lt (arg1, arg0))
    return fold_convert_loc (loc, ctype,
			     size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
  else
    return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
			   fold_convert_loc (loc, ctype,
					     size_binop_loc (loc,
							     MINUS_EXPR,
							     arg1, arg0)));
}
\f
/* A subroutine of fold_convert_const handling conversions of an
   INTEGER_CST to another integer type.  */

static tree
fold_convert_const_int_from_int (tree type, const_tree arg1)
{
  /* Given an integer constant, make new constant with new type,
     appropriately sign-extended or truncated.  Use widest_int
     so that any extension is done according to ARG1's type.  */
  return force_fit_type (type, wi::to_widest (arg1),
			 !POINTER_TYPE_P (TREE_TYPE (arg1)),
			 TREE_OVERFLOW (arg1));
}
1856
1857 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1858 to an integer type. */
1859
1860 static tree
1861 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1862 {
1863 bool overflow = false;
1864 tree t;
1865
1866 /* The following code implements the floating point to integer
1867 conversion rules required by the Java Language Specification,
1868 that IEEE NaNs are mapped to zero and values that overflow
1869 the target precision saturate, i.e. values greater than
1870 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1871 are mapped to INT_MIN. These semantics are allowed by the
1872 C and C++ standards that simply state that the behavior of
1873 FP-to-integer conversion is unspecified upon overflow. */
1874
1875 wide_int val;
1876 REAL_VALUE_TYPE r;
1877 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1878
1879 switch (code)
1880 {
1881 case FIX_TRUNC_EXPR:
1882 real_trunc (&r, VOIDmode, &x);
1883 break;
1884
1885 default:
1886 gcc_unreachable ();
1887 }
1888
1889 /* If R is NaN, return zero and show we have an overflow. */
1890 if (REAL_VALUE_ISNAN (r))
1891 {
1892 overflow = true;
1893 val = wi::zero (TYPE_PRECISION (type));
1894 }
1895
1896 /* See if R is less than the lower bound or greater than the
1897 upper bound. */
1898
1899 if (! overflow)
1900 {
1901 tree lt = TYPE_MIN_VALUE (type);
1902 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1903 if (REAL_VALUES_LESS (r, l))
1904 {
1905 overflow = true;
1906 val = lt;
1907 }
1908 }
1909
1910 if (! overflow)
1911 {
1912 tree ut = TYPE_MAX_VALUE (type);
1913 if (ut)
1914 {
1915 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1916 if (REAL_VALUES_LESS (u, r))
1917 {
1918 overflow = true;
1919 val = ut;
1920 }
1921 }
1922 }
1923
1924 if (! overflow)
1925 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1926
1927 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1928 return t;
1929 }
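
/* Illustrative sketch (not part of GCC) of the saturating semantics
   described above, assuming a 32-bit int target type:

     (int) 2.75   ->  2        (FIX_TRUNC_EXPR truncates toward zero)
     (int) 1e30   ->  INT_MAX  with TREE_OVERFLOW set
     (int) -1e30  ->  INT_MIN  with TREE_OVERFLOW set
     (int) NaN    ->  0        with TREE_OVERFLOW set  */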
1930
1931 /* A subroutine of fold_convert_const handling conversions of a
1932 FIXED_CST to an integer type. */
1933
1934 static tree
1935 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1936 {
1937 tree t;
1938 double_int temp, temp_trunc;
1939 unsigned int mode;
1940
1941 /* Right-shift FIXED_CST by fbit bits into temp. */
1942 temp = TREE_FIXED_CST (arg1).data;
1943 mode = TREE_FIXED_CST (arg1).mode;
1944 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1945 {
1946 temp = temp.rshift (GET_MODE_FBIT (mode),
1947 HOST_BITS_PER_DOUBLE_INT,
1948 SIGNED_FIXED_POINT_MODE_P (mode));
1949
1950 /* Left-shift temp back by fbit bits into temp_trunc. */
1951 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1952 HOST_BITS_PER_DOUBLE_INT,
1953 SIGNED_FIXED_POINT_MODE_P (mode));
1954 }
1955 else
1956 {
1957 temp = double_int_zero;
1958 temp_trunc = double_int_zero;
1959 }
1960
1961 /* If FIXED_CST is negative, we need to round the value toward 0:
1962 if any of the discarded fractional bits are nonzero, add 1 to temp. */
1963 if (SIGNED_FIXED_POINT_MODE_P (mode)
1964 && temp_trunc.is_negative ()
1965 && TREE_FIXED_CST (arg1).data != temp_trunc)
1966 temp += double_int_one;
1967
1968 /* Given a fixed-point constant, make a new constant with the new type,
1969 appropriately sign-extended or truncated. */
1970 t = force_fit_type (type, temp, -1,
1971 (temp.is_negative ()
1972 && (TYPE_UNSIGNED (type)
1973 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1974 | TREE_OVERFLOW (arg1));
1975
1976 return t;
1977 }
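
/* Worked example (illustrative only): for a signed fixed-point -2.5,
   the arithmetic right shift by fbit floors to -3; shifting back
   gives -3.0, which differs from the original -2.5, so one is added
   and the result is -2, i.e. the value is rounded toward zero.  */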
1978
1979 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1980 to another floating point type. */
1981
1982 static tree
1983 fold_convert_const_real_from_real (tree type, const_tree arg1)
1984 {
1985 REAL_VALUE_TYPE value;
1986 tree t;
1987
1988 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1989 t = build_real (type, value);
1990
1991 /* If converting an infinity or NAN to a representation that doesn't
1992 have one, set the overflow bit so that we can produce some kind of
1993 error message at the appropriate point if necessary. It's not the
1994 most user-friendly message, but it's better than nothing. */
1995 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1996 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1997 TREE_OVERFLOW (t) = 1;
1998 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1999 && !MODE_HAS_NANS (TYPE_MODE (type)))
2000 TREE_OVERFLOW (t) = 1;
2001 /* Regular overflow, conversion produced an infinity in a mode that
2002 can't represent them. */
2003 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2004 && REAL_VALUE_ISINF (value)
2005 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2006 TREE_OVERFLOW (t) = 1;
2007 else
2008 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2009 return t;
2010 }
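
/* Illustrative sketch (not part of GCC): narrowing DBL_MAX to float
   rounds to +Inf, but since IEEE float can represent infinities the
   TREE_OVERFLOW branches above do not fire; they only matter for
   target modes that cannot encode an infinity or a NaN at all.  */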
2011
2012 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2013 to a floating point type. */
2014
2015 static tree
2016 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2017 {
2018 REAL_VALUE_TYPE value;
2019 tree t;
2020
2021 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2022 t = build_real (type, value);
2023
2024 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2025 return t;
2026 }
2027
2028 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2029 to another fixed-point type. */
2030
2031 static tree
2032 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2033 {
2034 FIXED_VALUE_TYPE value;
2035 tree t;
2036 bool overflow_p;
2037
2038 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2039 TYPE_SATURATING (type));
2040 t = build_fixed (type, value);
2041
2042 /* Propagate overflow flags. */
2043 if (overflow_p | TREE_OVERFLOW (arg1))
2044 TREE_OVERFLOW (t) = 1;
2045 return t;
2046 }
2047
2048 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2049 to a fixed-point type. */
2050
2051 static tree
2052 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2053 {
2054 FIXED_VALUE_TYPE value;
2055 tree t;
2056 bool overflow_p;
2057 double_int di;
2058
2059 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2060
2061 di.low = TREE_INT_CST_ELT (arg1, 0);
2062 if (TREE_INT_CST_NUNITS (arg1) == 1)
2063 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2064 else
2065 di.high = TREE_INT_CST_ELT (arg1, 1);
2066
2067 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2068 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2069 TYPE_SATURATING (type));
2070 t = build_fixed (type, value);
2071
2072 /* Propagate overflow flags. */
2073 if (overflow_p | TREE_OVERFLOW (arg1))
2074 TREE_OVERFLOW (t) = 1;
2075 return t;
2076 }
2077
2078 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2079 to a fixed-point type. */
2080
2081 static tree
2082 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2083 {
2084 FIXED_VALUE_TYPE value;
2085 tree t;
2086 bool overflow_p;
2087
2088 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2089 &TREE_REAL_CST (arg1),
2090 TYPE_SATURATING (type));
2091 t = build_fixed (type, value);
2092
2093 /* Propagate overflow flags. */
2094 if (overflow_p | TREE_OVERFLOW (arg1))
2095 TREE_OVERFLOW (t) = 1;
2096 return t;
2097 }
2098
2099 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2100 type TYPE. If no simplification can be done return NULL_TREE. */
2101
2102 static tree
2103 fold_convert_const (enum tree_code code, tree type, tree arg1)
2104 {
2105 if (TREE_TYPE (arg1) == type)
2106 return arg1;
2107
2108 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2109 || TREE_CODE (type) == OFFSET_TYPE)
2110 {
2111 if (TREE_CODE (arg1) == INTEGER_CST)
2112 return fold_convert_const_int_from_int (type, arg1);
2113 else if (TREE_CODE (arg1) == REAL_CST)
2114 return fold_convert_const_int_from_real (code, type, arg1);
2115 else if (TREE_CODE (arg1) == FIXED_CST)
2116 return fold_convert_const_int_from_fixed (type, arg1);
2117 }
2118 else if (TREE_CODE (type) == REAL_TYPE)
2119 {
2120 if (TREE_CODE (arg1) == INTEGER_CST)
2121 return build_real_from_int_cst (type, arg1);
2122 else if (TREE_CODE (arg1) == REAL_CST)
2123 return fold_convert_const_real_from_real (type, arg1);
2124 else if (TREE_CODE (arg1) == FIXED_CST)
2125 return fold_convert_const_real_from_fixed (type, arg1);
2126 }
2127 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2128 {
2129 if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_fixed_from_fixed (type, arg1);
2131 else if (TREE_CODE (arg1) == INTEGER_CST)
2132 return fold_convert_const_fixed_from_int (type, arg1);
2133 else if (TREE_CODE (arg1) == REAL_CST)
2134 return fold_convert_const_fixed_from_real (type, arg1);
2135 }
2136 return NULL_TREE;
2137 }
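
/* Illustrative sketch (not part of GCC) of the dispatch above:

     fold_convert_const (FLOAT_EXPR, double_type_node,
                         build_int_cst (integer_type_node, 3))

   returns the REAL_CST 3.0 via build_real_from_int_cst, while a
   source/target pairing with no handler (say, a COMPLEX_CST operand)
   falls through and returns NULL_TREE.  */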
2138
2139 /* Construct a vector of zero elements of vector type TYPE. */
2140
2141 static tree
2142 build_zero_vector (tree type)
2143 {
2144 tree t;
2145
2146 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2147 return build_vector_from_val (type, t);
2148 }
2149
2150 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR. */
2151
2152 bool
2153 fold_convertible_p (const_tree type, const_tree arg)
2154 {
2155 tree orig = TREE_TYPE (arg);
2156
2157 if (type == orig)
2158 return true;
2159
2160 if (TREE_CODE (arg) == ERROR_MARK
2161 || TREE_CODE (type) == ERROR_MARK
2162 || TREE_CODE (orig) == ERROR_MARK)
2163 return false;
2164
2165 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2166 return true;
2167
2168 switch (TREE_CODE (type))
2169 {
2170 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2171 case POINTER_TYPE: case REFERENCE_TYPE:
2172 case OFFSET_TYPE:
2173 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2174 || TREE_CODE (orig) == OFFSET_TYPE)
2175 return true;
2176 return (TREE_CODE (orig) == VECTOR_TYPE
2177 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2178
2179 case REAL_TYPE:
2180 case FIXED_POINT_TYPE:
2181 case COMPLEX_TYPE:
2182 case VECTOR_TYPE:
2183 case VOID_TYPE:
2184 return TREE_CODE (type) == TREE_CODE (orig);
2185
2186 default:
2187 return false;
2188 }
2189 }
2190
2191 /* Convert expression ARG to type TYPE. Used by the middle-end for
2192 simple conversions in preference to calling the front-end's convert. */
2193
2194 tree
2195 fold_convert_loc (location_t loc, tree type, tree arg)
2196 {
2197 tree orig = TREE_TYPE (arg);
2198 tree tem;
2199
2200 if (type == orig)
2201 return arg;
2202
2203 if (TREE_CODE (arg) == ERROR_MARK
2204 || TREE_CODE (type) == ERROR_MARK
2205 || TREE_CODE (orig) == ERROR_MARK)
2206 return error_mark_node;
2207
2208 switch (TREE_CODE (type))
2209 {
2210 case POINTER_TYPE:
2211 case REFERENCE_TYPE:
2212 /* Handle conversions between pointers to different address spaces. */
2213 if (POINTER_TYPE_P (orig)
2214 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2215 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2216 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2217 /* fall through */
2218
2219 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2220 case OFFSET_TYPE:
2221 if (TREE_CODE (arg) == INTEGER_CST)
2222 {
2223 tem = fold_convert_const (NOP_EXPR, type, arg);
2224 if (tem != NULL_TREE)
2225 return tem;
2226 }
2227 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2228 || TREE_CODE (orig) == OFFSET_TYPE)
2229 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2230 if (TREE_CODE (orig) == COMPLEX_TYPE)
2231 return fold_convert_loc (loc, type,
2232 fold_build1_loc (loc, REALPART_EXPR,
2233 TREE_TYPE (orig), arg));
2234 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2235 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2236 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2237
2238 case REAL_TYPE:
2239 if (TREE_CODE (arg) == INTEGER_CST)
2240 {
2241 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2244 }
2245 else if (TREE_CODE (arg) == REAL_CST)
2246 {
2247 tem = fold_convert_const (NOP_EXPR, type, arg);
2248 if (tem != NULL_TREE)
2249 return tem;
2250 }
2251 else if (TREE_CODE (arg) == FIXED_CST)
2252 {
2253 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257
2258 switch (TREE_CODE (orig))
2259 {
2260 case INTEGER_TYPE:
2261 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2262 case POINTER_TYPE: case REFERENCE_TYPE:
2263 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2264
2265 case REAL_TYPE:
2266 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2267
2268 case FIXED_POINT_TYPE:
2269 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2270
2271 case COMPLEX_TYPE:
2272 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2273 return fold_convert_loc (loc, type, tem);
2274
2275 default:
2276 gcc_unreachable ();
2277 }
2278
2279 case FIXED_POINT_TYPE:
2280 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2281 || TREE_CODE (arg) == REAL_CST)
2282 {
2283 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2284 if (tem != NULL_TREE)
2285 goto fold_convert_exit;
2286 }
2287
2288 switch (TREE_CODE (orig))
2289 {
2290 case FIXED_POINT_TYPE:
2291 case INTEGER_TYPE:
2292 case ENUMERAL_TYPE:
2293 case BOOLEAN_TYPE:
2294 case REAL_TYPE:
2295 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2296
2297 case COMPLEX_TYPE:
2298 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2299 return fold_convert_loc (loc, type, tem);
2300
2301 default:
2302 gcc_unreachable ();
2303 }
2304
2305 case COMPLEX_TYPE:
2306 switch (TREE_CODE (orig))
2307 {
2308 case INTEGER_TYPE:
2309 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2310 case POINTER_TYPE: case REFERENCE_TYPE:
2311 case REAL_TYPE:
2312 case FIXED_POINT_TYPE:
2313 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2314 fold_convert_loc (loc, TREE_TYPE (type), arg),
2315 fold_convert_loc (loc, TREE_TYPE (type),
2316 integer_zero_node));
2317 case COMPLEX_TYPE:
2318 {
2319 tree rpart, ipart;
2320
2321 if (TREE_CODE (arg) == COMPLEX_EXPR)
2322 {
2323 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2324 TREE_OPERAND (arg, 0));
2325 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2326 TREE_OPERAND (arg, 1));
2327 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2328 }
2329
2330 arg = save_expr (arg);
2331 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2332 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2333 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2334 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2335 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2336 }
2337
2338 default:
2339 gcc_unreachable ();
2340 }
2341
2342 case VECTOR_TYPE:
2343 if (integer_zerop (arg))
2344 return build_zero_vector (type);
2345 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2346 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2347 || TREE_CODE (orig) == VECTOR_TYPE);
2348 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2349
2350 case VOID_TYPE:
2351 tem = fold_ignored_result (arg);
2352 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2353
2354 default:
2355 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2356 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2357 gcc_unreachable ();
2358 }
2359 fold_convert_exit:
2360 protected_set_expr_location_unshare (tem, loc);
2361 return tem;
2362 }
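
/* Illustrative sketch (not part of GCC): converting a COMPLEX_TYPE
   value C to a scalar type keeps only its real part, so

     fold_convert_loc (loc, double_type_node, c)

   becomes REALPART_EXPR <c> (plus a NOP if the component type
   differs), while converting a scalar to a complex type pairs it
   with a zero imaginary part in a COMPLEX_EXPR.  */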
2363 \f
2364 /* Return false if expr can be assumed not to be an lvalue, true
2365 otherwise. */
2366
2367 static bool
2368 maybe_lvalue_p (const_tree x)
2369 {
2370 /* We only need to wrap lvalue tree codes. */
2371 switch (TREE_CODE (x))
2372 {
2373 case VAR_DECL:
2374 case PARM_DECL:
2375 case RESULT_DECL:
2376 case LABEL_DECL:
2377 case FUNCTION_DECL:
2378 case SSA_NAME:
2379
2380 case COMPONENT_REF:
2381 case MEM_REF:
2382 case INDIRECT_REF:
2383 case ARRAY_REF:
2384 case ARRAY_RANGE_REF:
2385 case BIT_FIELD_REF:
2386 case OBJ_TYPE_REF:
2387
2388 case REALPART_EXPR:
2389 case IMAGPART_EXPR:
2390 case PREINCREMENT_EXPR:
2391 case PREDECREMENT_EXPR:
2392 case SAVE_EXPR:
2393 case TRY_CATCH_EXPR:
2394 case WITH_CLEANUP_EXPR:
2395 case COMPOUND_EXPR:
2396 case MODIFY_EXPR:
2397 case TARGET_EXPR:
2398 case COND_EXPR:
2399 case BIND_EXPR:
2400 break;
2401
2402 default:
2403 /* Assume the worst for front-end tree codes. */
2404 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2405 break;
2406 return false;
2407 }
2408
2409 return true;
2410 }
2411
2412 /* Return an expr equal to X but certainly not valid as an lvalue. */
2413
2414 tree
2415 non_lvalue_loc (location_t loc, tree x)
2416 {
2417 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2418 us. */
2419 if (in_gimple_form)
2420 return x;
2421
2422 if (! maybe_lvalue_p (x))
2423 return x;
2424 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2425 }
2426
2427 /* When pedantic, return an expr equal to X but certainly not valid as a
2428 pedantic lvalue. Otherwise, return X. */
2429
2430 static tree
2431 pedantic_non_lvalue_loc (location_t loc, tree x)
2432 {
2433 return protected_set_expr_location_unshare (x, loc);
2434 }
2435 \f
2436 /* Given a tree comparison code, return the code that is the logical inverse.
2437 It is generally not safe to do this for floating-point comparisons, except
2438 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2439 ERROR_MARK in this case. */
2440
2441 enum tree_code
2442 invert_tree_comparison (enum tree_code code, bool honor_nans)
2443 {
2444 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2445 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2446 return ERROR_MARK;
2447
2448 switch (code)
2449 {
2450 case EQ_EXPR:
2451 return NE_EXPR;
2452 case NE_EXPR:
2453 return EQ_EXPR;
2454 case GT_EXPR:
2455 return honor_nans ? UNLE_EXPR : LE_EXPR;
2456 case GE_EXPR:
2457 return honor_nans ? UNLT_EXPR : LT_EXPR;
2458 case LT_EXPR:
2459 return honor_nans ? UNGE_EXPR : GE_EXPR;
2460 case LE_EXPR:
2461 return honor_nans ? UNGT_EXPR : GT_EXPR;
2462 case LTGT_EXPR:
2463 return UNEQ_EXPR;
2464 case UNEQ_EXPR:
2465 return LTGT_EXPR;
2466 case UNGT_EXPR:
2467 return LE_EXPR;
2468 case UNGE_EXPR:
2469 return LT_EXPR;
2470 case UNLT_EXPR:
2471 return GE_EXPR;
2472 case UNLE_EXPR:
2473 return GT_EXPR;
2474 case ORDERED_EXPR:
2475 return UNORDERED_EXPR;
2476 case UNORDERED_EXPR:
2477 return ORDERED_EXPR;
2478 default:
2479 gcc_unreachable ();
2480 }
2481 }
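
/* Illustrative sketch (not part of GCC): with NaNs but no trapping
   math,

     invert_tree_comparison (LT_EXPR, true)   == UNGE_EXPR

   because !(a < b) must also be true when either operand is a NaN,
   whereas

     invert_tree_comparison (LT_EXPR, false)  == GE_EXPR

   once NaNs cannot occur.  With flag_trapping_math set, inverting an
   ordered <, <=, > or >= instead yields ERROR_MARK, since the
   unordered replacement would lose the invalid-operand exception on
   NaN inputs.  */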
2482
2483 /* Similar, but return the comparison that results if the operands are
2484 swapped. This is safe for floating-point. */
2485
2486 enum tree_code
2487 swap_tree_comparison (enum tree_code code)
2488 {
2489 switch (code)
2490 {
2491 case EQ_EXPR:
2492 case NE_EXPR:
2493 case ORDERED_EXPR:
2494 case UNORDERED_EXPR:
2495 case LTGT_EXPR:
2496 case UNEQ_EXPR:
2497 return code;
2498 case GT_EXPR:
2499 return LT_EXPR;
2500 case GE_EXPR:
2501 return LE_EXPR;
2502 case LT_EXPR:
2503 return GT_EXPR;
2504 case LE_EXPR:
2505 return GE_EXPR;
2506 case UNGT_EXPR:
2507 return UNLT_EXPR;
2508 case UNGE_EXPR:
2509 return UNLE_EXPR;
2510 case UNLT_EXPR:
2511 return UNGT_EXPR;
2512 case UNLE_EXPR:
2513 return UNGE_EXPR;
2514 default:
2515 gcc_unreachable ();
2516 }
2517 }
2518
2519
2520 /* Convert a comparison tree code from an enum tree_code representation
2521 into a compcode bit-based encoding. This function is the inverse of
2522 compcode_to_comparison. */
2523
2524 static enum comparison_code
2525 comparison_to_compcode (enum tree_code code)
2526 {
2527 switch (code)
2528 {
2529 case LT_EXPR:
2530 return COMPCODE_LT;
2531 case EQ_EXPR:
2532 return COMPCODE_EQ;
2533 case LE_EXPR:
2534 return COMPCODE_LE;
2535 case GT_EXPR:
2536 return COMPCODE_GT;
2537 case NE_EXPR:
2538 return COMPCODE_NE;
2539 case GE_EXPR:
2540 return COMPCODE_GE;
2541 case ORDERED_EXPR:
2542 return COMPCODE_ORD;
2543 case UNORDERED_EXPR:
2544 return COMPCODE_UNORD;
2545 case UNLT_EXPR:
2546 return COMPCODE_UNLT;
2547 case UNEQ_EXPR:
2548 return COMPCODE_UNEQ;
2549 case UNLE_EXPR:
2550 return COMPCODE_UNLE;
2551 case UNGT_EXPR:
2552 return COMPCODE_UNGT;
2553 case LTGT_EXPR:
2554 return COMPCODE_LTGT;
2555 case UNGE_EXPR:
2556 return COMPCODE_UNGE;
2557 default:
2558 gcc_unreachable ();
2559 }
2560 }
2561
2562 /* Convert a compcode bit-based encoding of a comparison operator back
2563 to GCC's enum tree_code representation. This function is the
2564 inverse of comparison_to_compcode. */
2565
2566 static enum tree_code
2567 compcode_to_comparison (enum comparison_code code)
2568 {
2569 switch (code)
2570 {
2571 case COMPCODE_LT:
2572 return LT_EXPR;
2573 case COMPCODE_EQ:
2574 return EQ_EXPR;
2575 case COMPCODE_LE:
2576 return LE_EXPR;
2577 case COMPCODE_GT:
2578 return GT_EXPR;
2579 case COMPCODE_NE:
2580 return NE_EXPR;
2581 case COMPCODE_GE:
2582 return GE_EXPR;
2583 case COMPCODE_ORD:
2584 return ORDERED_EXPR;
2585 case COMPCODE_UNORD:
2586 return UNORDERED_EXPR;
2587 case COMPCODE_UNLT:
2588 return UNLT_EXPR;
2589 case COMPCODE_UNEQ:
2590 return UNEQ_EXPR;
2591 case COMPCODE_UNLE:
2592 return UNLE_EXPR;
2593 case COMPCODE_UNGT:
2594 return UNGT_EXPR;
2595 case COMPCODE_LTGT:
2596 return LTGT_EXPR;
2597 case COMPCODE_UNGE:
2598 return UNGE_EXPR;
2599 default:
2600 gcc_unreachable ();
2601 }
2602 }
2603
2604 /* Return a tree for the comparison which is the combination of
2605 doing the AND or OR (depending on CODE) of the two operations LCODE
2606 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2607 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2608 if this makes the transformation invalid. */
2609
2610 tree
2611 combine_comparisons (location_t loc,
2612 enum tree_code code, enum tree_code lcode,
2613 enum tree_code rcode, tree truth_type,
2614 tree ll_arg, tree lr_arg)
2615 {
2616 bool honor_nans = HONOR_NANS (ll_arg);
2617 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2618 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2619 int compcode;
2620
2621 switch (code)
2622 {
2623 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2624 compcode = lcompcode & rcompcode;
2625 break;
2626
2627 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2628 compcode = lcompcode | rcompcode;
2629 break;
2630
2631 default:
2632 return NULL_TREE;
2633 }
2634
2635 if (!honor_nans)
2636 {
2637 /* Eliminate unordered comparisons, as well as LTGT and ORD
2638 which are not used unless the mode has NaNs. */
2639 compcode &= ~COMPCODE_UNORD;
2640 if (compcode == COMPCODE_LTGT)
2641 compcode = COMPCODE_NE;
2642 else if (compcode == COMPCODE_ORD)
2643 compcode = COMPCODE_TRUE;
2644 }
2645 else if (flag_trapping_math)
2646 {
2647 /* Check that the original operation and the optimized ones will trap
2648 under the same condition. */
2649 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2650 && (lcompcode != COMPCODE_EQ)
2651 && (lcompcode != COMPCODE_ORD);
2652 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2653 && (rcompcode != COMPCODE_EQ)
2654 && (rcompcode != COMPCODE_ORD);
2655 bool trap = (compcode & COMPCODE_UNORD) == 0
2656 && (compcode != COMPCODE_EQ)
2657 && (compcode != COMPCODE_ORD);
2658
2659 /* In a short-circuited boolean expression the LHS might be
2660 such that the RHS, if evaluated, will never trap. For
2661 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2662 if neither x nor y is NaN. (This is a mixed blessing: for
2663 example, the expression above will never trap, hence
2664 optimizing it to x < y would be invalid). */
2665 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2666 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2667 rtrap = false;
2668
2669 /* If the comparison was short-circuited, and only the RHS
2670 trapped, we may now generate a spurious trap. */
2671 if (rtrap && !ltrap
2672 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2673 return NULL_TREE;
2674
2675 /* If we changed the conditions that cause a trap, we lose. */
2676 if ((ltrap || rtrap) != trap)
2677 return NULL_TREE;
2678 }
2679
2680 if (compcode == COMPCODE_TRUE)
2681 return constant_boolean_node (true, truth_type);
2682 else if (compcode == COMPCODE_FALSE)
2683 return constant_boolean_node (false, truth_type);
2684 else
2685 {
2686 enum tree_code tcode;
2687
2688 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2689 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2690 }
2691 }
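
/* Worked example (illustrative only): the compcode encoding turns the
   combination into plain bit arithmetic.  With LT, EQ, GT and UNORD
   encoded as the bits 1, 2, 4 and 8, folding (a < b) || (a == b)
   computes

     COMPCODE_LT | COMPCODE_EQ  ==  1 | 2  ==  3  ==  COMPCODE_LE

   so the result is a <= b; likewise (a <= b) && (a >= b) gives
   3 & 6 == 2 == COMPCODE_EQ, i.e. a == b.  */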
2692 \f
2693 /* Return nonzero if two operands (typically of the same tree node)
2694 are necessarily equal. If either argument has side-effects this
2695 function returns zero. FLAGS modifies behavior as follows:
2696
2697 If OEP_ONLY_CONST is set, only return nonzero for constants.
2698 This function tests whether the operands are indistinguishable;
2699 it does not test whether they are equal using C's == operation.
2700 The distinction is important for IEEE floating point, because
2701 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2702 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2703
2704 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2705 even though it may hold multiple values during a function.
2706 This is because a GCC tree node guarantees that nothing else is
2707 executed between the evaluation of its "operands" (which may often
2708 be evaluated in arbitrary order). Hence if the operands themselves
2709 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2710 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2711 unset means assuming isochronic (or instantaneous) tree equivalence.
2712 Unless comparing arbitrary expression trees, such as from different
2713 statements, this flag can usually be left unset.
2714
2715 If OEP_PURE_SAME is set, then pure functions with identical arguments
2716 are considered the same. It is used when the caller has other ways
2717 to ensure that global memory is unchanged in between. */
2718
2719 int
2720 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2721 {
2722 /* If either is ERROR_MARK, they aren't equal. */
2723 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2724 || TREE_TYPE (arg0) == error_mark_node
2725 || TREE_TYPE (arg1) == error_mark_node)
2726 return 0;
2727
2728 /* Similar, if either does not have a type (like a released SSA name),
2729 they aren't equal. */
2730 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2731 return 0;
2732
2733 /* Check equality of integer constants before bailing out due to
2734 precision differences. */
2735 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2736 return tree_int_cst_equal (arg0, arg1);
2737
2738 /* If both types don't have the same signedness, then we can't consider
2739 them equal. We must check this before the STRIP_NOPS calls
2740 because they may change the signedness of the arguments. As pointers
2741 strictly don't have a signedness, require either two pointers or
2742 two non-pointers as well. */
2743 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2744 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2745 return 0;
2746
2747 /* We cannot consider pointers to different address spaces equal. */
2748 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2749 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2750 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2751 return 0;
2752
2753 /* If both types don't have the same precision, then it is not safe
2754 to strip NOPs. */
2755 if (element_precision (TREE_TYPE (arg0))
2756 != element_precision (TREE_TYPE (arg1)))
2757 return 0;
2758
2759 STRIP_NOPS (arg0);
2760 STRIP_NOPS (arg1);
2761
2762 /* In case both args are comparisons but with different comparison
2763 code, try to swap the comparison operands of one arg to produce
2764 a match and compare that variant. */
2765 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2766 && COMPARISON_CLASS_P (arg0)
2767 && COMPARISON_CLASS_P (arg1))
2768 {
2769 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2770
2771 if (TREE_CODE (arg0) == swap_code)
2772 return operand_equal_p (TREE_OPERAND (arg0, 0),
2773 TREE_OPERAND (arg1, 1), flags)
2774 && operand_equal_p (TREE_OPERAND (arg0, 1),
2775 TREE_OPERAND (arg1, 0), flags);
2776 }
2777
2778 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2779 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2780 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2781 return 0;
2782
2783 /* This is needed for conversions and for COMPONENT_REF.
2784 Might as well play it safe and always test this. */
2785 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2786 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2787 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2788 return 0;
2789
2790 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2791 We don't care about side effects in that case because the SAVE_EXPR
2792 takes care of that for us. In all other cases, two expressions are
2793 equal if they have no side effects. If we have two identical
2794 expressions with side effects that should be treated the same due
2795 to the only side effects being identical SAVE_EXPR's, that will
2796 be detected in the recursive calls below.
2797 If we are taking an invariant address of two identical objects
2798 they are necessarily equal as well. */
2799 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2800 && (TREE_CODE (arg0) == SAVE_EXPR
2801 || (flags & OEP_CONSTANT_ADDRESS_OF)
2802 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2803 return 1;
2804
2805 /* Next handle constant cases, those for which we can return 1 even
2806 if ONLY_CONST is set. */
2807 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2808 switch (TREE_CODE (arg0))
2809 {
2810 case INTEGER_CST:
2811 return tree_int_cst_equal (arg0, arg1);
2812
2813 case FIXED_CST:
2814 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2815 TREE_FIXED_CST (arg1));
2816
2817 case REAL_CST:
2818 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2819 TREE_REAL_CST (arg1)))
2820 return 1;
2821
2822
2823 if (!HONOR_SIGNED_ZEROS (arg0))
2824 {
2825 /* If we do not distinguish between signed and unsigned zero,
2826 consider them equal. */
2827 if (real_zerop (arg0) && real_zerop (arg1))
2828 return 1;
2829 }
2830 return 0;
2831
2832 case VECTOR_CST:
2833 {
2834 unsigned i;
2835
2836 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2837 return 0;
2838
2839 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2840 {
2841 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2842 VECTOR_CST_ELT (arg1, i), flags))
2843 return 0;
2844 }
2845 return 1;
2846 }
2847
2848 case COMPLEX_CST:
2849 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2850 flags)
2851 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2852 flags));
2853
2854 case STRING_CST:
2855 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2856 && ! memcmp (TREE_STRING_POINTER (arg0),
2857 TREE_STRING_POINTER (arg1),
2858 TREE_STRING_LENGTH (arg0)));
2859
2860 case ADDR_EXPR:
2861 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2862 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2863 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2864 default:
2865 break;
2866 }
2867
2868 if (flags & OEP_ONLY_CONST)
2869 return 0;
2870
2871 /* Define macros to test an operand from arg0 and arg1 for equality and a
2872 variant that allows null and views null as being different from any
2873 non-null value. In the latter case, if either is null, then both
2874 must be; otherwise, do the normal comparison. */
2875 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2876 TREE_OPERAND (arg1, N), flags)
2877
2878 #define OP_SAME_WITH_NULL(N) \
2879 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2880 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2881
2882 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2883 {
2884 case tcc_unary:
2885 /* Two conversions are equal only if signedness and modes match. */
2886 switch (TREE_CODE (arg0))
2887 {
2888 CASE_CONVERT:
2889 case FIX_TRUNC_EXPR:
2890 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2891 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2892 return 0;
2893 break;
2894 default:
2895 break;
2896 }
2897
2898 return OP_SAME (0);
2899
2900
2901 case tcc_comparison:
2902 case tcc_binary:
2903 if (OP_SAME (0) && OP_SAME (1))
2904 return 1;
2905
2906 /* For commutative ops, allow the other order. */
2907 return (commutative_tree_code (TREE_CODE (arg0))
2908 && operand_equal_p (TREE_OPERAND (arg0, 0),
2909 TREE_OPERAND (arg1, 1), flags)
2910 && operand_equal_p (TREE_OPERAND (arg0, 1),
2911 TREE_OPERAND (arg1, 0), flags));
2912
2913 case tcc_reference:
2914 /* If either of the pointer (or reference) expressions we are
2915 dereferencing contain a side effect, these cannot be equal,
2916 but their addresses can be. */
2917 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2918 && (TREE_SIDE_EFFECTS (arg0)
2919 || TREE_SIDE_EFFECTS (arg1)))
2920 return 0;
2921
2922 switch (TREE_CODE (arg0))
2923 {
2924 case INDIRECT_REF:
2925 if (!(flags & OEP_ADDRESS_OF)
2926 && (TYPE_ALIGN (TREE_TYPE (arg0))
2927 != TYPE_ALIGN (TREE_TYPE (arg1))))
2928 return 0;
2929 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2930 return OP_SAME (0);
2931
2932 case REALPART_EXPR:
2933 case IMAGPART_EXPR:
2934 return OP_SAME (0);
2935
2936 case TARGET_MEM_REF:
2937 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2938 /* Require equal extra operands and then fall through to MEM_REF
2939 handling of the two common operands. */
2940 if (!OP_SAME_WITH_NULL (2)
2941 || !OP_SAME_WITH_NULL (3)
2942 || !OP_SAME_WITH_NULL (4))
2943 return 0;
2944 /* Fallthru. */
2945 case MEM_REF:
2946 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2947 /* Require equal access sizes, and similar pointer types.
2948 We can have incomplete types for array references of
2949 variable-sized arrays from the Fortran frontend
2950 though. Also verify the types are compatible. */
2951 return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2952 || (TYPE_SIZE (TREE_TYPE (arg0))
2953 && TYPE_SIZE (TREE_TYPE (arg1))
2954 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2955 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2956 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2957 && ((flags & OEP_ADDRESS_OF)
2958 || (alias_ptr_types_compatible_p
2959 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2960 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2961 && (MR_DEPENDENCE_CLIQUE (arg0)
2962 == MR_DEPENDENCE_CLIQUE (arg1))
2963 && (MR_DEPENDENCE_BASE (arg0)
2964 == MR_DEPENDENCE_BASE (arg1))
2965 && (TYPE_ALIGN (TREE_TYPE (arg0))
2966 == TYPE_ALIGN (TREE_TYPE (arg1)))))
2967 && OP_SAME (0) && OP_SAME (1));
2968
2969 case ARRAY_REF:
2970 case ARRAY_RANGE_REF:
2971 /* Operands 2 and 3 may be null.
2972 Compare the array index by value first if it is constant, as the
2973 indices may have different types but the same value here. */
2974 if (!OP_SAME (0))
2975 return 0;
2976 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2977 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2978 TREE_OPERAND (arg1, 1))
2979 || OP_SAME (1))
2980 && OP_SAME_WITH_NULL (2)
2981 && OP_SAME_WITH_NULL (3));
2982
2983 case COMPONENT_REF:
2984 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2985 may be NULL when we're called to compare MEM_EXPRs. */
2986 if (!OP_SAME_WITH_NULL (0)
2987 || !OP_SAME (1))
2988 return 0;
2989 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2990 return OP_SAME_WITH_NULL (2);
2991
2992 case BIT_FIELD_REF:
2993 if (!OP_SAME (0))
2994 return 0;
2995 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2996 return OP_SAME (1) && OP_SAME (2);
2997
2998 default:
2999 return 0;
3000 }
3001
3002 case tcc_expression:
3003 switch (TREE_CODE (arg0))
3004 {
3005 case ADDR_EXPR:
3006 return operand_equal_p (TREE_OPERAND (arg0, 0),
3007 TREE_OPERAND (arg1, 0),
3008 flags | OEP_ADDRESS_OF);
3009
3010 case TRUTH_NOT_EXPR:
3011 return OP_SAME (0);
3012
3013 case TRUTH_ANDIF_EXPR:
3014 case TRUTH_ORIF_EXPR:
3015 return OP_SAME (0) && OP_SAME (1);
3016
3017 case FMA_EXPR:
3018 case WIDEN_MULT_PLUS_EXPR:
3019 case WIDEN_MULT_MINUS_EXPR:
3020 if (!OP_SAME (2))
3021 return 0;
3022 /* The multiplication operands are commutative. */
3023 /* FALLTHRU */
3024
3025 case TRUTH_AND_EXPR:
3026 case TRUTH_OR_EXPR:
3027 case TRUTH_XOR_EXPR:
3028 if (OP_SAME (0) && OP_SAME (1))
3029 return 1;
3030
3031 /* Otherwise take into account this is a commutative operation. */
3032 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3033 TREE_OPERAND (arg1, 1), flags)
3034 && operand_equal_p (TREE_OPERAND (arg0, 1),
3035 TREE_OPERAND (arg1, 0), flags));
3036
3037 case COND_EXPR:
3038 case VEC_COND_EXPR:
3039 case DOT_PROD_EXPR:
3040 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3041
3042 default:
3043 return 0;
3044 }
3045
3046 case tcc_vl_exp:
3047 switch (TREE_CODE (arg0))
3048 {
3049 case CALL_EXPR:
3050 /* If the CALL_EXPRs call different functions, then they
3051 clearly cannot be equal. */
3052 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3053 flags))
3054 return 0;
3055
3056 {
3057 unsigned int cef = call_expr_flags (arg0);
3058 if (flags & OEP_PURE_SAME)
3059 cef &= ECF_CONST | ECF_PURE;
3060 else
3061 cef &= ECF_CONST;
3062 if (!cef)
3063 return 0;
3064 }
3065
3066 /* Now see if all the arguments are the same. */
3067 {
3068 const_call_expr_arg_iterator iter0, iter1;
3069 const_tree a0, a1;
3070 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3071 a1 = first_const_call_expr_arg (arg1, &iter1);
3072 a0 && a1;
3073 a0 = next_const_call_expr_arg (&iter0),
3074 a1 = next_const_call_expr_arg (&iter1))
3075 if (! operand_equal_p (a0, a1, flags))
3076 return 0;
3077
3078 /* If we get here and both argument lists are exhausted
3079 then the CALL_EXPRs are equal. */
3080 return ! (a0 || a1);
3081 }
3082 default:
3083 return 0;
3084 }
3085
3086 case tcc_declaration:
3087 /* Consider __builtin_sqrt equal to sqrt. */
3088 return (TREE_CODE (arg0) == FUNCTION_DECL
3089 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3090 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3091 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3092
3093 default:
3094 return 0;
3095 }
3096
3097 #undef OP_SAME
3098 #undef OP_SAME_WITH_NULL
3099 }
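
/* Illustrative sketch (not part of GCC): for REAL_CSTs the test above
   means "indistinguishable", not C's ==.  With signed zeros honored,
   +0.0 and -0.0 are not operand_equal_p even though -0.0 == 0.0, and
   two bit-identical NaNs are, even though NaN != NaN.  */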
3100 \f
3101 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3102 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3103
3104 When in doubt, return 0. */
3105
3106 static int
3107 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3108 {
3109 int unsignedp1, unsignedpo;
3110 tree primarg0, primarg1, primother;
3111 unsigned int correct_width;
3112
3113 if (operand_equal_p (arg0, arg1, 0))
3114 return 1;
3115
3116 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3117 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3118 return 0;
3119
3120 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3121 and see if the inner values are the same. This removes any
3122 signedness comparison, which doesn't matter here. */
3123 primarg0 = arg0, primarg1 = arg1;
3124 STRIP_NOPS (primarg0);
3125 STRIP_NOPS (primarg1);
3126 if (operand_equal_p (primarg0, primarg1, 0))
3127 return 1;
3128
3129 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3130 actual comparison operand, ARG0.
3131
3132 First throw away any conversions to wider types
3133 already present in the operands. */
3134
3135 primarg1 = get_narrower (arg1, &unsignedp1);
3136 primother = get_narrower (other, &unsignedpo);
3137
3138 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3139 if (unsignedp1 == unsignedpo
3140 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3141 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3142 {
3143 tree type = TREE_TYPE (arg0);
3144
3145 /* Make sure the shorter operand is extended the right way
3146 to match the longer operand. */
3147 primarg1 = fold_convert (signed_or_unsigned_type_for
3148 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3149
3150 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3151 return 1;
3152 }
3153
3154 return 0;
3155 }
3156 \f
3157 /* See if ARG is an expression that is either a comparison or is performing
3158 arithmetic on comparisons. The comparisons must only be comparing
3159 two different values, which will be stored in *CVAL1 and *CVAL2; if
3160 they are nonzero it means that some operands have already been found.
3161 No variables may be used anywhere else in the expression except in the
3162 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3163 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3164
3165 If this is true, return 1. Otherwise, return zero. */
3166
3167 static int
3168 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3169 {
3170 enum tree_code code = TREE_CODE (arg);
3171 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3172
3173 /* We can handle some of the tcc_expression cases here. */
3174 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3175 tclass = tcc_unary;
3176 else if (tclass == tcc_expression
3177 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3178 || code == COMPOUND_EXPR))
3179 tclass = tcc_binary;
3180
3181 else if (tclass == tcc_expression && code == SAVE_EXPR
3182 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3183 {
3184 /* If we've already found a CVAL1 or CVAL2, this expression is
3185 too complex to handle. */
3186 if (*cval1 || *cval2)
3187 return 0;
3188
3189 tclass = tcc_unary;
3190 *save_p = 1;
3191 }
3192
3193 switch (tclass)
3194 {
3195 case tcc_unary:
3196 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3197
3198 case tcc_binary:
3199 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3200 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3201 cval1, cval2, save_p));
3202
3203 case tcc_constant:
3204 return 1;
3205
3206 case tcc_expression:
3207 if (code == COND_EXPR)
3208 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3209 cval1, cval2, save_p)
3210 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3211 cval1, cval2, save_p)
3212 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3213 cval1, cval2, save_p));
3214 return 0;
3215
3216 case tcc_comparison:
3217 /* First see if we can handle the first operand, then the second. For
3218 the second operand, we know *CVAL1 can't be zero. It must be that
3219 one side of the comparison is each of the values; test for the
3220 case where this isn't true by failing if the two operands
3221 are the same. */
3222
3223 if (operand_equal_p (TREE_OPERAND (arg, 0),
3224 TREE_OPERAND (arg, 1), 0))
3225 return 0;
3226
3227 if (*cval1 == 0)
3228 *cval1 = TREE_OPERAND (arg, 0);
3229 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3230 ;
3231 else if (*cval2 == 0)
3232 *cval2 = TREE_OPERAND (arg, 0);
3233 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3234 ;
3235 else
3236 return 0;
3237
3238 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3239 ;
3240 else if (*cval2 == 0)
3241 *cval2 = TREE_OPERAND (arg, 1);
3242 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3243 ;
3244 else
3245 return 0;
3246
3247 return 1;
3248
3249 default:
3250 return 0;
3251 }
3252 }
3253 \f
3254 /* ARG is a tree that is known to contain just arithmetic operations and
3255 comparisons. Evaluate the operations in the tree substituting NEW0 for
3256 any occurrence of OLD0 as an operand of a comparison and likewise for
3257 NEW1 and OLD1. */
3258
3259 static tree
3260 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3261 tree old1, tree new1)
3262 {
3263 tree type = TREE_TYPE (arg);
3264 enum tree_code code = TREE_CODE (arg);
3265 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3266
3267 /* We can handle some of the tcc_expression cases here. */
3268 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3269 tclass = tcc_unary;
3270 else if (tclass == tcc_expression
3271 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3272 tclass = tcc_binary;
3273
3274 switch (tclass)
3275 {
3276 case tcc_unary:
3277 return fold_build1_loc (loc, code, type,
3278 eval_subst (loc, TREE_OPERAND (arg, 0),
3279 old0, new0, old1, new1));
3280
3281 case tcc_binary:
3282 return fold_build2_loc (loc, code, type,
3283 eval_subst (loc, TREE_OPERAND (arg, 0),
3284 old0, new0, old1, new1),
3285 eval_subst (loc, TREE_OPERAND (arg, 1),
3286 old0, new0, old1, new1));
3287
3288 case tcc_expression:
3289 switch (code)
3290 {
3291 case SAVE_EXPR:
3292 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3293 old1, new1);
3294
3295 case COMPOUND_EXPR:
3296 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3297 old1, new1);
3298
3299 case COND_EXPR:
3300 return fold_build3_loc (loc, code, type,
3301 eval_subst (loc, TREE_OPERAND (arg, 0),
3302 old0, new0, old1, new1),
3303 eval_subst (loc, TREE_OPERAND (arg, 1),
3304 old0, new0, old1, new1),
3305 eval_subst (loc, TREE_OPERAND (arg, 2),
3306 old0, new0, old1, new1));
3307 default:
3308 break;
3309 }
3310 /* Fall through - ??? */
3311
3312 case tcc_comparison:
3313 {
3314 tree arg0 = TREE_OPERAND (arg, 0);
3315 tree arg1 = TREE_OPERAND (arg, 1);
3316
3317 /* We need to check both for exact equality and tree equality. The
3318 former will be true if the operand has a side-effect. In that
3319 case, we know the operand occurred exactly once. */
3320
3321 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3322 arg0 = new0;
3323 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3324 arg0 = new1;
3325
3326 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3327 arg1 = new0;
3328 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3329 arg1 = new1;
3330
3331 return fold_build2_loc (loc, code, type, arg0, arg1);
3332 }
3333
3334 default:
3335 return arg;
3336 }
3337 }
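
/* Illustrative sketch (not part of GCC): for ARG = (a < b) || (b < a)
   with OLD0 = a, NEW0 = 0, OLD1 = b, NEW1 = 1, the recursion rebuilds
   (0 < 1) || (1 < 0), which the fold_build* calls collapse to a
   constant.  Callers use this to evaluate such an expression under an
   assumed ordering of the two compared values.  */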
3338 \f
3339 /* Return a tree for the case when the result of an expression is RESULT
3340 converted to TYPE and OMITTED was previously an operand of the expression
3341 but is now not needed (e.g., we folded OMITTED * 0).
3342
3343 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3344 the conversion of RESULT to TYPE. */
3345
3346 tree
3347 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3348 {
3349 tree t = fold_convert_loc (loc, type, result);
3350
3351 /* If the resulting operand is an empty statement, just return the omitted
3352 statement cast to void. */
3353 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3354 return build1_loc (loc, NOP_EXPR, void_type_node,
3355 fold_ignored_result (omitted));
3356
3357 if (TREE_SIDE_EFFECTS (omitted))
3358 return build2_loc (loc, COMPOUND_EXPR, type,
3359 fold_ignored_result (omitted), t);
3360
3361 return non_lvalue_loc (loc, t);
3362 }
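
/* Illustrative sketch (not part of GCC): when folding f () * 0 the
   multiplication goes away but the call must still be evaluated, so

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   yields COMPOUND_EXPR <f (), 0>; for a side-effect-free OMITTED the
   result is simply 0 converted to TYPE.  */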
3363
3364 /* Return a tree for the case when the result of an expression is RESULT
3365 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3366 of the expression but are now not needed.
3367
3368 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3369 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3370 evaluated before OMITTED2. Otherwise, if neither has side effects,
3371 just do the conversion of RESULT to TYPE. */
3372
3373 tree
3374 omit_two_operands_loc (location_t loc, tree type, tree result,
3375 tree omitted1, tree omitted2)
3376 {
3377 tree t = fold_convert_loc (loc, type, result);
3378
3379 if (TREE_SIDE_EFFECTS (omitted2))
3380 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3381 if (TREE_SIDE_EFFECTS (omitted1))
3382 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3383
3384 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3385 }
3386
3387 \f
3388 /* Return a simplified tree node for the truth-negation of ARG. This
3389 never alters ARG itself. We assume that ARG is an operation that
3390 returns a truth value (0 or 1).
3391
3392 FIXME: one would think we would fold the result, but it causes
3393 problems with the dominator optimizer. */
3394
3395 static tree
3396 fold_truth_not_expr (location_t loc, tree arg)
3397 {
3398 tree type = TREE_TYPE (arg);
3399 enum tree_code code = TREE_CODE (arg);
3400 location_t loc1, loc2;
3401
3402 /* If this is a comparison, we can simply invert it, except for
3403 floating-point non-equality comparisons, in which case we just
3404 enclose a TRUTH_NOT_EXPR around what we have. */
3405
3406 if (TREE_CODE_CLASS (code) == tcc_comparison)
3407 {
3408 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3409 if (FLOAT_TYPE_P (op_type)
3410 && flag_trapping_math
3411 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3412 && code != NE_EXPR && code != EQ_EXPR)
3413 return NULL_TREE;
3414
3415 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3416 if (code == ERROR_MARK)
3417 return NULL_TREE;
3418
3419 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3420 TREE_OPERAND (arg, 1));
3421 }
3422
3423 switch (code)
3424 {
3425 case INTEGER_CST:
3426 return constant_boolean_node (integer_zerop (arg), type);
3427
3428 case TRUTH_AND_EXPR:
3429 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3430 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3431 return build2_loc (loc, TRUTH_OR_EXPR, type,
3432 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3433 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3434
3435 case TRUTH_OR_EXPR:
3436 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3437 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3438 return build2_loc (loc, TRUTH_AND_EXPR, type,
3439 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3440 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3441
3442 case TRUTH_XOR_EXPR:
3443 /* Here we can invert either operand. We invert the first operand
3444 unless the second operand is a TRUTH_NOT_EXPR in which case our
3445 result is the XOR of the first operand with the inside of the
3446 negation of the second operand. */
3447
3448 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3449 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3450 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3451 else
3452 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3453 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3454 TREE_OPERAND (arg, 1));
3455
3456 case TRUTH_ANDIF_EXPR:
3457 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3458 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3459 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3460 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3461 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3462
3463 case TRUTH_ORIF_EXPR:
3464 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3465 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3466 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3467 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3468 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3469
3470 case TRUTH_NOT_EXPR:
3471 return TREE_OPERAND (arg, 0);
3472
3473 case COND_EXPR:
3474 {
3475 tree arg1 = TREE_OPERAND (arg, 1);
3476 tree arg2 = TREE_OPERAND (arg, 2);
3477
3478 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3479 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3480
3481 /* A COND_EXPR may have a throw as one operand, which
3482 then has void type. Just leave void operands
3483 as they are. */
3484 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3485 VOID_TYPE_P (TREE_TYPE (arg1))
3486 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3487 VOID_TYPE_P (TREE_TYPE (arg2))
3488 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3489 }
3490
3491 case COMPOUND_EXPR:
3492 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3493 return build2_loc (loc, COMPOUND_EXPR, type,
3494 TREE_OPERAND (arg, 0),
3495 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3496
3497 case NON_LVALUE_EXPR:
3498 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3499 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3500
3501 CASE_CONVERT:
3502 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3503 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3504
3505 /* ... fall through ... */
3506
3507 case FLOAT_EXPR:
3508 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3509 return build1_loc (loc, TREE_CODE (arg), type,
3510 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3511
3512 case BIT_AND_EXPR:
3513 if (!integer_onep (TREE_OPERAND (arg, 1)))
3514 return NULL_TREE;
3515 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3516
3517 case SAVE_EXPR:
3518 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3519
3520 case CLEANUP_POINT_EXPR:
3521 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3522 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3523 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3524
3525 default:
3526 return NULL_TREE;
3527 }
3528 }
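
/* Illustrative sketch (not part of GCC) of the rewrites above:

     !(a && b)  ->  !a || !b       (De Morgan)
     !(a < b)   ->  a >= b         (integral operands)
     !(x < y)   ->  NULL_TREE      (floating point with trapping math;
                                    the caller keeps a TRUTH_NOT_EXPR)  */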
3529
3530 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3531 assume that ARG is an operation that returns a truth value (0 or 1
3532 for scalars, 0 or -1 for vectors). Return the folded expression if
3533 folding is successful. Otherwise, return NULL_TREE. */
3534
3535 static tree
3536 fold_invert_truthvalue (location_t loc, tree arg)
3537 {
3538 tree type = TREE_TYPE (arg);
3539 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3540 ? BIT_NOT_EXPR
3541 : TRUTH_NOT_EXPR,
3542 type, arg);
3543 }
3544
3545 /* Return a simplified tree node for the truth-negation of ARG. This
3546 never alters ARG itself. We assume that ARG is an operation that
3547 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3548
3549 tree
3550 invert_truthvalue_loc (location_t loc, tree arg)
3551 {
3552 if (TREE_CODE (arg) == ERROR_MARK)
3553 return arg;
3554
3555 tree type = TREE_TYPE (arg);
3556 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3557 ? BIT_NOT_EXPR
3558 : TRUTH_NOT_EXPR,
3559 type, arg);
3560 }
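
/* Illustrative note (not part of GCC): scalar truth values are 0/1,
   so TRUTH_NOT_EXPR negates them, while vector comparisons yield
   0 or -1 per lane, for which BIT_NOT_EXPR is the correct involution:
   ~0 == -1 and ~(-1) == 0 in each lane.  */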
3561
3562 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3563 operands are another bit-wise operation with a common input. If so,
3564 distribute the bit operations to save an operation and possibly two if
3565 constants are involved. For example, convert
3566 (A | B) & (A | C) into A | (B & C)
3567 Further simplification will occur if B and C are constants.
3568
3569 If this optimization cannot be done, 0 will be returned. */
3570
3571 static tree
3572 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3573 tree arg0, tree arg1)
3574 {
3575 tree common;
3576 tree left, right;
3577
3578 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3579 || TREE_CODE (arg0) == code
3580 || (TREE_CODE (arg0) != BIT_AND_EXPR
3581 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3582 return 0;
3583
3584 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3585 {
3586 common = TREE_OPERAND (arg0, 0);
3587 left = TREE_OPERAND (arg0, 1);
3588 right = TREE_OPERAND (arg1, 1);
3589 }
3590 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3591 {
3592 common = TREE_OPERAND (arg0, 0);
3593 left = TREE_OPERAND (arg0, 1);
3594 right = TREE_OPERAND (arg1, 0);
3595 }
3596 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3597 {
3598 common = TREE_OPERAND (arg0, 1);
3599 left = TREE_OPERAND (arg0, 0);
3600 right = TREE_OPERAND (arg1, 1);
3601 }
3602 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3603 {
3604 common = TREE_OPERAND (arg0, 1);
3605 left = TREE_OPERAND (arg0, 0);
3606 right = TREE_OPERAND (arg1, 0);
3607 }
3608 else
3609 return 0;
3610
3611 common = fold_convert_loc (loc, type, common);
3612 left = fold_convert_loc (loc, type, left);
3613 right = fold_convert_loc (loc, type, right);
3614 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3615 fold_build2_loc (loc, code, type, left, right));
3616 }
3617
3618 /* Knowing that ARG0 and ARG1 are each either an RDIV_EXPR or a MULT_EXPR,
3619 simplify a binary operation with code CODE. This optimization is unsafe. */
3620 static tree
3621 distribute_real_division (location_t loc, enum tree_code code, tree type,
3622 tree arg0, tree arg1)
3623 {
3624 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3625 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3626
3627 /* (A / C) +- (B / C) -> (A +- B) / C. */
3628 if (mul0 == mul1
3629 && operand_equal_p (TREE_OPERAND (arg0, 1),
3630 TREE_OPERAND (arg1, 1), 0))
3631 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3632 fold_build2_loc (loc, code, type,
3633 TREE_OPERAND (arg0, 0),
3634 TREE_OPERAND (arg1, 0)),
3635 TREE_OPERAND (arg0, 1));
3636
3637 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3638 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3639 TREE_OPERAND (arg1, 0), 0)
3640 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3641 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3642 {
3643 REAL_VALUE_TYPE r0, r1;
3644 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3645 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3646 if (!mul0)
3647 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3648 if (!mul1)
3649 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3650 real_arithmetic (&r0, code, &r0, &r1);
3651 return fold_build2_loc (loc, MULT_EXPR, type,
3652 TREE_OPERAND (arg0, 0),
3653 build_real (type, r0));
3654 }
3655
3656 return NULL_TREE;
3657 }
3658 \f
3659 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3660 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3661
3662 static tree
3663 make_bit_field_ref (location_t loc, tree inner, tree type,
3664 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3665 {
3666 tree result, bftype;
3667
3668 if (bitpos == 0)
3669 {
3670 tree size = TYPE_SIZE (TREE_TYPE (inner));
3671 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3672 || POINTER_TYPE_P (TREE_TYPE (inner)))
3673 && tree_fits_shwi_p (size)
3674 && tree_to_shwi (size) == bitsize)
3675 return fold_convert_loc (loc, type, inner);
3676 }
3677
3678 bftype = type;
3679 if (TYPE_PRECISION (bftype) != bitsize
3680 || TYPE_UNSIGNED (bftype) == !unsignedp)
3681 bftype = build_nonstandard_integer_type (bitsize, 0);
3682
3683 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3684 size_int (bitsize), bitsize_int (bitpos));
3685
3686 if (bftype != type)
3687 result = fold_convert_loc (loc, type, result);
3688
3689 return result;
3690 }
3691
3692 /* Optimize a bit-field compare.
3693
3694 There are two cases: First is a compare against a constant and the
3695 second is a comparison of two items where the fields are at the same
3696 bit position relative to the start of a chunk (byte, halfword, word)
3697 large enough to contain it. In these cases we can avoid the shift
3698 implicit in bitfield extractions.
3699
3700 For constants, we emit a compare of the shifted constant with the
3701 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3702 compared. For two fields at the same position, we do the ANDs with the
3703 similar mask and compare the result of the ANDs.
3704
3705 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3706 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3707 are the left and right operands of the comparison, respectively.
3708
3709 If the optimization described above can be done, we return the resulting
3710 tree. Otherwise we return zero. */
3711
3712 static tree
3713 optimize_bit_field_compare (location_t loc, enum tree_code code,
3714 tree compare_type, tree lhs, tree rhs)
3715 {
3716 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3717 tree type = TREE_TYPE (lhs);
3718 tree unsigned_type;
3719 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3720 machine_mode lmode, rmode, nmode;
3721 int lunsignedp, runsignedp;
3722 int lvolatilep = 0, rvolatilep = 0;
3723 tree linner, rinner = NULL_TREE;
3724 tree mask;
3725 tree offset;
3726
3727 /* Get all the information about the extractions being done. If the bit size
3728 is the same as the size of the underlying object, we aren't doing an
3729 extraction at all and so can do nothing. We also don't want to
3730 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3731 then will no longer be able to replace it. */
3732 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3733 &lunsignedp, &lvolatilep, false);
3734 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3735 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3736 return 0;
3737
3738 if (!const_p)
3739 {
3740 /* If this is not a constant, we can only do something if bit positions,
3741 sizes, and signedness are the same. */
3742 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3743 &runsignedp, &rvolatilep, false);
3744
3745 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3746 || lunsignedp != runsignedp || offset != 0
3747 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3748 return 0;
3749 }
3750
3751 /* See if we can find a mode to refer to this field. We should be able to,
3752 but fail if we can't. */
3753 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3754 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3755 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3756 TYPE_ALIGN (TREE_TYPE (rinner))),
3757 word_mode, false);
3758 if (nmode == VOIDmode)
3759 return 0;
3760
3761 /* Set signed and unsigned types of the precision of this mode for the
3762 shifts below. */
3763 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3764
3765 /* Compute the bit position and size for the new reference and our offset
3766 within it. If the new reference is the same size as the original, we
3767 won't optimize anything, so return zero. */
3768 nbitsize = GET_MODE_BITSIZE (nmode);
3769 nbitpos = lbitpos & ~ (nbitsize - 1);
3770 lbitpos -= nbitpos;
3771 if (nbitsize == lbitsize)
3772 return 0;
3773
3774 if (BYTES_BIG_ENDIAN)
3775 lbitpos = nbitsize - lbitsize - lbitpos;
3776
3777 /* Make the mask to be used against the extracted field. */
3778 mask = build_int_cst_type (unsigned_type, -1);
3779 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3780 mask = const_binop (RSHIFT_EXPR, mask,
3781 size_int (nbitsize - lbitsize - lbitpos));
3782
3783 if (! const_p)
3784 /* If not comparing with a constant, just rework the comparison
3785 and return. */
3786 return fold_build2_loc (loc, code, compare_type,
3787 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3788 make_bit_field_ref (loc, linner,
3789 unsigned_type,
3790 nbitsize, nbitpos,
3791 1),
3792 mask),
3793 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3794 make_bit_field_ref (loc, rinner,
3795 unsigned_type,
3796 nbitsize, nbitpos,
3797 1),
3798 mask));
3799
3800 /* Otherwise, we are handling the constant case. See if the constant is too
3801 big for the field. Warn and return a tree for 0 (false) if so. We do
3802 this not only for its own sake, but to avoid having to test for this
3803 error case below. If we didn't, we might generate wrong code.
3804
3805 For unsigned fields, the constant shifted right by the field length should
3806 be all zero. For signed fields, the high-order bits should agree with
3807 the sign bit. */
3808
3809 if (lunsignedp)
3810 {
3811 if (wi::lrshift (rhs, lbitsize) != 0)
3812 {
3813 warning (0, "comparison is always %d due to width of bit-field",
3814 code == NE_EXPR);
3815 return constant_boolean_node (code == NE_EXPR, compare_type);
3816 }
3817 }
3818 else
3819 {
3820 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3821 if (tem != 0 && tem != -1)
3822 {
3823 warning (0, "comparison is always %d due to width of bit-field",
3824 code == NE_EXPR);
3825 return constant_boolean_node (code == NE_EXPR, compare_type);
3826 }
3827 }
3828
3829 /* Single-bit compares should always be against zero. */
3830 if (lbitsize == 1 && ! integer_zerop (rhs))
3831 {
3832 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3833 rhs = build_int_cst (type, 0);
3834 }
3835
3836 /* Make a new bitfield reference, shift the constant over the
3837 appropriate number of bits and mask it with the computed mask
3838 (in case this was a signed field). If we changed it, make a new one. */
3839 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3840
3841 rhs = const_binop (BIT_AND_EXPR,
3842 const_binop (LSHIFT_EXPR,
3843 fold_convert_loc (loc, unsigned_type, rhs),
3844 size_int (lbitpos)),
3845 mask);
3846
3847 lhs = build2_loc (loc, code, compare_type,
3848 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3849 return lhs;
3850 }
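
/* Illustrative sketch, not part of GCC: for a 3-bit field stored at bit
   offset 4 of a byte, the shift implicit in the extraction can be avoided
   by masking the containing byte and comparing against the pre-shifted
   constant, which is what the constant case above produces. The helper
   names are hypothetical. */
static int
bitfield_cmp_naive (unsigned char byte)
{
  return ((byte >> 4) & 7) == 5;	/* extract the field, then compare */
}
static int
bitfield_cmp_folded (unsigned char byte)
{
  return (byte & (7 << 4)) == (5 << 4);	/* mask, compare shifted constant */
}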
3851 \f
3852 /* Subroutine for fold_truth_andor_1: decode a field reference.
3853
3854 If EXP is a comparison reference, we return the innermost reference.
3855
3856 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3857 set to the starting bit number.
3858
3859 If the innermost field can be completely contained in a mode-sized
3860 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3861
3862 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3863 otherwise it is not changed.
3864
3865 *PUNSIGNEDP is set to the signedness of the field.
3866
3867 *PMASK is set to the mask used. This is either contained in a
3868 BIT_AND_EXPR or derived from the width of the field.
3869
3870 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3871
3872 Return 0 if this is not a component reference or is one that we can't
3873 do anything with. */
3874
3875 static tree
3876 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3877 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3878 int *punsignedp, int *pvolatilep,
3879 tree *pmask, tree *pand_mask)
3880 {
3881 tree outer_type = 0;
3882 tree and_mask = 0;
3883 tree mask, inner, offset;
3884 tree unsigned_type;
3885 unsigned int precision;
3886
3887 /* All the optimizations using this function assume integer fields.
3888 There are problems with FP fields since the type_for_size call
3889 below can fail for, e.g., XFmode. */
3890 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3891 return 0;
3892
3893 /* We are interested in the bare arrangement of bits, so strip everything
3894 that doesn't affect the machine mode. However, record the type of the
3895 outermost expression if it may matter below. */
3896 if (CONVERT_EXPR_P (exp)
3897 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3898 outer_type = TREE_TYPE (exp);
3899 STRIP_NOPS (exp);
3900
3901 if (TREE_CODE (exp) == BIT_AND_EXPR)
3902 {
3903 and_mask = TREE_OPERAND (exp, 1);
3904 exp = TREE_OPERAND (exp, 0);
3905 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3906 if (TREE_CODE (and_mask) != INTEGER_CST)
3907 return 0;
3908 }
3909
3910 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3911 punsignedp, pvolatilep, false);
3912 if ((inner == exp && and_mask == 0)
3913 || *pbitsize < 0 || offset != 0
3914 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3915 return 0;
3916
3917 /* If the number of bits in the reference is the same as the bitsize of
3918 the outer type, then the outer type gives the signedness. Otherwise
3919 (in case of a small bitfield) the signedness is unchanged. */
3920 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3921 *punsignedp = TYPE_UNSIGNED (outer_type);
3922
3923 /* Compute the mask to access the bitfield. */
3924 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3925 precision = TYPE_PRECISION (unsigned_type);
3926
3927 mask = build_int_cst_type (unsigned_type, -1);
3928
3929 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3930 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3931
3932 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3933 if (and_mask != 0)
3934 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3935 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3936
3937 *pmask = mask;
3938 *pand_mask = and_mask;
3939 return inner;
3940 }
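
/* Illustrative sketch, not part of GCC: the LSHIFT/RSHIFT pair above
   builds a mask of *PBITSIZE low-order ones. The hypothetical helper
   below shows the same trick for a 32-bit precision; it assumes
   0 < nbits <= 32. */
static unsigned
low_order_mask (int nbits)
{
  unsigned mask = ~0u;
  mask <<= 32 - nbits;		/* LSHIFT_EXPR by precision - *pbitsize */
  mask >>= 32 - nbits;		/* RSHIFT_EXPR by precision - *pbitsize */
  return mask;			/* equals (1u << nbits) - 1 for nbits < 32 */
}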
3941
3942 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3943 bit positions and MASK has a signed type. */
3944
3945 static int
3946 all_ones_mask_p (const_tree mask, unsigned int size)
3947 {
3948 tree type = TREE_TYPE (mask);
3949 unsigned int precision = TYPE_PRECISION (type);
3950
3951 /* If this function returns true when the type of the mask is
3952 UNSIGNED, then there will be errors. In particular see
3953 gcc.c-torture/execute/990326-1.c. There does not appear to be
3954 any documentation paper trail as to why this is so. But the
3955 pre-wide-int code worked with that restriction and it has been preserved
3956 here. */
3957 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3958 return false;
3959
3960 return wi::mask (size, false, precision) == mask;
3961 }
3962
3963 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3964 represents the sign bit of EXP's type. If EXP represents a sign
3965 or zero extension, also test VAL against the unextended type.
3966 The return value is the (sub)expression whose sign bit is VAL,
3967 or NULL_TREE otherwise. */
3968
3969 tree
3970 sign_bit_p (tree exp, const_tree val)
3971 {
3972 int width;
3973 tree t;
3974
3975 /* Tree EXP must have an integral type. */
3976 t = TREE_TYPE (exp);
3977 if (! INTEGRAL_TYPE_P (t))
3978 return NULL_TREE;
3979
3980 /* Tree VAL must be an integer constant. */
3981 if (TREE_CODE (val) != INTEGER_CST
3982 || TREE_OVERFLOW (val))
3983 return NULL_TREE;
3984
3985 width = TYPE_PRECISION (t);
3986 if (wi::only_sign_bit_p (val, width))
3987 return exp;
3988
3989 /* Handle extension from a narrower type. */
3990 if (TREE_CODE (exp) == NOP_EXPR
3991 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3992 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3993
3994 return NULL_TREE;
3995 }
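
/* Illustrative sketch, not part of GCC: for an 8-bit signed type, only
   the bit pattern 0x80 (the value -128) has just the sign bit set, so
   testing that bit is the same as testing the sign. The helpers are
   hypothetical. */
static int
sign_test_naive (signed char x)
{
  return x < 0;
}
static int
sign_test_bit (signed char x)
{
  return (x & 0x80) != 0;	/* 0x80 is the sign bit of the 8-bit type */
}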
3996
3997 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3998 to be evaluated unconditionally. */
3999
4000 static int
4001 simple_operand_p (const_tree exp)
4002 {
4003 /* Strip any conversions that don't change the machine mode. */
4004 STRIP_NOPS (exp);
4005
4006 return (CONSTANT_CLASS_P (exp)
4007 || TREE_CODE (exp) == SSA_NAME
4008 || (DECL_P (exp)
4009 && ! TREE_ADDRESSABLE (exp)
4010 && ! TREE_THIS_VOLATILE (exp)
4011 && ! DECL_NONLOCAL (exp)
4012 /* Don't regard global variables as simple. They may be
4013 allocated in ways unknown to the compiler (shared memory,
4014 #pragma weak, etc). */
4015 && ! TREE_PUBLIC (exp)
4016 && ! DECL_EXTERNAL (exp)
4017 /* Weakrefs are not safe to be read, since they can be NULL.
4018 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4019 have DECL_WEAK flag set. */
4020 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4021 /* Loading a static variable is unduly expensive, but global
4022 registers aren't expensive. */
4023 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4024 }
4025
4026 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4027 to be evaluated unconditionally.
4028 In addition to simple_operand_p, we assume that comparisons, conversions,
4029 and logic-not operations are simple if their operands are simple, too. */
4030
4031 static bool
4032 simple_operand_p_2 (tree exp)
4033 {
4034 enum tree_code code;
4035
4036 if (TREE_SIDE_EFFECTS (exp)
4037 || tree_could_trap_p (exp))
4038 return false;
4039
4040 while (CONVERT_EXPR_P (exp))
4041 exp = TREE_OPERAND (exp, 0);
4042
4043 code = TREE_CODE (exp);
4044
4045 if (TREE_CODE_CLASS (code) == tcc_comparison)
4046 return (simple_operand_p (TREE_OPERAND (exp, 0))
4047 && simple_operand_p (TREE_OPERAND (exp, 1)));
4048
4049 if (code == TRUTH_NOT_EXPR)
4050 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4051
4052 return simple_operand_p (exp);
4053 }
4054
4055 \f
4056 /* The following functions are subroutines to fold_range_test and allow it to
4057 try to change a logical combination of comparisons into a range test.
4058
4059 For example, both
4060 X == 2 || X == 3 || X == 4 || X == 5
4061 and
4062 X >= 2 && X <= 5
4063 are converted to
4064 (unsigned) (X - 2) <= 3
4065
4066 We describe each set of comparisons as being either inside or outside
4067 a range, using a variable named like IN_P, and then describe the
4068 range with a lower and upper bound. If one of the bounds is omitted,
4069 it represents either the highest or lowest value of the type.
4070
4071 In the comments below, we represent a range by two numbers in brackets
4072 preceded by a "+" to designate being inside that range, or a "-" to
4073 designate being outside that range, so the condition can be inverted by
4074 flipping the prefix. An omitted bound is represented by a "-". For
4075 example, "- [-, 10]" means being outside the range starting at the lowest
4076 possible value and ending at 10, in other words, being greater than 10.
4077 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4078 always false.
4079
4080 We set up things so that the missing bounds are handled in a consistent
4081 manner so neither a missing bound nor "true" and "false" need to be
4082 handled using a special case. */
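
/* Illustrative sketch, not part of GCC: both forms of the range test in
   the comment above reduce to a single unsigned compare. The helpers
   are hypothetical; the cast is done before the subtraction to keep the
   arithmetic well defined for all inputs. */
static int
range_test_naive (int x)
{
  return x == 2 || x == 3 || x == 4 || x == 5;
}
static int
range_test_folded (int x)
{
  return (unsigned) x - 2u <= 3u;	/* (unsigned) (X - 2) <= 3 */
}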
4083
4084 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4085 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4086 and UPPER1_P are nonzero if the respective argument is an upper bound
4087 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4088 must be specified for a comparison. ARG1 will be converted to ARG0's
4089 type if both are specified. */
4090
4091 static tree
4092 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4093 tree arg1, int upper1_p)
4094 {
4095 tree tem;
4096 int result;
4097 int sgn0, sgn1;
4098
4099 /* If neither arg represents infinity, do the normal operation.
4100 Else, if not a comparison, return infinity. Else handle the special
4101 comparison rules. Note that most of the cases below won't occur, but
4102 are handled for consistency. */
4103
4104 if (arg0 != 0 && arg1 != 0)
4105 {
4106 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4107 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4108 STRIP_NOPS (tem);
4109 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4110 }
4111
4112 if (TREE_CODE_CLASS (code) != tcc_comparison)
4113 return 0;
4114
4115 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4116 for neither. In real maths, we cannot assume open ended ranges are
4117 the same. But, this is computer arithmetic, where numbers are finite.
4118 We can therefore make the transformation of any unbounded range with
4119 the value Z, Z being greater than any representable number. This permits
4120 us to treat unbounded ranges as equal. */
4121 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4122 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4123 switch (code)
4124 {
4125 case EQ_EXPR:
4126 result = sgn0 == sgn1;
4127 break;
4128 case NE_EXPR:
4129 result = sgn0 != sgn1;
4130 break;
4131 case LT_EXPR:
4132 result = sgn0 < sgn1;
4133 break;
4134 case LE_EXPR:
4135 result = sgn0 <= sgn1;
4136 break;
4137 case GT_EXPR:
4138 result = sgn0 > sgn1;
4139 break;
4140 case GE_EXPR:
4141 result = sgn0 >= sgn1;
4142 break;
4143 default:
4144 gcc_unreachable ();
4145 }
4146
4147 return constant_boolean_node (result, type);
4148 }
4149 \f
4150 /* Helper routine for make_range. Perform one step for it, return
4151 new expression if the loop should continue or NULL_TREE if it should
4152 stop. */
4153
4154 tree
4155 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4156 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4157 bool *strict_overflow_p)
4158 {
4159 tree arg0_type = TREE_TYPE (arg0);
4160 tree n_low, n_high, low = *p_low, high = *p_high;
4161 int in_p = *p_in_p, n_in_p;
4162
4163 switch (code)
4164 {
4165 case TRUTH_NOT_EXPR:
4166 /* We can only do something if the range is testing for zero. */
4167 if (low == NULL_TREE || high == NULL_TREE
4168 || ! integer_zerop (low) || ! integer_zerop (high))
4169 return NULL_TREE;
4170 *p_in_p = ! in_p;
4171 return arg0;
4172
4173 case EQ_EXPR: case NE_EXPR:
4174 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4175 /* We can only do something if the range is testing for zero
4176 and if the second operand is an integer constant. Note that
4177 saying something is "in" the range we make is done by
4178 complementing IN_P since it will be set in the initial case of
4179 being not equal to zero; "out" is leaving it alone. */
4180 if (low == NULL_TREE || high == NULL_TREE
4181 || ! integer_zerop (low) || ! integer_zerop (high)
4182 || TREE_CODE (arg1) != INTEGER_CST)
4183 return NULL_TREE;
4184
4185 switch (code)
4186 {
4187 case NE_EXPR: /* - [c, c] */
4188 low = high = arg1;
4189 break;
4190 case EQ_EXPR: /* + [c, c] */
4191 in_p = ! in_p, low = high = arg1;
4192 break;
4193 case GT_EXPR: /* - [-, c] */
4194 low = 0, high = arg1;
4195 break;
4196 case GE_EXPR: /* + [c, -] */
4197 in_p = ! in_p, low = arg1, high = 0;
4198 break;
4199 case LT_EXPR: /* - [c, -] */
4200 low = arg1, high = 0;
4201 break;
4202 case LE_EXPR: /* + [-, c] */
4203 in_p = ! in_p, low = 0, high = arg1;
4204 break;
4205 default:
4206 gcc_unreachable ();
4207 }
4208
4209 /* If this is an unsigned comparison, we also know that EXP is
4210 greater than or equal to zero. We base the range tests we make
4211 on that fact, so we record it here so we can parse existing
4212 range tests. We test arg0_type since often the return type
4213 of, e.g. EQ_EXPR, is boolean. */
4214 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4215 {
4216 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4217 in_p, low, high, 1,
4218 build_int_cst (arg0_type, 0),
4219 NULL_TREE))
4220 return NULL_TREE;
4221
4222 in_p = n_in_p, low = n_low, high = n_high;
4223
4224 /* If the high bound is missing, but we have a nonzero low
4225 bound, reverse the range so it goes from zero to the low bound
4226 minus 1. */
4227 if (high == 0 && low && ! integer_zerop (low))
4228 {
4229 in_p = ! in_p;
4230 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4231 build_int_cst (TREE_TYPE (low), 1), 0);
4232 low = build_int_cst (arg0_type, 0);
4233 }
4234 }
4235
4236 *p_low = low;
4237 *p_high = high;
4238 *p_in_p = in_p;
4239 return arg0;
4240
4241 case NEGATE_EXPR:
4242 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4243 low and high are non-NULL, then normalize will DTRT. */
4244 if (!TYPE_UNSIGNED (arg0_type)
4245 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4246 {
4247 if (low == NULL_TREE)
4248 low = TYPE_MIN_VALUE (arg0_type);
4249 if (high == NULL_TREE)
4250 high = TYPE_MAX_VALUE (arg0_type);
4251 }
4252
4253 /* (-x) IN [a,b] -> x in [-b, -a] */
4254 n_low = range_binop (MINUS_EXPR, exp_type,
4255 build_int_cst (exp_type, 0),
4256 0, high, 1);
4257 n_high = range_binop (MINUS_EXPR, exp_type,
4258 build_int_cst (exp_type, 0),
4259 0, low, 0);
4260 if (n_high != 0 && TREE_OVERFLOW (n_high))
4261 return NULL_TREE;
4262 goto normalize;
4263
4264 case BIT_NOT_EXPR:
4265 /* ~ X -> -X - 1 */
4266 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4267 build_int_cst (exp_type, 1));
4268
4269 case PLUS_EXPR:
4270 case MINUS_EXPR:
4271 if (TREE_CODE (arg1) != INTEGER_CST)
4272 return NULL_TREE;
4273
4274 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4275 move a constant to the other side. */
4276 if (!TYPE_UNSIGNED (arg0_type)
4277 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4278 return NULL_TREE;
4279
4280 /* If EXP is signed, any overflow in the computation is undefined,
4281 so we don't worry about it so long as our computations on
4282 the bounds don't overflow. For unsigned, overflow is defined
4283 and this is exactly the right thing. */
4284 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4285 arg0_type, low, 0, arg1, 0);
4286 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4287 arg0_type, high, 1, arg1, 0);
4288 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4289 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4290 return NULL_TREE;
4291
4292 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4293 *strict_overflow_p = true;
4294
4295 normalize:
4296 /* Check for an unsigned range which has wrapped around the maximum
4297 value thus making n_high < n_low, and normalize it. */
4298 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4299 {
4300 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4301 build_int_cst (TREE_TYPE (n_high), 1), 0);
4302 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4303 build_int_cst (TREE_TYPE (n_low), 1), 0);
4304
4305 /* If the range is of the form +/- [ x+1, x ], we won't
4306 be able to normalize it. But then, it represents the
4307 whole range or the empty set, so make it
4308 +/- [ -, - ]. */
4309 if (tree_int_cst_equal (n_low, low)
4310 && tree_int_cst_equal (n_high, high))
4311 low = high = 0;
4312 else
4313 in_p = ! in_p;
4314 }
4315 else
4316 low = n_low, high = n_high;
4317
4318 *p_low = low;
4319 *p_high = high;
4320 *p_in_p = in_p;
4321 return arg0;
4322
4323 CASE_CONVERT:
4324 case NON_LVALUE_EXPR:
4325 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4326 return NULL_TREE;
4327
4328 if (! INTEGRAL_TYPE_P (arg0_type)
4329 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4330 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4331 return NULL_TREE;
4332
4333 n_low = low, n_high = high;
4334
4335 if (n_low != 0)
4336 n_low = fold_convert_loc (loc, arg0_type, n_low);
4337
4338 if (n_high != 0)
4339 n_high = fold_convert_loc (loc, arg0_type, n_high);
4340
4341 /* If we're converting arg0 from an unsigned type to exp's
4342 signed type, we will be doing the comparison as unsigned.
4343 The tests above have already verified that LOW and HIGH
4344 are both positive.
4345
4346 So we have to ensure that we will handle large unsigned
4347 values the same way that the current signed bounds treat
4348 negative values. */
4349
4350 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4351 {
4352 tree high_positive;
4353 tree equiv_type;
4354 /* For fixed-point modes, we need to pass the saturating flag
4355 as the 2nd parameter. */
4356 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4357 equiv_type
4358 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4359 TYPE_SATURATING (arg0_type));
4360 else
4361 equiv_type
4362 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4363
4364 /* A range without an upper bound is, naturally, unbounded.
4365 Since convert would have cropped a very large value, use
4366 the max value for the destination type. */
4367 high_positive
4368 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4369 : TYPE_MAX_VALUE (arg0_type);
4370
4371 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4372 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4373 fold_convert_loc (loc, arg0_type,
4374 high_positive),
4375 build_int_cst (arg0_type, 1));
4376
4377 /* If the low bound is specified, "and" the range with the
4378 range for which the original unsigned value will be
4379 positive. */
4380 if (low != 0)
4381 {
4382 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4383 1, fold_convert_loc (loc, arg0_type,
4384 integer_zero_node),
4385 high_positive))
4386 return NULL_TREE;
4387
4388 in_p = (n_in_p == in_p);
4389 }
4390 else
4391 {
4392 /* Otherwise, "or" the range with the range of the input
4393 that will be interpreted as negative. */
4394 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4395 1, fold_convert_loc (loc, arg0_type,
4396 integer_zero_node),
4397 high_positive))
4398 return NULL_TREE;
4399
4400 in_p = (in_p != n_in_p);
4401 }
4402 }
4403
4404 *p_low = n_low;
4405 *p_high = n_high;
4406 *p_in_p = in_p;
4407 return arg0;
4408
4409 default:
4410 return NULL_TREE;
4411 }
4412 }
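
/* Illustrative sketch, not part of GCC: the BIT_NOT_EXPR case above uses
   the two's-complement identity ~X == -X - 1, shown here on an unsigned
   type to keep the arithmetic well defined. Helpers are hypothetical. */
static unsigned
bitnot_direct (unsigned x)
{
  return ~x;
}
static unsigned
bitnot_rewritten (unsigned x)
{
  return -x - 1u;	/* -X - 1, as the rewrite builds */
}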
4413
4414 /* Given EXP, a logical expression, set the range it is testing into
4415 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4416 actually being tested. *PLOW and *PHIGH will be made of the same
4417 type as the returned expression. If EXP is not a comparison, we
4418 will most likely not be returning a useful value and range. Set
4419 *STRICT_OVERFLOW_P to true if the return value is only valid
4420 because signed overflow is undefined; otherwise, do not change
4421 *STRICT_OVERFLOW_P. */
4422
4423 tree
4424 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4425 bool *strict_overflow_p)
4426 {
4427 enum tree_code code;
4428 tree arg0, arg1 = NULL_TREE;
4429 tree exp_type, nexp;
4430 int in_p;
4431 tree low, high;
4432 location_t loc = EXPR_LOCATION (exp);
4433
4434 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4435 and see if we can refine the range. Some of the cases below may not
4436 happen, but it doesn't seem worth worrying about this. We "continue"
4437 the outer loop when we've changed something; otherwise we "break"
4438 the switch, which will "break" the while. */
4439
4440 in_p = 0;
4441 low = high = build_int_cst (TREE_TYPE (exp), 0);
4442
4443 while (1)
4444 {
4445 code = TREE_CODE (exp);
4446 exp_type = TREE_TYPE (exp);
4447 arg0 = NULL_TREE;
4448
4449 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4450 {
4451 if (TREE_OPERAND_LENGTH (exp) > 0)
4452 arg0 = TREE_OPERAND (exp, 0);
4453 if (TREE_CODE_CLASS (code) == tcc_binary
4454 || TREE_CODE_CLASS (code) == tcc_comparison
4455 || (TREE_CODE_CLASS (code) == tcc_expression
4456 && TREE_OPERAND_LENGTH (exp) > 1))
4457 arg1 = TREE_OPERAND (exp, 1);
4458 }
4459 if (arg0 == NULL_TREE)
4460 break;
4461
4462 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4463 &high, &in_p, strict_overflow_p);
4464 if (nexp == NULL_TREE)
4465 break;
4466 exp = nexp;
4467 }
4468
4469 /* If EXP is a constant, we can evaluate whether this is true or false. */
4470 if (TREE_CODE (exp) == INTEGER_CST)
4471 {
4472 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4473 exp, 0, low, 0))
4474 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4475 exp, 1, high, 1)));
4476 low = high = 0;
4477 exp = 0;
4478 }
4479
4480 *pin_p = in_p, *plow = low, *phigh = high;
4481 return exp;
4482 }
4483 \f
4484 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4485 type, TYPE, return an expression to test if EXP is in (or out of, depending
4486 on IN_P) the range. Return 0 if the test couldn't be created. */
4487
4488 tree
4489 build_range_check (location_t loc, tree type, tree exp, int in_p,
4490 tree low, tree high)
4491 {
4492 tree etype = TREE_TYPE (exp), value;
4493
4494 #ifdef HAVE_canonicalize_funcptr_for_compare
4495 /* Disable this optimization for function pointer expressions
4496 on targets that require function pointer canonicalization. */
4497 if (HAVE_canonicalize_funcptr_for_compare
4498 && TREE_CODE (etype) == POINTER_TYPE
4499 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4500 return NULL_TREE;
4501 #endif
4502
4503 if (! in_p)
4504 {
4505 value = build_range_check (loc, type, exp, 1, low, high);
4506 if (value != 0)
4507 return invert_truthvalue_loc (loc, value);
4508
4509 return 0;
4510 }
4511
4512 if (low == 0 && high == 0)
4513 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4514
4515 if (low == 0)
4516 return fold_build2_loc (loc, LE_EXPR, type, exp,
4517 fold_convert_loc (loc, etype, high));
4518
4519 if (high == 0)
4520 return fold_build2_loc (loc, GE_EXPR, type, exp,
4521 fold_convert_loc (loc, etype, low));
4522
4523 if (operand_equal_p (low, high, 0))
4524 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4525 fold_convert_loc (loc, etype, low));
4526
4527 if (integer_zerop (low))
4528 {
4529 if (! TYPE_UNSIGNED (etype))
4530 {
4531 etype = unsigned_type_for (etype);
4532 high = fold_convert_loc (loc, etype, high);
4533 exp = fold_convert_loc (loc, etype, exp);
4534 }
4535 return build_range_check (loc, type, exp, 1, 0, high);
4536 }
4537
4538 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4539 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4540 {
4541 int prec = TYPE_PRECISION (etype);
4542
4543 if (wi::mask (prec - 1, false, prec) == high)
4544 {
4545 if (TYPE_UNSIGNED (etype))
4546 {
4547 tree signed_etype = signed_type_for (etype);
4548 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4549 etype
4550 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4551 else
4552 etype = signed_etype;
4553 exp = fold_convert_loc (loc, etype, exp);
4554 }
4555 return fold_build2_loc (loc, GT_EXPR, type, exp,
4556 build_int_cst (etype, 0));
4557 }
4558 }
4559
4560 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4561 This requires wrap-around arithmetic for the type of the expression.
4562 First make sure that arithmetic in this type is valid, then make sure
4563 that it wraps around. */
4564 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4565 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4566 TYPE_UNSIGNED (etype));
4567
4568 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4569 {
4570 tree utype, minv, maxv;
4571
4572 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4573 for the type in question, as we rely on this here. */
4574 utype = unsigned_type_for (etype);
4575 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4576 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4577 build_int_cst (TREE_TYPE (maxv), 1), 1);
4578 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4579
4580 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4581 minv, 1, maxv, 1)))
4582 etype = utype;
4583 else
4584 return 0;
4585 }
4586
4587 high = fold_convert_loc (loc, etype, high);
4588 low = fold_convert_loc (loc, etype, low);
4589 exp = fold_convert_loc (loc, etype, exp);
4590
4591 value = const_binop (MINUS_EXPR, high, low);
4592
4594 if (POINTER_TYPE_P (etype))
4595 {
4596 if (value != 0 && !TREE_OVERFLOW (value))
4597 {
4598 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4599 return build_range_check (loc, type,
4600 fold_build_pointer_plus_loc (loc, exp, low),
4601 1, build_int_cst (etype, 0), value);
4602 }
4603 return 0;
4604 }
4605
4606 if (value != 0 && !TREE_OVERFLOW (value))
4607 return build_range_check (loc, type,
4608 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4609 1, build_int_cst (etype, 0), value);
4610
4611 return 0;
4612 }
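
/* Illustrative sketch, not part of GCC: two of the rewrites above, shown
   on unsigned char. The first is the (c>=1) && (c<=127) case; converting
   an out-of-range value to signed char relies on GCC's modular conversion
   semantics. The second subtracts the low bound so one unsigned compare
   covers the whole range. Helper names are hypothetical. */
static int
in_1_127_naive (unsigned char c)
{
  return c >= 1 && c <= 127;
}
static int
in_1_127_folded (unsigned char c)
{
  return (signed char) c > 0;
}
static int
in_low_high_naive (unsigned char c)
{
  return c >= 32 && c <= 126;
}
static int
in_low_high_folded (unsigned char c)
{
  return (unsigned char) (c - 32) <= 126 - 32;	/* c - low <= high - low */
}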
4613 \f
4614 /* Return the predecessor of VAL in its type, handling the infinite case. */
4615
4616 static tree
4617 range_predecessor (tree val)
4618 {
4619 tree type = TREE_TYPE (val);
4620
4621 if (INTEGRAL_TYPE_P (type)
4622 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4623 return 0;
4624 else
4625 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4626 build_int_cst (TREE_TYPE (val), 1), 0);
4627 }
4628
4629 /* Return the successor of VAL in its type, handling the infinite case. */
4630
4631 static tree
4632 range_successor (tree val)
4633 {
4634 tree type = TREE_TYPE (val);
4635
4636 if (INTEGRAL_TYPE_P (type)
4637 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4638 return 0;
4639 else
4640 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4641 build_int_cst (TREE_TYPE (val), 1), 0);
4642 }
4643
4644 /* Given two ranges, see if we can merge them into one. Return 1 if we
4645 can, 0 if we can't. Set the output range into the specified parameters. */
4646
4647 bool
4648 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4649 tree high0, int in1_p, tree low1, tree high1)
4650 {
4651 int no_overlap;
4652 int subset;
4653 int temp;
4654 tree tem;
4655 int in_p;
4656 tree low, high;
4657 int lowequal = ((low0 == 0 && low1 == 0)
4658 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4659 low0, 0, low1, 0)));
4660 int highequal = ((high0 == 0 && high1 == 0)
4661 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4662 high0, 1, high1, 1)));
4663
4664 /* Make range 0 be the range that starts first, or ends last if they
4665 start at the same value. Swap them if it isn't. */
4666 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4667 low0, 0, low1, 0))
4668 || (lowequal
4669 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4670 high1, 1, high0, 1))))
4671 {
4672 temp = in0_p, in0_p = in1_p, in1_p = temp;
4673 tem = low0, low0 = low1, low1 = tem;
4674 tem = high0, high0 = high1, high1 = tem;
4675 }
4676
4677 /* Now flag two cases, whether the ranges are disjoint or whether the
4678 second range is totally subsumed in the first. Note that the tests
4679 below are simplified by the ones above. */
4680 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4681 high0, 1, low1, 0));
4682 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4683 high1, 1, high0, 1));
4684
4685 /* We now have four cases, depending on whether we are including or
4686 excluding the two ranges. */
4687 if (in0_p && in1_p)
4688 {
4689 /* If they don't overlap, the result is false. If the second range
4690 is a subset it is the result. Otherwise, the range is from the start
4691 of the second to the end of the first. */
4692 if (no_overlap)
4693 in_p = 0, low = high = 0;
4694 else if (subset)
4695 in_p = 1, low = low1, high = high1;
4696 else
4697 in_p = 1, low = low1, high = high0;
4698 }
4699
4700 else if (in0_p && ! in1_p)
4701 {
4702 /* If they don't overlap, the result is the first range. If they are
4703 equal, the result is false. If the second range is a subset of the
4704 first, and the ranges begin at the same place, we go from just after
4705 the end of the second range to the end of the first. If the second
4706 range is not a subset of the first, or if it is a subset and both
4707 ranges end at the same place, the range starts at the start of the
4708 first range and ends just before the second range.
4709 Otherwise, we can't describe this as a single range. */
4710 if (no_overlap)
4711 in_p = 1, low = low0, high = high0;
4712 else if (lowequal && highequal)
4713 in_p = 0, low = high = 0;
4714 else if (subset && lowequal)
4715 {
4716 low = range_successor (high1);
4717 high = high0;
4718 in_p = 1;
4719 if (low == 0)
4720 {
4721 /* We are in the weird situation where high0 > high1 but
4722 high1 has no successor. Punt. */
4723 return 0;
4724 }
4725 }
4726 else if (! subset || highequal)
4727 {
4728 low = low0;
4729 high = range_predecessor (low1);
4730 in_p = 1;
4731 if (high == 0)
4732 {
4733 /* low0 < low1 but low1 has no predecessor. Punt. */
4734 return 0;
4735 }
4736 }
4737 else
4738 return 0;
4739 }
4740
4741 else if (! in0_p && in1_p)
4742 {
4743 /* If they don't overlap, the result is the second range. If the second
4744 is a subset of the first, the result is false. Otherwise,
4745 the range starts just after the first range and ends at the
4746 end of the second. */
4747 if (no_overlap)
4748 in_p = 1, low = low1, high = high1;
4749 else if (subset || highequal)
4750 in_p = 0, low = high = 0;
4751 else
4752 {
4753 low = range_successor (high0);
4754 high = high1;
4755 in_p = 1;
4756 if (low == 0)
4757 {
4758 /* high1 > high0 but high0 has no successor. Punt. */
4759 return 0;
4760 }
4761 }
4762 }
4763
4764 else
4765 {
4766 /* The case where we are excluding both ranges. Here the complex case
4767 is if they don't overlap. In that case, the only time we have a
4768 range is if they are adjacent. If the second is a subset of the
4769 first, the result is the first. Otherwise, the range to exclude
4770 starts at the beginning of the first range and ends at the end of the
4771 second. */
4772 if (no_overlap)
4773 {
4774 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4775 range_successor (high0),
4776 1, low1, 0)))
4777 in_p = 0, low = low0, high = high1;
4778 else
4779 {
4780 /* Canonicalize - [min, x] into - [-, x]. */
4781 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4782 switch (TREE_CODE (TREE_TYPE (low0)))
4783 {
4784 case ENUMERAL_TYPE:
4785 if (TYPE_PRECISION (TREE_TYPE (low0))
4786 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4787 break;
4788 /* FALLTHROUGH */
4789 case INTEGER_TYPE:
4790 if (tree_int_cst_equal (low0,
4791 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4792 low0 = 0;
4793 break;
4794 case POINTER_TYPE:
4795 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4796 && integer_zerop (low0))
4797 low0 = 0;
4798 break;
4799 default:
4800 break;
4801 }
4802
4803 /* Canonicalize - [x, max] into - [x, -]. */
4804 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4805 switch (TREE_CODE (TREE_TYPE (high1)))
4806 {
4807 case ENUMERAL_TYPE:
4808 if (TYPE_PRECISION (TREE_TYPE (high1))
4809 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4810 break;
4811 /* FALLTHROUGH */
4812 case INTEGER_TYPE:
4813 if (tree_int_cst_equal (high1,
4814 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4815 high1 = 0;
4816 break;
4817 case POINTER_TYPE:
4818 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4819 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4820 high1, 1,
4821 build_int_cst (TREE_TYPE (high1), 1),
4822 1)))
4823 high1 = 0;
4824 break;
4825 default:
4826 break;
4827 }
4828
4829 /* The ranges might be also adjacent between the maximum and
4830 minimum values of the given type. For
4831 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4832 return + [x + 1, y - 1]. */
4833 if (low0 == 0 && high1 == 0)
4834 {
4835 low = range_successor (high0);
4836 high = range_predecessor (low1);
4837 if (low == 0 || high == 0)
4838 return 0;
4839
4840 in_p = 1;
4841 }
4842 else
4843 return 0;
4844 }
4845 }
4846 else if (subset)
4847 in_p = 0, low = low0, high = high0;
4848 else
4849 in_p = 0, low = low0, high = high1;
4850 }
4851
4852 *pin_p = in_p, *plow = low, *phigh = high;
4853 return 1;
4854 }
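
/* Illustrative sketch, not part of GCC: ANDing the ranges +[2,10] and
   +[5,20] yields +[5,10], the "in0_p && in1_p" case above. Helpers are
   hypothetical. */
static int
anded_ranges_naive (int x)
{
  return (x >= 2 && x <= 10) && (x >= 5 && x <= 20);
}
static int
anded_ranges_merged (int x)
{
  return x >= 5 && x <= 10;
}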
4855 \f
4856
4857 /* Subroutine of fold, looking inside expressions of the form
4858 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4859 of the COND_EXPR. This function is being used also to optimize
4860 A op B ? C : A, by reversing the comparison first.
4861
4862 Return a folded expression whose code is not a COND_EXPR
4863 anymore, or NULL_TREE if no folding opportunity is found. */
4864
4865 static tree
4866 fold_cond_expr_with_comparison (location_t loc, tree type,
4867 tree arg0, tree arg1, tree arg2)
4868 {
4869 enum tree_code comp_code = TREE_CODE (arg0);
4870 tree arg00 = TREE_OPERAND (arg0, 0);
4871 tree arg01 = TREE_OPERAND (arg0, 1);
4872 tree arg1_type = TREE_TYPE (arg1);
4873 tree tem;
4874
4875 STRIP_NOPS (arg1);
4876 STRIP_NOPS (arg2);
4877
4878 /* If we have A op 0 ? A : -A, consider applying the following
4879 transformations:
4880
4881 A == 0? A : -A same as -A
4882 A != 0? A : -A same as A
4883 A >= 0? A : -A same as abs (A)
4884 A > 0? A : -A same as abs (A)
4885 A <= 0? A : -A same as -abs (A)
4886 A < 0? A : -A same as -abs (A)
4887
4888 None of these transformations work for modes with signed
4889 zeros. If A is +/-0, the first two transformations will
4890 change the sign of the result (from +0 to -0, or vice
4891 versa). The last four will fix the sign of the result,
4892 even though the original expressions could be positive or
4893 negative, depending on the sign of A.
4894
4895 Note that all these transformations are correct if A is
4896 NaN, since the two alternatives (A and -A) are also NaNs. */
4897 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4898 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4899 ? real_zerop (arg01)
4900 : integer_zerop (arg01))
4901 && ((TREE_CODE (arg2) == NEGATE_EXPR
4902 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4903 /* In the case that A is of the form X-Y, '-A' (arg2) may
4904 have already been folded to Y-X, check for that. */
4905 || (TREE_CODE (arg1) == MINUS_EXPR
4906 && TREE_CODE (arg2) == MINUS_EXPR
4907 && operand_equal_p (TREE_OPERAND (arg1, 0),
4908 TREE_OPERAND (arg2, 1), 0)
4909 && operand_equal_p (TREE_OPERAND (arg1, 1),
4910 TREE_OPERAND (arg2, 0), 0))))
4911 switch (comp_code)
4912 {
4913 case EQ_EXPR:
4914 case UNEQ_EXPR:
4915 tem = fold_convert_loc (loc, arg1_type, arg1);
4916 return pedantic_non_lvalue_loc (loc,
4917 fold_convert_loc (loc, type,
4918 negate_expr (tem)));
4919 case NE_EXPR:
4920 case LTGT_EXPR:
4921 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4922 case UNGE_EXPR:
4923 case UNGT_EXPR:
4924 if (flag_trapping_math)
4925 break;
4926 /* Fall through. */
4927 case GE_EXPR:
4928 case GT_EXPR:
4929 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4930 arg1 = fold_convert_loc (loc, signed_type_for
4931 (TREE_TYPE (arg1)), arg1);
4932 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4933 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4934 case UNLE_EXPR:
4935 case UNLT_EXPR:
4936 if (flag_trapping_math)
4937 break;
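/* Fall through. */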
4938 case LE_EXPR:
4939 case LT_EXPR:
4940 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4941 arg1 = fold_convert_loc (loc, signed_type_for
4942 (TREE_TYPE (arg1)), arg1);
4943 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4944 return negate_expr (fold_convert_loc (loc, type, tem));
4945 default:
4946 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4947 break;
4948 }
4949
4950 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4951 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4952 both transformations are correct when A is NaN: A != 0
4953 is then true, and A == 0 is false. */
4954
4955 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4956 && integer_zerop (arg01) && integer_zerop (arg2))
4957 {
4958 if (comp_code == NE_EXPR)
4959 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4960 else if (comp_code == EQ_EXPR)
4961 return build_zero_cst (type);
4962 }
4963
4964 /* Try some transformations of A op B ? A : B.
4965
4966 A == B? A : B same as B
4967 A != B? A : B same as A
4968 A >= B? A : B same as max (A, B)
4969 A > B? A : B same as max (B, A)
4970 A <= B? A : B same as min (A, B)
4971 A < B? A : B same as min (B, A)
4972
4973 As above, these transformations don't work in the presence
4974 of signed zeros. For example, if A and B are zeros of
4975 opposite sign, the first two transformations will change
4976 the sign of the result. In the last four, the original
4977 expressions give different results for (A=+0, B=-0) and
4978 (A=-0, B=+0), but the transformed expressions do not.
4979
4980 The first two transformations are correct if either A or B
4981 is a NaN. In the first transformation, the condition will
4982 be false, and B will indeed be chosen. In the case of the
4983 second transformation, the condition A != B will be true,
4984 and A will be chosen.
4985
4986 The conversions to max() and min() are not correct if B is
4987 a number and A is not. The conditions in the original
4988 expressions will be false, so all four give B. The min()
4989 and max() versions would give a NaN instead. */
4990 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4991 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4992 /* Avoid these transformations if the COND_EXPR may be used
4993 as an lvalue in the C++ front-end. PR c++/19199. */
4994 && (in_gimple_form
4995 || VECTOR_TYPE_P (type)
4996 || (! lang_GNU_CXX ()
4997 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4998 || ! maybe_lvalue_p (arg1)
4999 || ! maybe_lvalue_p (arg2)))
5000 {
5001 tree comp_op0 = arg00;
5002 tree comp_op1 = arg01;
5003 tree comp_type = TREE_TYPE (comp_op0);
5004
5005 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5006 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5007 {
5008 comp_type = type;
5009 comp_op0 = arg1;
5010 comp_op1 = arg2;
5011 }
5012
5013 switch (comp_code)
5014 {
5015 case EQ_EXPR:
5016 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5017 case NE_EXPR:
5018 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5019 case LE_EXPR:
5020 case LT_EXPR:
5021 case UNLE_EXPR:
5022 case UNLT_EXPR:
5023 /* In C++ a ?: expression can be an lvalue, so put the
5024 operand which will be used if they are equal first
5025 so that we can convert this back to the
5026 corresponding COND_EXPR. */
5027 if (!HONOR_NANS (arg1))
5028 {
5029 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5030 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5031 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5032 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5033 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5034 comp_op1, comp_op0);
5035 return pedantic_non_lvalue_loc (loc,
5036 fold_convert_loc (loc, type, tem));
5037 }
5038 break;
5039 case GE_EXPR:
5040 case GT_EXPR:
5041 case UNGE_EXPR:
5042 case UNGT_EXPR:
5043 if (!HONOR_NANS (arg1))
5044 {
5045 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5046 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5047 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5048 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5049 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5050 comp_op1, comp_op0);
5051 return pedantic_non_lvalue_loc (loc,
5052 fold_convert_loc (loc, type, tem));
5053 }
5054 break;
5055 case UNEQ_EXPR:
5056 if (!HONOR_NANS (arg1))
5057 return pedantic_non_lvalue_loc (loc,
5058 fold_convert_loc (loc, type, arg2));
5059 break;
5060 case LTGT_EXPR:
5061 if (!HONOR_NANS (arg1))
5062 return pedantic_non_lvalue_loc (loc,
5063 fold_convert_loc (loc, type, arg1));
5064 break;
5065 default:
5066 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5067 break;
5068 }
5069 }
5070
5071 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5072 we might still be able to simplify this. For example,
5073 if C1 is one less or one more than C2, this might have started
5074 out as a MIN or MAX and been transformed by this function.
5075 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5076
5077 if (INTEGRAL_TYPE_P (type)
5078 && TREE_CODE (arg01) == INTEGER_CST
5079 && TREE_CODE (arg2) == INTEGER_CST)
5080 switch (comp_code)
5081 {
5082 case EQ_EXPR:
5083 if (TREE_CODE (arg1) == INTEGER_CST)
5084 break;
5085 /* We can replace A with C1 in this case. */
5086 arg1 = fold_convert_loc (loc, type, arg01);
5087 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5088
5089 case LT_EXPR:
5090 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5091 MIN_EXPR, to preserve the signedness of the comparison. */
5092 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5093 OEP_ONLY_CONST)
5094 && operand_equal_p (arg01,
5095 const_binop (PLUS_EXPR, arg2,
5096 build_int_cst (type, 1)),
5097 OEP_ONLY_CONST))
5098 {
5099 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5100 fold_convert_loc (loc, TREE_TYPE (arg00),
5101 arg2));
5102 return pedantic_non_lvalue_loc (loc,
5103 fold_convert_loc (loc, type, tem));
5104 }
5105 break;
5106
5107 case LE_EXPR:
5108 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5109 as above. */
5110 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5111 OEP_ONLY_CONST)
5112 && operand_equal_p (arg01,
5113 const_binop (MINUS_EXPR, arg2,
5114 build_int_cst (type, 1)),
5115 OEP_ONLY_CONST))
5116 {
5117 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5118 fold_convert_loc (loc, TREE_TYPE (arg00),
5119 arg2));
5120 return pedantic_non_lvalue_loc (loc,
5121 fold_convert_loc (loc, type, tem));
5122 }
5123 break;
5124
5125 case GT_EXPR:
5126 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5127 MAX_EXPR, to preserve the signedness of the comparison. */
5128 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5129 OEP_ONLY_CONST)
5130 && operand_equal_p (arg01,
5131 const_binop (MINUS_EXPR, arg2,
5132 build_int_cst (type, 1)),
5133 OEP_ONLY_CONST))
5134 {
5135 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5136 fold_convert_loc (loc, TREE_TYPE (arg00),
5137 arg2));
5138 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5139 }
5140 break;
5141
5142 case GE_EXPR:
5143 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5144 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5145 OEP_ONLY_CONST)
5146 && operand_equal_p (arg01,
5147 const_binop (PLUS_EXPR, arg2,
5148 build_int_cst (type, 1)),
5149 OEP_ONLY_CONST))
5150 {
5151 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5152 fold_convert_loc (loc, TREE_TYPE (arg00),
5153 arg2));
5154 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5155 }
5156 break;
5157 case NE_EXPR:
5158 break;
5159 default:
5160 gcc_unreachable ();
5161 }
5162
5163 return NULL_TREE;
5164 }
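
/* Illustrative sketch, not part of GCC: the "A op 0 ? A : -A" table above
   for the >= case. The two forms agree except for signed zeros (for
   A == -0.0 the conditional yields -0.0 while fabs yields +0.0), which is
   why the transformation is guarded by !HONOR_SIGNED_ZEROS. Helper names
   are hypothetical. */
static double
cond_abs_naive (double a)
{
  return a >= 0 ? a : -a;
}
static double
cond_abs_folded (double a)
{
  return __builtin_fabs (a);	/* abs (A) */
}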
5165
5166
5167 \f
5168 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5169 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5170 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5171 false) >= 2)
5172 #endif
5173
5174 /* EXP is some logical combination of boolean tests. See if we can
5175 merge it into some range test. Return the new tree if so. */
5176
5177 static tree
5178 fold_range_test (location_t loc, enum tree_code code, tree type,
5179 tree op0, tree op1)
5180 {
5181 int or_op = (code == TRUTH_ORIF_EXPR
5182 || code == TRUTH_OR_EXPR);
5183 int in0_p, in1_p, in_p;
5184 tree low0, low1, low, high0, high1, high;
5185 bool strict_overflow_p = false;
5186 tree tem, lhs, rhs;
5187 const char * const warnmsg = G_("assuming signed overflow does not occur "
5188 "when simplifying range test");
5189
5190 if (!INTEGRAL_TYPE_P (type))
5191 return 0;
5192
5193 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5194 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5195
5196 /* If this is an OR operation, invert both sides; we will invert
5197 again at the end. */
5198 if (or_op)
5199 in0_p = ! in0_p, in1_p = ! in1_p;
5200
5201 /* If both expressions are the same, if we can merge the ranges, and we
5202 can build the range test, return it or it inverted. If one of the
5203 ranges is always true or always false, consider it to be the same
5204 expression as the other. */
5205 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5206 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5207 in1_p, low1, high1)
5208 && 0 != (tem = (build_range_check (loc, type,
5209 lhs != 0 ? lhs
5210 : rhs != 0 ? rhs : integer_zero_node,
5211 in_p, low, high))))
5212 {
5213 if (strict_overflow_p)
5214 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5215 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5216 }
5217
5218 /* On machines where the branch cost is expensive, if this is a
5219 short-circuited branch and the underlying object on both sides
5220 is the same, make a non-short-circuit operation. */
5221 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5222 && lhs != 0 && rhs != 0
5223 && (code == TRUTH_ANDIF_EXPR
5224 || code == TRUTH_ORIF_EXPR)
5225 && operand_equal_p (lhs, rhs, 0))
5226 {
5227 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5228 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5229 which cases we can't do this. */
5230 if (simple_operand_p (lhs))
5231 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5232 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5233 type, op0, op1);
5234
5235 else if (!lang_hooks.decls.global_bindings_p ()
5236 && !CONTAINS_PLACEHOLDER_P (lhs))
5237 {
5238 tree common = save_expr (lhs);
5239
5240 if (0 != (lhs = build_range_check (loc, type, common,
5241 or_op ? ! in0_p : in0_p,
5242 low0, high0))
5243 && (0 != (rhs = build_range_check (loc, type, common,
5244 or_op ? ! in1_p : in1_p,
5245 low1, high1))))
5246 {
5247 if (strict_overflow_p)
5248 fold_overflow_warning (warnmsg,
5249 WARN_STRICT_OVERFLOW_COMPARISON);
5250 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5251 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5252 type, lhs, rhs);
5253 }
5254 }
5255 }
5256
5257 return 0;
5258 }
5259 \f
5260 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5261 bit value. Arrange things so the extra bits will be set to zero if and
5262 only if C is sign-extended to its full width. If MASK is nonzero,
5263 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5264
5265 static tree
5266 unextend (tree c, int p, int unsignedp, tree mask)
5267 {
5268 tree type = TREE_TYPE (c);
5269 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5270 tree temp;
5271
5272 if (p == modesize || unsignedp)
5273 return c;
5274
5275 /* We work by getting just the sign bit into the low-order bit, then
5276 into the high-order bit, then sign-extend. We then XOR that value
5277 with C. */
5278 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5279
5280 /* We must use a signed type in order to get an arithmetic right shift.
5281 However, we must also avoid introducing accidental overflows, so that
5282 a subsequent call to integer_zerop will work. Hence we must
5283 do the type conversion here. At this point, the constant is either
5284 zero or one, and the conversion to a signed type can never overflow.
5285 We could get an overflow if this conversion is done anywhere else. */
5286 if (TYPE_UNSIGNED (type))
5287 temp = fold_convert (signed_type_for (type), temp);
5288
5289 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5290 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5291 if (mask != 0)
5292 temp = const_binop (BIT_AND_EXPR, temp,
5293 fold_convert (TREE_TYPE (c), mask));
5294 /* If necessary, convert the type back to match the type of C. */
5295 if (TYPE_UNSIGNED (type))
5296 temp = fold_convert (type, temp);
5297
5298 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5299 }
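
/* Illustrative sketch, not part of GCC: the shift pair above implements
   sign extension from P bits; the classic form of the same operation is
   shown below. The helper is hypothetical, assumes 0 < p < 32, and
   relies on GCC's modular unsigned-to-int conversion. */
static int
sign_extend_from (unsigned value, int p)
{
  unsigned sign = 1u << (p - 1);	/* sign bit of the P-bit field */
  value &= (1u << p) - 1;		/* keep only the low P bits */
  return (int) ((value ^ sign) - sign);	/* copy bit P-1 into the high bits */
}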
5300 \f
5301 /* For an expression that has the form
5302 (A && B) || ~B
5303 or
5304 (A || B) && ~B,
5305 we can drop one of the inner expressions and simplify to
5306 A || ~B
5307 or
5308 A && ~B
5309 LOC is the location of the resulting expression. OP is the inner
5310 logical operation; the left-hand side in the examples above, while CMPOP
5311 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5312 removing a condition that guards another, as in
5313 (A != NULL && A->...) || A == NULL
5314 which we must not transform. If RHS_ONLY is true, only eliminate the
5315 right-most operand of the inner logical operation. */
5316
5317 static tree
5318 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5319 bool rhs_only)
5320 {
5321 tree type = TREE_TYPE (cmpop);
5322 enum tree_code code = TREE_CODE (cmpop);
5323 enum tree_code truthop_code = TREE_CODE (op);
5324 tree lhs = TREE_OPERAND (op, 0);
5325 tree rhs = TREE_OPERAND (op, 1);
5326 tree orig_lhs = lhs, orig_rhs = rhs;
5327 enum tree_code rhs_code = TREE_CODE (rhs);
5328 enum tree_code lhs_code = TREE_CODE (lhs);
5329 enum tree_code inv_code;
5330
5331 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5332 return NULL_TREE;
5333
5334 if (TREE_CODE_CLASS (code) != tcc_comparison)
5335 return NULL_TREE;
5336
5337 if (rhs_code == truthop_code)
5338 {
5339 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5340 if (newrhs != NULL_TREE)
5341 {
5342 rhs = newrhs;
5343 rhs_code = TREE_CODE (rhs);
5344 }
5345 }
5346 if (lhs_code == truthop_code && !rhs_only)
5347 {
5348 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5349 if (newlhs != NULL_TREE)
5350 {
5351 lhs = newlhs;
5352 lhs_code = TREE_CODE (lhs);
5353 }
5354 }
5355
5356 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5357 if (inv_code == rhs_code
5358 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5359 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5360 return lhs;
5361 if (!rhs_only && inv_code == lhs_code
5362 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5363 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5364 return rhs;
5365 if (rhs != orig_rhs || lhs != orig_lhs)
5366 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5367 lhs, rhs);
5368 return NULL_TREE;
5369 }
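
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the source-level shape of the rewrite above, using b >= 0 as the
   exact inverse of the b < 0 arm.  */

static int
example_merge_opposite_arm (int a, int b)
{
  /* (A && B) || ~B: the B arm is the inverse of the CMPOP
     comparison, so it can be dropped.  */
  int before = (a < 0 && b < 0) || b >= 0;
  int after = a < 0 || b >= 0;
  return before == after;  /* Always 1.  */
}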
5370
5371 /* Find ways of folding logical expressions of LHS and RHS:
5372 Try to merge two comparisons to the same innermost item.
5373 Look for range tests like "ch >= '0' && ch <= '9'".
5374 Look for combinations of simple terms on machines with expensive branches
5375 and evaluate the RHS unconditionally.
5376
5377 For example, if we have p->a == 2 && p->b == 4 and we can make an
5378 object large enough to span both A and B, we can do this with a comparison
5379 against the object ANDed with a mask.
5380
5381 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5382 operations to do this with one comparison.
5383
5384 We check for both normal comparisons and the BIT_AND_EXPRs made by this
5385 function and the one above.
5386
5387 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5388 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5389
5390 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5391 two operands.
5392
5393 We return the simplified tree or 0 if no optimization is possible. */
5394
5395 static tree
5396 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5397 tree lhs, tree rhs)
5398 {
5399 /* If this is the "or" of two comparisons, we can do something if
5400 the comparisons are NE_EXPR. If this is the "and", we can do something
5401 if the comparisons are EQ_EXPR. I.e.,
5402 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5403
5404 WANTED_CODE is this operation code. For single bit fields, we can
5405 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5406 comparison for one-bit fields. */
5407
5408 enum tree_code wanted_code;
5409 enum tree_code lcode, rcode;
5410 tree ll_arg, lr_arg, rl_arg, rr_arg;
5411 tree ll_inner, lr_inner, rl_inner, rr_inner;
5412 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5413 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5414 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5415 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5416 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5417 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5418 machine_mode lnmode, rnmode;
5419 tree ll_mask, lr_mask, rl_mask, rr_mask;
5420 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5421 tree l_const, r_const;
5422 tree lntype, rntype, result;
5423 HOST_WIDE_INT first_bit, end_bit;
5424 int volatilep;
5425
5426 /* Start by getting the comparison codes. Fail if anything is volatile.
5427 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5428 it were surrounded with a NE_EXPR. */
5429
5430 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5431 return 0;
5432
5433 lcode = TREE_CODE (lhs);
5434 rcode = TREE_CODE (rhs);
5435
5436 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5437 {
5438 lhs = build2 (NE_EXPR, truth_type, lhs,
5439 build_int_cst (TREE_TYPE (lhs), 0));
5440 lcode = NE_EXPR;
5441 }
5442
5443 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5444 {
5445 rhs = build2 (NE_EXPR, truth_type, rhs,
5446 build_int_cst (TREE_TYPE (rhs), 0));
5447 rcode = NE_EXPR;
5448 }
5449
5450 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5451 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5452 return 0;
5453
5454 ll_arg = TREE_OPERAND (lhs, 0);
5455 lr_arg = TREE_OPERAND (lhs, 1);
5456 rl_arg = TREE_OPERAND (rhs, 0);
5457 rr_arg = TREE_OPERAND (rhs, 1);
5458
5459 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5460 if (simple_operand_p (ll_arg)
5461 && simple_operand_p (lr_arg))
5462 {
5463 if (operand_equal_p (ll_arg, rl_arg, 0)
5464 && operand_equal_p (lr_arg, rr_arg, 0))
5465 {
5466 result = combine_comparisons (loc, code, lcode, rcode,
5467 truth_type, ll_arg, lr_arg);
5468 if (result)
5469 return result;
5470 }
5471 else if (operand_equal_p (ll_arg, rr_arg, 0)
5472 && operand_equal_p (lr_arg, rl_arg, 0))
5473 {
5474 result = combine_comparisons (loc, code, lcode,
5475 swap_tree_comparison (rcode),
5476 truth_type, ll_arg, lr_arg);
5477 if (result)
5478 return result;
5479 }
5480 }
5481
5482 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5483 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5484
5485 /* If the RHS can be evaluated unconditionally and its operands are
5486 simple, it wins to evaluate the RHS unconditionally on machines
5487 with expensive branches. In this case, this isn't a comparison
5488 that can be merged. */
5489
5490 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5491 false) >= 2
5492 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5493 && simple_operand_p (rl_arg)
5494 && simple_operand_p (rr_arg))
5495 {
5496 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5497 if (code == TRUTH_OR_EXPR
5498 && lcode == NE_EXPR && integer_zerop (lr_arg)
5499 && rcode == NE_EXPR && integer_zerop (rr_arg)
5500 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5501 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5502 return build2_loc (loc, NE_EXPR, truth_type,
5503 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5504 ll_arg, rl_arg),
5505 build_int_cst (TREE_TYPE (ll_arg), 0));
5506
5507 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5508 if (code == TRUTH_AND_EXPR
5509 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5510 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5511 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5512 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5513 return build2_loc (loc, EQ_EXPR, truth_type,
5514 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5515 ll_arg, rl_arg),
5516 build_int_cst (TREE_TYPE (ll_arg), 0));
5517 }
5518
5519 /* See if the comparisons can be merged. Then get all the parameters for
5520 each side. */
5521
5522 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5523 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5524 return 0;
5525
5526 volatilep = 0;
5527 ll_inner = decode_field_reference (loc, ll_arg,
5528 &ll_bitsize, &ll_bitpos, &ll_mode,
5529 &ll_unsignedp, &volatilep, &ll_mask,
5530 &ll_and_mask);
5531 lr_inner = decode_field_reference (loc, lr_arg,
5532 &lr_bitsize, &lr_bitpos, &lr_mode,
5533 &lr_unsignedp, &volatilep, &lr_mask,
5534 &lr_and_mask);
5535 rl_inner = decode_field_reference (loc, rl_arg,
5536 &rl_bitsize, &rl_bitpos, &rl_mode,
5537 &rl_unsignedp, &volatilep, &rl_mask,
5538 &rl_and_mask);
5539 rr_inner = decode_field_reference (loc, rr_arg,
5540 &rr_bitsize, &rr_bitpos, &rr_mode,
5541 &rr_unsignedp, &volatilep, &rr_mask,
5542 &rr_and_mask);
5543
5544 /* The inner operation on the lhs of each comparison must be the
5545 same if we are to be able to do anything.
5546 Then see if we have constants. If not, the same must be true for
5547 the rhs's. */
5548 if (volatilep || ll_inner == 0 || rl_inner == 0
5549 || ! operand_equal_p (ll_inner, rl_inner, 0))
5550 return 0;
5551
5552 if (TREE_CODE (lr_arg) == INTEGER_CST
5553 && TREE_CODE (rr_arg) == INTEGER_CST)
5554 l_const = lr_arg, r_const = rr_arg;
5555 else if (lr_inner == 0 || rr_inner == 0
5556 || ! operand_equal_p (lr_inner, rr_inner, 0))
5557 return 0;
5558 else
5559 l_const = r_const = 0;
5560
5561 /* If either comparison code is not correct for our logical operation,
5562 fail. However, we can convert a one-bit comparison against zero into
5563 the opposite comparison against that bit being set in the field. */
5564
5565 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5566 if (lcode != wanted_code)
5567 {
5568 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5569 {
5570 /* Make the left operand unsigned, since we are only interested
5571 in the value of one bit. Otherwise we are doing the wrong
5572 thing below. */
5573 ll_unsignedp = 1;
5574 l_const = ll_mask;
5575 }
5576 else
5577 return 0;
5578 }
5579
5580 /* This is analogous to the code for l_const above. */
5581 if (rcode != wanted_code)
5582 {
5583 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5584 {
5585 rl_unsignedp = 1;
5586 r_const = rl_mask;
5587 }
5588 else
5589 return 0;
5590 }
5591
5592 /* See if we can find a mode that contains both fields being compared on
5593 the left. If we can't, fail. Otherwise, update all constants and masks
5594 to be relative to a field of that size. */
5595 first_bit = MIN (ll_bitpos, rl_bitpos);
5596 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5597 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5598 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5599 volatilep);
5600 if (lnmode == VOIDmode)
5601 return 0;
5602
5603 lnbitsize = GET_MODE_BITSIZE (lnmode);
5604 lnbitpos = first_bit & ~ (lnbitsize - 1);
5605 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5606 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5607
5608 if (BYTES_BIG_ENDIAN)
5609 {
5610 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5611 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5612 }
5613
5614 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5615 size_int (xll_bitpos));
5616 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5617 size_int (xrl_bitpos));
5618
5619 if (l_const)
5620 {
5621 l_const = fold_convert_loc (loc, lntype, l_const);
5622 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5623 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5624 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5625 fold_build1_loc (loc, BIT_NOT_EXPR,
5626 lntype, ll_mask))))
5627 {
5628 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5629
5630 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5631 }
5632 }
5633 if (r_const)
5634 {
5635 r_const = fold_convert_loc (loc, lntype, r_const);
5636 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5637 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5638 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5639 fold_build1_loc (loc, BIT_NOT_EXPR,
5640 lntype, rl_mask))))
5641 {
5642 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5643
5644 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5645 }
5646 }
5647
5648 /* If the right sides are not constant, do the same for them. Also,
5649 disallow this optimization if a size or signedness mismatch occurs
5650 between the left and right sides. */
5651 if (l_const == 0)
5652 {
5653 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5654 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5655 /* Make sure the two fields on the right
5656 correspond to the left without being swapped. */
5657 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5658 return 0;
5659
5660 first_bit = MIN (lr_bitpos, rr_bitpos);
5661 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5662 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5663 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5664 volatilep);
5665 if (rnmode == VOIDmode)
5666 return 0;
5667
5668 rnbitsize = GET_MODE_BITSIZE (rnmode);
5669 rnbitpos = first_bit & ~ (rnbitsize - 1);
5670 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5671 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5672
5673 if (BYTES_BIG_ENDIAN)
5674 {
5675 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5676 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5677 }
5678
5679 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5680 rntype, lr_mask),
5681 size_int (xlr_bitpos));
5682 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5683 rntype, rr_mask),
5684 size_int (xrr_bitpos));
5685
5686 /* Make a mask that corresponds to both fields being compared.
5687 Do this for both items being compared. If the operands are the
5688 same size and the bits being compared are in the same position
5689 then we can do this by masking both and comparing the masked
5690 results. */
5691 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5692 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5693 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5694 {
5695 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5696 ll_unsignedp || rl_unsignedp);
5697 if (! all_ones_mask_p (ll_mask, lnbitsize))
5698 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5699
5700 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5701 lr_unsignedp || rr_unsignedp);
5702 if (! all_ones_mask_p (lr_mask, rnbitsize))
5703 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5704
5705 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5706 }
5707
5708 /* There is still another way we can do something: If both pairs of
5709 fields being compared are adjacent, we may be able to make a wider
5710 field containing them both.
5711
5712 Note that we still must mask the lhs/rhs expressions. Furthermore,
5713 the mask must be shifted to account for the shift done by
5714 make_bit_field_ref. */
5715 if ((ll_bitsize + ll_bitpos == rl_bitpos
5716 && lr_bitsize + lr_bitpos == rr_bitpos)
5717 || (ll_bitpos == rl_bitpos + rl_bitsize
5718 && lr_bitpos == rr_bitpos + rr_bitsize))
5719 {
5720 tree type;
5721
5722 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5723 ll_bitsize + rl_bitsize,
5724 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5725 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5726 lr_bitsize + rr_bitsize,
5727 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5728
5729 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5730 size_int (MIN (xll_bitpos, xrl_bitpos)));
5731 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5732 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5733
5734 /* Convert to the smaller type before masking out unwanted bits. */
5735 type = lntype;
5736 if (lntype != rntype)
5737 {
5738 if (lnbitsize > rnbitsize)
5739 {
5740 lhs = fold_convert_loc (loc, rntype, lhs);
5741 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5742 type = rntype;
5743 }
5744 else if (lnbitsize < rnbitsize)
5745 {
5746 rhs = fold_convert_loc (loc, lntype, rhs);
5747 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5748 type = lntype;
5749 }
5750 }
5751
5752 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5753 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5754
5755 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5756 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5757
5758 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5759 }
5760
5761 return 0;
5762 }
5763
5764 /* Handle the case of comparisons with constants. If there is something in
5765 common between the masks, those bits of the constants must be the same.
5766 If not, the condition is always false. Test for this to avoid generating
5767 incorrect code below. */
5768 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5769 if (! integer_zerop (result)
5770 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5771 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5772 {
5773 if (wanted_code == NE_EXPR)
5774 {
5775 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5776 return constant_boolean_node (true, truth_type);
5777 }
5778 else
5779 {
5780 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5781 return constant_boolean_node (false, truth_type);
5782 }
5783 }
5784
5785 /* Construct the expression we will return. First get the component
5786 reference we will make. Unless the mask is all ones the width of
5787 that field, perform the mask operation. Then compare with the
5788 merged constant. */
5789 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5790 ll_unsignedp || rl_unsignedp);
5791
5792 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5793 if (! all_ones_mask_p (ll_mask, lnbitsize))
5794 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5795
5796 return build2_loc (loc, wanted_code, truth_type, result,
5797 const_binop (BIT_IOR_EXPR, l_const, r_const));
5798 }
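
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the constant case above on a single byte, where two nibble tests
   collapse into one masked compare against a merged constant.  */

static int
example_merged_field_compare (unsigned char byte)
{
  /* Before: two tests, as in p->a == 2 && p->b == 4.  */
  int before = (byte & 0x0f) == 2 && (byte >> 4) == 4;

  /* After: one load and one compare against (4 << 4) | 2; the mask
     is all ones here, so even the BIT_AND_EXPR disappears.  */
  int after = byte == ((4 << 4) | 2);

  return before == after;  /* Always 1.  */
}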
5799 \f
5800 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5801 constant. */
5802
5803 static tree
5804 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5805 tree op0, tree op1)
5806 {
5807 tree arg0 = op0;
5808 enum tree_code op_code;
5809 tree comp_const;
5810 tree minmax_const;
5811 int consts_equal, consts_lt;
5812 tree inner;
5813
5814 STRIP_SIGN_NOPS (arg0);
5815
5816 op_code = TREE_CODE (arg0);
5817 minmax_const = TREE_OPERAND (arg0, 1);
5818 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5819 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5820 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5821 inner = TREE_OPERAND (arg0, 0);
5822
5823 /* If something does not permit us to optimize, return the original tree. */
5824 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5825 || TREE_CODE (comp_const) != INTEGER_CST
5826 || TREE_OVERFLOW (comp_const)
5827 || TREE_CODE (minmax_const) != INTEGER_CST
5828 || TREE_OVERFLOW (minmax_const))
5829 return NULL_TREE;
5830
5831 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5832 and GT_EXPR, doing the rest with recursive calls using logical
5833 simplifications. */
5834 switch (code)
5835 {
5836 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5837 {
5838 tree tem
5839 = optimize_minmax_comparison (loc,
5840 invert_tree_comparison (code, false),
5841 type, op0, op1);
5842 if (tem)
5843 return invert_truthvalue_loc (loc, tem);
5844 return NULL_TREE;
5845 }
5846
5847 case GE_EXPR:
5848 return
5849 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5850 optimize_minmax_comparison
5851 (loc, EQ_EXPR, type, arg0, comp_const),
5852 optimize_minmax_comparison
5853 (loc, GT_EXPR, type, arg0, comp_const));
5854
5855 case EQ_EXPR:
5856 if (op_code == MAX_EXPR && consts_equal)
5857 /* MAX (X, 0) == 0 -> X <= 0 */
5858 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5859
5860 else if (op_code == MAX_EXPR && consts_lt)
5861 /* MAX (X, 0) == 5 -> X == 5 */
5862 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5863
5864 else if (op_code == MAX_EXPR)
5865 /* MAX (X, 0) == -1 -> false */
5866 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5867
5868 else if (consts_equal)
5869 /* MIN (X, 0) == 0 -> X >= 0 */
5870 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5871
5872 else if (consts_lt)
5873 /* MIN (X, 0) == 5 -> false */
5874 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5875
5876 else
5877 /* MIN (X, 0) == -1 -> X == -1 */
5878 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5879
5880 case GT_EXPR:
5881 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5882 /* MAX (X, 0) > 0 -> X > 0
5883 MAX (X, 0) > 5 -> X > 5 */
5884 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5885
5886 else if (op_code == MAX_EXPR)
5887 /* MAX (X, 0) > -1 -> true */
5888 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5889
5890 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5891 /* MIN (X, 0) > 0 -> false
5892 MIN (X, 0) > 5 -> false */
5893 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5894
5895 else
5896 /* MIN (X, 0) > -1 -> X > -1 */
5897 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5898
5899 default:
5900 return NULL_TREE;
5901 }
5902 }
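
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the EQ_EXPR cases above, spelled out for MAX (X, 0).  */

static int
example_minmax_compare (int x)
{
  int m = x > 0 ? x : 0;           /* MAX (x, 0) */

  return (m == 0) == (x <= 0)      /* MAX (X, 0) == 0 -> X <= 0 */
         && (m == 5) == (x == 5)   /* MAX (X, 0) == 5 -> X == 5 */
         && (m == -1) == 0;        /* MAX (X, 0) == -1 -> false */
}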
5903 \f
5904 /* T is an integer expression that is being multiplied, divided, or taken a
5905 modulus (CODE says which and what kind of divide or modulus) by a
5906 constant C. See if we can eliminate that operation by folding it with
5907 other operations already in T. WIDE_TYPE, if non-null, is a type that
5908 should be used for the computation if wider than our type.
5909
5910 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5911 (X * 2) + (Y * 4). We must, however, be assured that either the original
5912 expression would not overflow or that overflow is undefined for the type
5913 in the language in question.
5914
5915 If we return a non-null expression, it is an equivalent form of the
5916 original computation, but need not be in the original type.
5917
5918 We set *STRICT_OVERFLOW_P to true if the return value depends on
5919 signed overflow being undefined. Otherwise we do not change
5920 *STRICT_OVERFLOW_P. */
5921
5922 static tree
5923 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5924 bool *strict_overflow_p)
5925 {
5926 /* To avoid exponential search depth, refuse to allow recursion past
5927 three levels. Beyond that (1) it's highly unlikely that we'll find
5928 something interesting and (2) we've probably processed it before
5929 when we built the inner expression. */
5930
5931 static int depth;
5932 tree ret;
5933
5934 if (depth > 3)
5935 return NULL;
5936
5937 depth++;
5938 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5939 depth--;
5940
5941 return ret;
5942 }
5943
5944 static tree
5945 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5946 bool *strict_overflow_p)
5947 {
5948 tree type = TREE_TYPE (t);
5949 enum tree_code tcode = TREE_CODE (t);
5950 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5951 > GET_MODE_SIZE (TYPE_MODE (type)))
5952 ? wide_type : type);
5953 tree t1, t2;
5954 int same_p = tcode == code;
5955 tree op0 = NULL_TREE, op1 = NULL_TREE;
5956 bool sub_strict_overflow_p;
5957
5958 /* Don't deal with constants of zero here; they confuse the code below. */
5959 if (integer_zerop (c))
5960 return NULL_TREE;
5961
5962 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5963 op0 = TREE_OPERAND (t, 0);
5964
5965 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5966 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5967
5968 /* Note that we need not handle conditional operations here since fold
5969 already handles those cases. So just do arithmetic here. */
5970 switch (tcode)
5971 {
5972 case INTEGER_CST:
5973 /* For a constant, we can always simplify if we are a multiply
5974 or (for divide and modulus) if it is a multiple of our constant. */
5975 if (code == MULT_EXPR
5976 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5977 return const_binop (code, fold_convert (ctype, t),
5978 fold_convert (ctype, c));
5979 break;
5980
5981 CASE_CONVERT: case NON_LVALUE_EXPR:
5982 /* If op0 is an expression ... */
5983 if ((COMPARISON_CLASS_P (op0)
5984 || UNARY_CLASS_P (op0)
5985 || BINARY_CLASS_P (op0)
5986 || VL_EXP_CLASS_P (op0)
5987 || EXPRESSION_CLASS_P (op0))
5988 /* ... and has wrapping overflow, and its type is smaller
5989 than ctype, then we cannot pass through as widening. */
5990 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5991 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5992 && (TYPE_PRECISION (ctype)
5993 > TYPE_PRECISION (TREE_TYPE (op0))))
5994 /* ... or this is a truncation (t is narrower than op0),
5995 then we cannot pass through this narrowing. */
5996 || (TYPE_PRECISION (type)
5997 < TYPE_PRECISION (TREE_TYPE (op0)))
5998 /* ... or signedness changes for division or modulus,
5999 then we cannot pass through this conversion. */
6000 || (code != MULT_EXPR
6001 && (TYPE_UNSIGNED (ctype)
6002 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6003 /* ... or has undefined overflow while the type converted to
6004 does not, we cannot do the operation in the inner type
6005 as that would introduce undefined overflow. */
6006 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6007 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6008 && !TYPE_OVERFLOW_UNDEFINED (type))))
6009 break;
6010
6011 /* Pass the constant down and see if we can make a simplification. If
6012 we can, replace this expression with the inner simplification for
6013 possible later conversion to our or some other type. */
6014 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6015 && TREE_CODE (t2) == INTEGER_CST
6016 && !TREE_OVERFLOW (t2)
6017 && (0 != (t1 = extract_muldiv (op0, t2, code,
6018 code == MULT_EXPR
6019 ? ctype : NULL_TREE,
6020 strict_overflow_p))))
6021 return t1;
6022 break;
6023
6024 case ABS_EXPR:
6025 /* If widening the type changes it from signed to unsigned, then we
6026 must avoid building ABS_EXPR itself as unsigned. */
6027 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6028 {
6029 tree cstype = (*signed_type_for) (ctype);
6030 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6031 != 0)
6032 {
6033 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6034 return fold_convert (ctype, t1);
6035 }
6036 break;
6037 }
6038 /* If the constant is negative, we cannot simplify this. */
6039 if (tree_int_cst_sgn (c) == -1)
6040 break;
6041 /* FALLTHROUGH */
6042 case NEGATE_EXPR:
6043 /* For division and modulus, type can't be unsigned, as e.g.
6044 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6045 For signed types, even with wrapping overflow, this is fine. */
6046 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6047 break;
6048 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6049 != 0)
6050 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6051 break;
6052
6053 case MIN_EXPR: case MAX_EXPR:
6054 /* If widening the type changes the signedness, then we can't perform
6055 this optimization as that changes the result. */
6056 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6057 break;
6058
6059 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6060 sub_strict_overflow_p = false;
6061 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6062 &sub_strict_overflow_p)) != 0
6063 && (t2 = extract_muldiv (op1, c, code, wide_type,
6064 &sub_strict_overflow_p)) != 0)
6065 {
6066 if (tree_int_cst_sgn (c) < 0)
6067 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6068 if (sub_strict_overflow_p)
6069 *strict_overflow_p = true;
6070 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6071 fold_convert (ctype, t2));
6072 }
6073 break;
6074
6075 case LSHIFT_EXPR: case RSHIFT_EXPR:
6076 /* If the second operand is constant, this is a multiplication
6077 or floor division, by a power of two, so we can treat it that
6078 way unless the multiplier or divisor overflows. Signed
6079 left-shift overflow is implementation-defined rather than
6080 undefined in C90, so do not convert signed left shift into
6081 multiplication. */
6082 if (TREE_CODE (op1) == INTEGER_CST
6083 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6084 /* const_binop may not detect overflow correctly,
6085 so check for it explicitly here. */
6086 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6087 && 0 != (t1 = fold_convert (ctype,
6088 const_binop (LSHIFT_EXPR,
6089 size_one_node,
6090 op1)))
6091 && !TREE_OVERFLOW (t1))
6092 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6093 ? MULT_EXPR : FLOOR_DIV_EXPR,
6094 ctype,
6095 fold_convert (ctype, op0),
6096 t1),
6097 c, code, wide_type, strict_overflow_p);
6098 break;
6099
6100 case PLUS_EXPR: case MINUS_EXPR:
6101 /* See if we can eliminate the operation on both sides. If we can, we
6102 can return a new PLUS or MINUS. If we can't, the only remaining
6103 cases where we can do anything are if the second operand is a
6104 constant. */
6105 sub_strict_overflow_p = false;
6106 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6107 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6108 if (t1 != 0 && t2 != 0
6109 && (code == MULT_EXPR
6110 /* If not multiplication, we can only do this if both operands
6111 are divisible by c. */
6112 || (multiple_of_p (ctype, op0, c)
6113 && multiple_of_p (ctype, op1, c))))
6114 {
6115 if (sub_strict_overflow_p)
6116 *strict_overflow_p = true;
6117 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6118 fold_convert (ctype, t2));
6119 }
6120
6121 /* If this was a subtraction, negate OP1 and set it to be an addition.
6122 This simplifies the logic below. */
6123 if (tcode == MINUS_EXPR)
6124 {
6125 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6126 /* If OP1 was not easily negatable, the constant may be OP0. */
6127 if (TREE_CODE (op0) == INTEGER_CST)
6128 {
6129 tree tem = op0;
6130 op0 = op1;
6131 op1 = tem;
6132 tem = t1;
6133 t1 = t2;
6134 t2 = tem;
6135 }
6136 }
6137
6138 if (TREE_CODE (op1) != INTEGER_CST)
6139 break;
6140
6141 /* If either OP1 or C are negative, this optimization is not safe for
6142 some of the division and remainder types while for others we need
6143 to change the code. */
6144 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6145 {
6146 if (code == CEIL_DIV_EXPR)
6147 code = FLOOR_DIV_EXPR;
6148 else if (code == FLOOR_DIV_EXPR)
6149 code = CEIL_DIV_EXPR;
6150 else if (code != MULT_EXPR
6151 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6152 break;
6153 }
6154
6155 /* If it's a multiply or a division/modulus operation of a multiple
6156 of our constant, do the operation and verify it doesn't overflow. */
6157 if (code == MULT_EXPR
6158 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6159 {
6160 op1 = const_binop (code, fold_convert (ctype, op1),
6161 fold_convert (ctype, c));
6162 /* We allow the constant to overflow with wrapping semantics. */
6163 if (op1 == 0
6164 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6165 break;
6166 }
6167 else
6168 break;
6169
6170 /* If we have an unsigned type, we cannot widen the operation since it
6171 will change the result if the original computation overflowed. */
6172 if (TYPE_UNSIGNED (ctype) && ctype != type)
6173 break;
6174
6175 /* If we were able to eliminate our operation from the first side,
6176 apply our operation to the second side and reform the PLUS. */
6177 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6178 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6179
6180 /* The last case is if we are a multiply. In that case, we can
6181 apply the distributive law to commute the multiply and addition
6182 if the multiplication of the constants doesn't overflow
6183 and overflow is defined. With undefined overflow
6184 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6185 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6186 return fold_build2 (tcode, ctype,
6187 fold_build2 (code, ctype,
6188 fold_convert (ctype, op0),
6189 fold_convert (ctype, c)),
6190 op1);
6191
6192 break;
6193
6194 case MULT_EXPR:
6195 /* We have a special case here if we are doing something like
6196 (C * 8) % 4 since we know that's zero. */
6197 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6198 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6199 /* If the multiplication can overflow we cannot optimize this. */
6200 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6201 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6202 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6203 {
6204 *strict_overflow_p = true;
6205 return omit_one_operand (type, integer_zero_node, op0);
6206 }
6207
6208 /* ... fall through ... */
6209
6210 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6211 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6212 /* If we can extract our operation from the LHS, do so and return a
6213 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6214 do something only if the second operand is a constant. */
6215 if (same_p
6216 && (t1 = extract_muldiv (op0, c, code, wide_type,
6217 strict_overflow_p)) != 0)
6218 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6219 fold_convert (ctype, op1));
6220 else if (tcode == MULT_EXPR && code == MULT_EXPR
6221 && (t1 = extract_muldiv (op1, c, code, wide_type,
6222 strict_overflow_p)) != 0)
6223 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6224 fold_convert (ctype, t1));
6225 else if (TREE_CODE (op1) != INTEGER_CST)
6226 return 0;
6227
6228 /* If these are the same operation types, we can associate them
6229 assuming no overflow. */
6230 if (tcode == code)
6231 {
6232 bool overflow_p = false;
6233 bool overflow_mul_p;
6234 signop sign = TYPE_SIGN (ctype);
6235 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6236 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6237 if (overflow_mul_p
6238 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6239 overflow_p = true;
6240 if (!overflow_p)
6241 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6242 wide_int_to_tree (ctype, mul));
6243 }
6244
6245 /* If these operations "cancel" each other, we have the main
6246 optimizations of this pass, which occur when either constant is a
6247 multiple of the other, in which case we replace this with an
6248 operation of either CODE or TCODE.
6249
6250 If we have an unsigned type, we cannot do this since it will change
6251 the result if the original computation overflowed. */
6252 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6253 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6254 || (tcode == MULT_EXPR
6255 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6256 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6257 && code != MULT_EXPR)))
6258 {
6259 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6260 {
6261 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6262 *strict_overflow_p = true;
6263 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6264 fold_convert (ctype,
6265 const_binop (TRUNC_DIV_EXPR,
6266 op1, c)));
6267 }
6268 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6269 {
6270 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6271 *strict_overflow_p = true;
6272 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6273 fold_convert (ctype,
6274 const_binop (TRUNC_DIV_EXPR,
6275 c, op1)));
6276 }
6277 }
6278 break;
6279
6280 default:
6281 break;
6282 }
6283
6284 return 0;
6285 }
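
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the running example from the comment above extract_muldiv.  The
   equivalence needs the original sum not to overflow, or overflow to
   be undefined; that is what *STRICT_OVERFLOW_P records.  */

static long
example_extract_muldiv (long x, long y)
{
  long before = (x * 8 + y * 16) / 4;
  long after = x * 2 + y * 4;   /* the folded form */
  return before - after;        /* 0 in the absence of overflow */
}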
6286 \f
6287 /* Return a node which has the indicated constant VALUE (either 0 or
6288 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6289 and is of the indicated TYPE. */
6290
6291 tree
6292 constant_boolean_node (bool value, tree type)
6293 {
6294 if (type == integer_type_node)
6295 return value ? integer_one_node : integer_zero_node;
6296 else if (type == boolean_type_node)
6297 return value ? boolean_true_node : boolean_false_node;
6298 else if (TREE_CODE (type) == VECTOR_TYPE)
6299 return build_vector_from_val (type,
6300 build_int_cst (TREE_TYPE (type),
6301 value ? -1 : 0));
6302 else
6303 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6304 }
6305
6306
6307 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6308 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6309 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6310 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6311 COND is the first argument to CODE; otherwise (as in the example
6312 given here), it is the second argument. TYPE is the type of the
6313 original expression. Return NULL_TREE if no simplification is
6314 possible. */
6315
6316 static tree
6317 fold_binary_op_with_conditional_arg (location_t loc,
6318 enum tree_code code,
6319 tree type, tree op0, tree op1,
6320 tree cond, tree arg, int cond_first_p)
6321 {
6322 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6323 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6324 tree test, true_value, false_value;
6325 tree lhs = NULL_TREE;
6326 tree rhs = NULL_TREE;
6327 enum tree_code cond_code = COND_EXPR;
6328
6329 if (TREE_CODE (cond) == COND_EXPR
6330 || TREE_CODE (cond) == VEC_COND_EXPR)
6331 {
6332 test = TREE_OPERAND (cond, 0);
6333 true_value = TREE_OPERAND (cond, 1);
6334 false_value = TREE_OPERAND (cond, 2);
6335 /* If this operand throws an exception, then it does not make
6336 sense to try to perform a logical or arithmetic operation
6337 involving it. */
6338 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6339 lhs = true_value;
6340 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6341 rhs = false_value;
6342 }
6343 else
6344 {
6345 tree testtype = TREE_TYPE (cond);
6346 test = cond;
6347 true_value = constant_boolean_node (true, testtype);
6348 false_value = constant_boolean_node (false, testtype);
6349 }
6350
6351 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6352 cond_code = VEC_COND_EXPR;
6353
6354 /* This transformation is only worthwhile if we don't have to wrap ARG
6355 in a SAVE_EXPR and the operation can be simplified without recursing
6356 on at least one of the branches once it's pushed inside the COND_EXPR. */
6357 if (!TREE_CONSTANT (arg)
6358 && (TREE_SIDE_EFFECTS (arg)
6359 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6360 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6361 return NULL_TREE;
6362
6363 arg = fold_convert_loc (loc, arg_type, arg);
6364 if (lhs == 0)
6365 {
6366 true_value = fold_convert_loc (loc, cond_type, true_value);
6367 if (cond_first_p)
6368 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6369 else
6370 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6371 }
6372 if (rhs == 0)
6373 {
6374 false_value = fold_convert_loc (loc, cond_type, false_value);
6375 if (cond_first_p)
6376 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6377 else
6378 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6379 }
6380
6381 /* Check that we have simplified at least one of the branches. */
6382 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6383 return NULL_TREE;
6384
6385 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6386 }
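
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the second transformation named above.  It pays off because both
   branches simplify once the `+' is pushed inside.  */

static int
example_binop_with_cond (int a, int x, int y)
{
  int before = a + (x < y);                /* a plus 0 or 1 */
  int after = (x < y) ? (a + 1) : (a + 0); /* folded form */
  return before == after;                  /* Always 1.  */
}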
6387
6388 \f
6389 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6390
6391 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6392 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6393 ADDEND is the same as X.
6394
6395 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6396 and finite. The problematic cases are when X is zero, and its mode
6397 has signed zeros. In the case of rounding towards -infinity,
6398 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6399 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6400
6401 bool
6402 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6403 {
6404 if (!real_zerop (addend))
6405 return false;
6406
6407 /* Don't allow the fold with -fsignaling-nans. */
6408 if (HONOR_SNANS (element_mode (type)))
6409 return false;
6410
6411 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6412 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6413 return true;
6414
6415 /* In a vector or complex, we would need to check the sign of all zeros. */
6416 if (TREE_CODE (addend) != REAL_CST)
6417 return false;
6418
6419 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6420 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6421 negate = !negate;
6422
6423 /* The mode has signed zeros, and we have to honor their sign.
6424 In this situation, there is only one case we can return true for.
6425 X - 0 is the same as X unless rounding towards -infinity is
6426 supported. */
6427 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6428 }
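
/* Illustrative sketch (hypothetical helper, not part of the folder):
   why only the subtraction folds when signed zeros are honored.  In
   round-to-nearest, with x == -0.0, x + 0.0 yields +0.0 (the sign
   changes) while x - 0.0 yields -0.0 (the sign is preserved).  */

static double
example_real_zero_addition (double x)
{
  return x - 0.0;   /* foldable to x; x + 0.0 is not */
}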
6429
6430 /* Subroutine of fold() that checks comparisons of built-in math
6431 functions against real constants.
6432
6433 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6434 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6435 is the type of the result and ARG0 and ARG1 are the operands of the
6436 comparison. ARG1 must be a TREE_REAL_CST.
6437
6438 The function returns the constant folded tree if a simplification
6439 can be made, and NULL_TREE otherwise. */
6440
6441 static tree
6442 fold_mathfn_compare (location_t loc,
6443 enum built_in_function fcode, enum tree_code code,
6444 tree type, tree arg0, tree arg1)
6445 {
6446 REAL_VALUE_TYPE c;
6447
6448 if (BUILTIN_SQRT_P (fcode))
6449 {
6450 tree arg = CALL_EXPR_ARG (arg0, 0);
6451 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6452
6453 c = TREE_REAL_CST (arg1);
6454 if (REAL_VALUE_NEGATIVE (c))
6455 {
6456 /* sqrt(x) < y is always false, if y is negative. */
6457 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6458 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6459
6460 /* sqrt(x) > y is always true, if y is negative and we
6461 don't care about NaNs, i.e. negative values of x. */
6462 if (code == NE_EXPR || !HONOR_NANS (mode))
6463 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6464
6465 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6466 return fold_build2_loc (loc, GE_EXPR, type, arg,
6467 build_real (TREE_TYPE (arg), dconst0));
6468 }
6469 else if (code == GT_EXPR || code == GE_EXPR)
6470 {
6471 REAL_VALUE_TYPE c2;
6472
6473 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6474 real_convert (&c2, mode, &c2);
6475
6476 if (REAL_VALUE_ISINF (c2))
6477 {
6478 /* sqrt(x) > y is x == +Inf, when y is very large. */
6479 if (HONOR_INFINITIES (mode))
6480 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6481 build_real (TREE_TYPE (arg), c2));
6482
6483 /* sqrt(x) > y is always false, when y is very large
6484 and we don't care about infinities. */
6485 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6486 }
6487
6488 /* sqrt(x) > c is the same as x > c*c. */
6489 return fold_build2_loc (loc, code, type, arg,
6490 build_real (TREE_TYPE (arg), c2));
6491 }
6492 else if (code == LT_EXPR || code == LE_EXPR)
6493 {
6494 REAL_VALUE_TYPE c2;
6495
6496 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6497 real_convert (&c2, mode, &c2);
6498
6499 if (REAL_VALUE_ISINF (c2))
6500 {
6501 /* sqrt(x) < y is always true, when y is a very large
6502 value and we don't care about NaNs or Infinities. */
6503 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6504 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6505
6506 /* sqrt(x) < y is x != +Inf when y is very large and we
6507 don't care about NaNs. */
6508 if (! HONOR_NANS (mode))
6509 return fold_build2_loc (loc, NE_EXPR, type, arg,
6510 build_real (TREE_TYPE (arg), c2));
6511
6512 /* sqrt(x) < y is x >= 0 when y is very large and we
6513 don't care about Infinities. */
6514 if (! HONOR_INFINITIES (mode))
6515 return fold_build2_loc (loc, GE_EXPR, type, arg,
6516 build_real (TREE_TYPE (arg), dconst0));
6517
6518 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6519 arg = save_expr (arg);
6520 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6521 fold_build2_loc (loc, GE_EXPR, type, arg,
6522 build_real (TREE_TYPE (arg),
6523 dconst0)),
6524 fold_build2_loc (loc, NE_EXPR, type, arg,
6525 build_real (TREE_TYPE (arg),
6526 c2)));
6527 }
6528
6529 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6530 if (! HONOR_NANS (mode))
6531 return fold_build2_loc (loc, code, type, arg,
6532 build_real (TREE_TYPE (arg), c2));
6533
6534 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6535 arg = save_expr (arg);
6536 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6537 fold_build2_loc (loc, GE_EXPR, type, arg,
6538 build_real (TREE_TYPE (arg),
6539 dconst0)),
6540 fold_build2_loc (loc, code, type, arg,
6541 build_real (TREE_TYPE (arg),
6542 c2)));
6543 }
6544 }
6545
6546 return NULL_TREE;
6547 }
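
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the central sqrt identity used above.  In exact arithmetic, for
   finite nonnegative c, sqrt being monotone gives
   sqrt(x) > c <==> x > c*c; the code above exists to guard the NaN,
   negative-c and overflowing-c*c cases.  */

static int
example_sqrt_compare (double x, double c)
{
  extern double sqrt (double);
  /* Returns whether the two forms agree for this input.  */
  return (sqrt (x) > c) == (x > c * c);
}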
6548
6549 /* Subroutine of fold() that optimizes comparisons against Infinities,
6550 either +Inf or -Inf.
6551
6552 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6553 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6554 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6555
6556 The function returns the constant folded tree if a simplification
6557 can be made, and NULL_TREE otherwise. */
6558
6559 static tree
6560 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6561 tree arg0, tree arg1)
6562 {
6563 machine_mode mode;
6564 REAL_VALUE_TYPE max;
6565 tree temp;
6566 bool neg;
6567
6568 mode = TYPE_MODE (TREE_TYPE (arg0));
6569
6570 /* For negative infinity swap the sense of the comparison. */
6571 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6572 if (neg)
6573 code = swap_tree_comparison (code);
6574
6575 switch (code)
6576 {
6577 case GT_EXPR:
6578 /* x > +Inf is always false, if we ignore sNaNs. */
6579 if (HONOR_SNANS (mode))
6580 return NULL_TREE;
6581 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6582
6583 case LE_EXPR:
6584 /* x <= +Inf is always true, if we don't care about NaNs. */
6585 if (! HONOR_NANS (mode))
6586 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6587
6588 /* x <= +Inf is the same as x == x, i.e. isfinite(x). */
6589 arg0 = save_expr (arg0);
6590 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6591
6592 case EQ_EXPR:
6593 case GE_EXPR:
6594 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6595 real_maxval (&max, neg, mode);
6596 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6597 arg0, build_real (TREE_TYPE (arg0), max));
6598
6599 case LT_EXPR:
6600 /* x < +Inf is always equal to x <= DBL_MAX. */
6601 real_maxval (&max, neg, mode);
6602 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6603 arg0, build_real (TREE_TYPE (arg0), max));
6604
6605 case NE_EXPR:
6606 /* x != +Inf is always equal to !(x > DBL_MAX). */
6607 real_maxval (&max, neg, mode);
6608 if (! HONOR_NANS (mode))
6609 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6610 arg0, build_real (TREE_TYPE (arg0), max));
6611
6612 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6613 arg0, build_real (TREE_TYPE (arg0), max));
6614 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6615
6616 default:
6617 break;
6618 }
6619
6620 return NULL_TREE;
6621 }
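
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the LT_EXPR case above.  No double lies strictly between DBL_MAX
   and +Inf, so x < +Inf is the same test as x <= DBL_MAX; NaNs fail
   both sides alike.  */

static int
example_inf_compare (double x)
{
  double max = 1.7976931348623157e+308;   /* DBL_MAX */
  double inf = 1.0 / 0.0;                 /* IEEE +Inf */
  return (x < inf) == (x <= max);         /* Always 1.  */
}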
6622
6623 /* Subroutine of fold() that optimizes comparisons of a division by
6624 a nonzero integer constant against an integer constant, i.e.
6625 X/C1 op C2.
6626
6627 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6628 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6629 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6630
6631 The function returns the constant folded tree if a simplification
6632 can be made, and NULL_TREE otherwise. */
6633
6634 static tree
6635 fold_div_compare (location_t loc,
6636 enum tree_code code, tree type, tree arg0, tree arg1)
6637 {
6638 tree prod, tmp, hi, lo;
6639 tree arg00 = TREE_OPERAND (arg0, 0);
6640 tree arg01 = TREE_OPERAND (arg0, 1);
6641 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6642 bool neg_overflow = false;
6643 bool overflow;
6644
6645 /* We have to do this the hard way to detect unsigned overflow.
6646 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6647 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6648 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6649 neg_overflow = false;
6650
6651 if (sign == UNSIGNED)
6652 {
6653 tmp = int_const_binop (MINUS_EXPR, arg01,
6654 build_int_cst (TREE_TYPE (arg01), 1));
6655 lo = prod;
6656
6657 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6658 val = wi::add (prod, tmp, sign, &overflow);
6659 hi = force_fit_type (TREE_TYPE (arg00), val,
6660 -1, overflow | TREE_OVERFLOW (prod));
6661 }
6662 else if (tree_int_cst_sgn (arg01) >= 0)
6663 {
6664 tmp = int_const_binop (MINUS_EXPR, arg01,
6665 build_int_cst (TREE_TYPE (arg01), 1));
6666 switch (tree_int_cst_sgn (arg1))
6667 {
6668 case -1:
6669 neg_overflow = true;
6670 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6671 hi = prod;
6672 break;
6673
6674 case 0:
6675 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6676 hi = tmp;
6677 break;
6678
6679 case 1:
6680 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6681 lo = prod;
6682 break;
6683
6684 default:
6685 gcc_unreachable ();
6686 }
6687 }
6688 else
6689 {
6690 /* A negative divisor reverses the relational operators. */
6691 code = swap_tree_comparison (code);
6692
6693 tmp = int_const_binop (PLUS_EXPR, arg01,
6694 build_int_cst (TREE_TYPE (arg01), 1));
6695 switch (tree_int_cst_sgn (arg1))
6696 {
6697 case -1:
6698 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6699 lo = prod;
6700 break;
6701
6702 case 0:
6703 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6704 lo = tmp;
6705 break;
6706
6707 case 1:
6708 neg_overflow = true;
6709 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6710 hi = prod;
6711 break;
6712
6713 default:
6714 gcc_unreachable ();
6715 }
6716 }
6717
6718 switch (code)
6719 {
6720 case EQ_EXPR:
6721 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6722 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6723 if (TREE_OVERFLOW (hi))
6724 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6725 if (TREE_OVERFLOW (lo))
6726 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6727 return build_range_check (loc, type, arg00, 1, lo, hi);
6728
6729 case NE_EXPR:
6730 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6731 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6732 if (TREE_OVERFLOW (hi))
6733 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6734 if (TREE_OVERFLOW (lo))
6735 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6736 return build_range_check (loc, type, arg00, 0, lo, hi);
6737
6738 case LT_EXPR:
6739 if (TREE_OVERFLOW (lo))
6740 {
6741 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6742 return omit_one_operand_loc (loc, type, tmp, arg00);
6743 }
6744 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6745
6746 case LE_EXPR:
6747 if (TREE_OVERFLOW (hi))
6748 {
6749 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6750 return omit_one_operand_loc (loc, type, tmp, arg00);
6751 }
6752 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6753
6754 case GT_EXPR:
6755 if (TREE_OVERFLOW (hi))
6756 {
6757 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6758 return omit_one_operand_loc (loc, type, tmp, arg00);
6759 }
6760 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6761
6762 case GE_EXPR:
6763 if (TREE_OVERFLOW (lo))
6764 {
6765 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6766 return omit_one_operand_loc (loc, type, tmp, arg00);
6767 }
6768 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6769
6770 default:
6771 break;
6772 }
6773
6774 return NULL_TREE;
6775 }
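
/* Illustrative sketch (hypothetical helper, not part of the folder):
   the unsigned EQ_EXPR case above.  Truncating division maps the
   whole interval [6, 8] to 2, so the compare becomes a range check
   with lo = 2*3 and hi = lo + (3-1).  */

static int
example_div_compare (unsigned int x)
{
  int before = x / 3 == 2;
  int after = x >= 6 && x <= 8;   /* the folded range check */
  return before == after;         /* Always 1.  */
}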
6776
6777
6778 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6779 equality/inequality test, then return a simplified form of the test
6780 using a sign testing. Otherwise return NULL. TYPE is the desired
6781 result type. */
6782
6783 static tree
6784 fold_single_bit_test_into_sign_test (location_t loc,
6785 enum tree_code code, tree arg0, tree arg1,
6786 tree result_type)
6787 {
6788 /* If this is testing a single bit, we can optimize the test. */
6789 if ((code == NE_EXPR || code == EQ_EXPR)
6790 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6791 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6792 {
6793 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6794 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6795 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6796
6797 if (arg00 != NULL_TREE
6798 /* This is only a win if casting to a signed type is cheap,
6799 i.e. when arg00's type is not a partial mode. */
6800 && TYPE_PRECISION (TREE_TYPE (arg00))
6801 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6802 {
6803 tree stype = signed_type_for (TREE_TYPE (arg00));
6804 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6805 result_type,
6806 fold_convert_loc (loc, stype, arg00),
6807 build_int_cst (stype, 0));
6808 }
6809 }
6810
6811 return NULL_TREE;
6812 }
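
/* Illustrative sketch (hypothetical helper, not part of the folder):
   when the tested bit is the sign bit, the AND disappears entirely
   in favor of a signed comparison.  Assumes 32-bit int.  */

static int
example_sign_bit_test (int a)
{
  int before = (a & 0x80000000u) != 0;  /* (A & C) != 0, C the sign bit */
  int after = a < 0;                    /* the folded form */
  return before == after;               /* Always 1.  */
}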
6813
6814 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6815 equality/inequality test, then return a simplified form of
6816 the test using shifts and logical operations. Otherwise return
6817 NULL. TYPE is the desired result type. */
6818
6819 tree
6820 fold_single_bit_test (location_t loc, enum tree_code code,
6821 tree arg0, tree arg1, tree result_type)
6822 {
6823 /* If this is testing a single bit, we can optimize the test. */
6824 if ((code == NE_EXPR || code == EQ_EXPR)
6825 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6826 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6827 {
6828 tree inner = TREE_OPERAND (arg0, 0);
6829 tree type = TREE_TYPE (arg0);
6830 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6831 machine_mode operand_mode = TYPE_MODE (type);
6832 int ops_unsigned;
6833 tree signed_type, unsigned_type, intermediate_type;
6834 tree tem, one;
6835
6836 /* First, see if we can fold the single bit test into a sign-bit
6837 test. */
6838 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6839 result_type);
6840 if (tem)
6841 return tem;
6842
6843 /* Otherwise we have (A & C) != 0 where C is a single bit,
6844 convert that into ((A >> C2) & 1), where C2 = log2(C).
6845 Similarly for (A & C) == 0. */
6846
6847 /* If INNER is a right shift by a constant and it plus BITNUM does
6848 not overflow, adjust BITNUM and INNER. */
6849 if (TREE_CODE (inner) == RSHIFT_EXPR
6850 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6851 && bitnum < TYPE_PRECISION (type)
6852 && wi::ltu_p (TREE_OPERAND (inner, 1),
6853 TYPE_PRECISION (type) - bitnum))
6854 {
6855 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6856 inner = TREE_OPERAND (inner, 0);
6857 }
6858
6859 /* If we are going to be able to omit the AND below, we must do our
6860 operations as unsigned. If we must use the AND, we have a choice.
6861 Normally unsigned is faster, but for some machines signed is. */
6862 #ifdef LOAD_EXTEND_OP
6863 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6864 && !flag_syntax_only) ? 0 : 1;
6865 #else
6866 ops_unsigned = 1;
6867 #endif
6868
6869 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6870 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6871 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6872 inner = fold_convert_loc (loc, intermediate_type, inner);
6873
6874 if (bitnum != 0)
6875 inner = build2 (RSHIFT_EXPR, intermediate_type,
6876 inner, size_int (bitnum));
6877
6878 one = build_int_cst (intermediate_type, 1);
6879
6880 if (code == EQ_EXPR)
6881 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6882
6883 /* Put the AND last so it can combine with more things. */
6884 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6885
6886 /* Make sure to return the proper type. */
6887 inner = fold_convert_loc (loc, result_type, inner);
6888
6889 return inner;
6890 }
6891 return NULL_TREE;
6892 }
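
/* Illustrative sketch (hypothetical helper, not part of the folder):
   a test of a non-sign bit becomes shift-and-mask; for EQ_EXPR the
   result is additionally XORed with 1, as done above.  */

static int
example_single_bit_test (unsigned int a)
{
  int before_ne = (a & 8) != 0;
  int after_ne = (a >> 3) & 1;          /* bitnum = log2 (8) = 3 */
  int before_eq = (a & 8) == 0;
  int after_eq = ((a >> 3) & 1) ^ 1;
  return before_ne == after_ne && before_eq == after_eq;  /* Always 1. */
}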
6893
6894 /* Check whether we are allowed to reorder operands arg0 and arg1,
6895 such that the evaluation of arg1 occurs before arg0. */
6896
6897 static bool
6898 reorder_operands_p (const_tree arg0, const_tree arg1)
6899 {
6900 if (! flag_evaluation_order)
6901 return true;
6902 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6903 return true;
6904 return ! TREE_SIDE_EFFECTS (arg0)
6905 && ! TREE_SIDE_EFFECTS (arg1);
6906 }
6907
6908 /* Test whether it is preferable to swap two operands, ARG0 and
6909 ARG1, for example because ARG0 is an integer constant and ARG1
6910 isn't. If REORDER is true, only recommend swapping if we can
6911 evaluate the operands in reverse order. */
6912
6913 bool
6914 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6915 {
6916 if (CONSTANT_CLASS_P (arg1))
6917 return 0;
6918 if (CONSTANT_CLASS_P (arg0))
6919 return 1;
6920
6921 STRIP_NOPS (arg0);
6922 STRIP_NOPS (arg1);
6923
6924 if (TREE_CONSTANT (arg1))
6925 return 0;
6926 if (TREE_CONSTANT (arg0))
6927 return 1;
6928
6929 if (reorder && flag_evaluation_order
6930 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6931 return 0;
6932
6933 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6934 for commutative and comparison operators. Ensuring a canonical
6935 form allows the optimizers to find additional redundancies without
6936 having to explicitly check for both orderings. */
6937 if (TREE_CODE (arg0) == SSA_NAME
6938 && TREE_CODE (arg1) == SSA_NAME
6939 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6940 return 1;
6941
6942 /* Put SSA_NAMEs last. */
6943 if (TREE_CODE (arg1) == SSA_NAME)
6944 return 0;
6945 if (TREE_CODE (arg0) == SSA_NAME)
6946 return 1;
6947
6948 /* Put variables last. */
6949 if (DECL_P (arg1))
6950 return 0;
6951 if (DECL_P (arg0))
6952 return 1;
6953
6954 return 0;
6955 }
6956
6957 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6958 ARG0 is extended to a wider type. */
6959
6960 static tree
6961 fold_widened_comparison (location_t loc, enum tree_code code,
6962 tree type, tree arg0, tree arg1)
6963 {
6964 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6965 tree arg1_unw;
6966 tree shorter_type, outer_type;
6967 tree min, max;
6968 bool above, below;
6969
6970 if (arg0_unw == arg0)
6971 return NULL_TREE;
6972 shorter_type = TREE_TYPE (arg0_unw);
6973
6974 #ifdef HAVE_canonicalize_funcptr_for_compare
6975 /* Disable this optimization if we're casting a function pointer
6976 type on targets that require function pointer canonicalization. */
6977 if (HAVE_canonicalize_funcptr_for_compare
6978 && TREE_CODE (shorter_type) == POINTER_TYPE
6979 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6980 return NULL_TREE;
6981 #endif
6982
6983 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6984 return NULL_TREE;
6985
6986 arg1_unw = get_unwidened (arg1, NULL_TREE);
6987
6988 /* If possible, express the comparison in the shorter mode. */
6989 if ((code == EQ_EXPR || code == NE_EXPR
6990 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6991 && (TREE_TYPE (arg1_unw) == shorter_type
6992 || ((TYPE_PRECISION (shorter_type)
6993 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6994 && (TYPE_UNSIGNED (shorter_type)
6995 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6996 || (TREE_CODE (arg1_unw) == INTEGER_CST
6997 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6998 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6999 && int_fits_type_p (arg1_unw, shorter_type))))
7000 return fold_build2_loc (loc, code, type, arg0_unw,
7001 fold_convert_loc (loc, shorter_type, arg1_unw));
7002
7003 if (TREE_CODE (arg1_unw) != INTEGER_CST
7004 || TREE_CODE (shorter_type) != INTEGER_TYPE
7005 || !int_fits_type_p (arg1_unw, shorter_type))
7006 return NULL_TREE;
7007
7008 /* If we are comparing with an integer that does not fit into the range
7009 of the shorter type, the result is known. */
7010 outer_type = TREE_TYPE (arg1_unw);
7011 min = lower_bound_in_type (outer_type, shorter_type);
7012 max = upper_bound_in_type (outer_type, shorter_type);
7013
7014 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7015 max, arg1_unw));
7016 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7017 arg1_unw, min));
7018
7019 switch (code)
7020 {
7021 case EQ_EXPR:
7022 if (above || below)
7023 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7024 break;
7025
7026 case NE_EXPR:
7027 if (above || below)
7028 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7029 break;
7030
7031 case LT_EXPR:
7032 case LE_EXPR:
7033 if (above)
7034 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7035 else if (below)
7036 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7037 break;

7038 case GT_EXPR:
7039 case GE_EXPR:
7040 if (above)
7041 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7042 else if (below)
7043 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7044 break;

7045 default:
7046 break;
7047 }
7048
7049 return NULL_TREE;
7050 }
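
/* A sketch (illustrative only, not part of GCC; #if 0 keeps it out of
   the build) of the "result is known" case above: once the constant
   lies outside the range of the shorter type, every widened value
   compares the same way against it.  */
#if 0
#include <assert.h>
#include <limits.h>

int
main (void)
{
  for (int i = SCHAR_MIN; i <= SCHAR_MAX; i++)
    {
      signed char c = (signed char) i;
      /* 1000 is above SCHAR_MAX, so the comparison folds to a
	 constant: < is always true, == and > always false.  */
      assert (((int) c < 1000) == 1);
      assert (((int) c == 1000) == 0);
      assert (((int) c > 1000) == 0);
    }
  return 0;
}
#endif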
7051
7052 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7053 ARG0 just the signedness is changed. */
7054
7055 static tree
7056 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7057 tree arg0, tree arg1)
7058 {
7059 tree arg0_inner;
7060 tree inner_type, outer_type;
7061
7062 if (!CONVERT_EXPR_P (arg0))
7063 return NULL_TREE;
7064
7065 outer_type = TREE_TYPE (arg0);
7066 arg0_inner = TREE_OPERAND (arg0, 0);
7067 inner_type = TREE_TYPE (arg0_inner);
7068
7069 #ifdef HAVE_canonicalize_funcptr_for_compare
7070 /* Disable this optimization if we're casting a function pointer
7071 type on targets that require function pointer canonicalization. */
7072 if (HAVE_canonicalize_funcptr_for_compare
7073 && TREE_CODE (inner_type) == POINTER_TYPE
7074 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7075 return NULL_TREE;
7076 #endif
7077
7078 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7079 return NULL_TREE;
7080
7081 if (TREE_CODE (arg1) != INTEGER_CST
7082 && !(CONVERT_EXPR_P (arg1)
7083 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7084 return NULL_TREE;
7085
7086 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7087 && code != NE_EXPR
7088 && code != EQ_EXPR)
7089 return NULL_TREE;
7090
7091 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7092 return NULL_TREE;
7093
7094 if (TREE_CODE (arg1) == INTEGER_CST)
7095 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7096 TREE_OVERFLOW (arg1));
7097 else
7098 arg1 = fold_convert_loc (loc, inner_type, arg1);
7099
7100 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7101 }
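
/* A sketch (illustrative only, not part of GCC) of the sign-change
   case above: for EQ_EXPR/NE_EXPR a same-precision change of
   signedness on the compared operand cannot change the result.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -128; a <= 127; a++)
    for (int b = -128; b <= 127; b++)
      {
	/* (unsigned) a == (unsigned) b iff a == b, because the cast
	   permutes values without merging any.  Ordered comparisons
	   are NOT preserved, which is why the code above rejects
	   them when the signedness differs.  */
	assert (((unsigned) a == (unsigned) b) == (a == b));
      }
  return 0;
}
#endif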
7102
7103
7104 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7105 means A >= Y && A != MAX, but in this case we know that
7106 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
7107
7108 static tree
7109 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7110 {
7111 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7112
7113 if (TREE_CODE (bound) == LT_EXPR)
7114 a = TREE_OPERAND (bound, 0);
7115 else if (TREE_CODE (bound) == GT_EXPR)
7116 a = TREE_OPERAND (bound, 1);
7117 else
7118 return NULL_TREE;
7119
7120 typea = TREE_TYPE (a);
7121 if (!INTEGRAL_TYPE_P (typea)
7122 && !POINTER_TYPE_P (typea))
7123 return NULL_TREE;
7124
7125 if (TREE_CODE (ineq) == LT_EXPR)
7126 {
7127 a1 = TREE_OPERAND (ineq, 1);
7128 y = TREE_OPERAND (ineq, 0);
7129 }
7130 else if (TREE_CODE (ineq) == GT_EXPR)
7131 {
7132 a1 = TREE_OPERAND (ineq, 0);
7133 y = TREE_OPERAND (ineq, 1);
7134 }
7135 else
7136 return NULL_TREE;
7137
7138 if (TREE_TYPE (a1) != typea)
7139 return NULL_TREE;
7140
7141 if (POINTER_TYPE_P (typea))
7142 {
7143 /* Convert the pointer types into integers before taking the difference. */
7144 tree ta = fold_convert_loc (loc, ssizetype, a);
7145 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7146 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7147 }
7148 else
7149 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7150
7151 if (!diff || !integer_onep (diff))
7152 return NULL_TREE;
7153
7154 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7155 }
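
/* A sketch (illustrative only, not part of GCC) of why the bound
   A < X is needed above: A + 1 > Y matches A >= Y except when A + 1
   wraps at the type's maximum, and A < X rules that case out.
   Modelled here with 8-bit modular arithmetic.  */
#if 0
#include <assert.h>

int
main (void)
{
  /* a < 255 plays the role of A < X <= MAX: no wrap is possible.  */
  for (unsigned a = 0; a < 255; a++)
    for (unsigned y = 0; y <= 255; y++)
      assert ((((a + 1) & 0xff) > y) == (a >= y));

  /* At a == 255 the increment wraps to 0 and the equivalence
     breaks, so the rewrite is only valid under the bound.  */
  assert ((((255 + 1) & 0xff) > 0) == 0);
  assert ((255 >= 0) == 1);
  return 0;
}
#endif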
7156
7157 /* Fold a sum or difference of at least one multiplication.
7158 Returns the folded tree or NULL if no simplification could be made. */
7159
7160 static tree
7161 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7162 tree arg0, tree arg1)
7163 {
7164 tree arg00, arg01, arg10, arg11;
7165 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7166
7167 /* (A * C) +- (B * C) -> (A+-B) * C.
7168 (A * C) +- A -> A * (C+-1).
7169 We are most concerned about the case where C is a constant,
7170 but other combinations show up during loop reduction. Since
7171 it is not difficult, try all four possibilities. */
7172
7173 if (TREE_CODE (arg0) == MULT_EXPR)
7174 {
7175 arg00 = TREE_OPERAND (arg0, 0);
7176 arg01 = TREE_OPERAND (arg0, 1);
7177 }
7178 else if (TREE_CODE (arg0) == INTEGER_CST)
7179 {
7180 arg00 = build_one_cst (type);
7181 arg01 = arg0;
7182 }
7183 else
7184 {
7185 /* We cannot generate constant 1 for fract. */
7186 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7187 return NULL_TREE;
7188 arg00 = arg0;
7189 arg01 = build_one_cst (type);
7190 }
7191 if (TREE_CODE (arg1) == MULT_EXPR)
7192 {
7193 arg10 = TREE_OPERAND (arg1, 0);
7194 arg11 = TREE_OPERAND (arg1, 1);
7195 }
7196 else if (TREE_CODE (arg1) == INTEGER_CST)
7197 {
7198 arg10 = build_one_cst (type);
7199 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7200 the purpose of this canonicalization. */
7201 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7202 && negate_expr_p (arg1)
7203 && code == PLUS_EXPR)
7204 {
7205 arg11 = negate_expr (arg1);
7206 code = MINUS_EXPR;
7207 }
7208 else
7209 arg11 = arg1;
7210 }
7211 else
7212 {
7213 /* We cannot generate constant 1 for fract. */
7214 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7215 return NULL_TREE;
7216 arg10 = arg1;
7217 arg11 = build_one_cst (type);
7218 }
7219 same = NULL_TREE;
7220
7221 if (operand_equal_p (arg01, arg11, 0))
7222 same = arg01, alt0 = arg00, alt1 = arg10;
7223 else if (operand_equal_p (arg00, arg10, 0))
7224 same = arg00, alt0 = arg01, alt1 = arg11;
7225 else if (operand_equal_p (arg00, arg11, 0))
7226 same = arg00, alt0 = arg01, alt1 = arg10;
7227 else if (operand_equal_p (arg01, arg10, 0))
7228 same = arg01, alt0 = arg00, alt1 = arg11;
7229
7230 /* No identical multiplicands; see if we can find a common
7231 power-of-two factor in non-power-of-two multiplies. This
7232 can help in multi-dimensional array access. */
7233 else if (tree_fits_shwi_p (arg01)
7234 && tree_fits_shwi_p (arg11))
7235 {
7236 HOST_WIDE_INT int01, int11, tmp;
7237 bool swap = false;
7238 tree maybe_same;
7239 int01 = tree_to_shwi (arg01);
7240 int11 = tree_to_shwi (arg11);
7241
7242 /* Move min of absolute values to int11. */
7243 if (absu_hwi (int01) < absu_hwi (int11))
7244 {
7245 tmp = int01, int01 = int11, int11 = tmp;
7246 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7247 maybe_same = arg01;
7248 swap = true;
7249 }
7250 else
7251 maybe_same = arg11;
7252
7253 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7254 /* The remainder should not be a constant, otherwise we
7255 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7256 increased the number of multiplications necessary. */
7257 && TREE_CODE (arg10) != INTEGER_CST)
7258 {
7259 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7260 build_int_cst (TREE_TYPE (arg00),
7261 int01 / int11));
7262 alt1 = arg10;
7263 same = maybe_same;
7264 if (swap)
7265 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7266 }
7267 }
7268
7269 if (same)
7270 return fold_build2_loc (loc, MULT_EXPR, type,
7271 fold_build2_loc (loc, code, type,
7272 fold_convert_loc (loc, type, alt0),
7273 fold_convert_loc (loc, type, alt1)),
7274 fold_convert_loc (loc, type, same));
7275
7276 return NULL_TREE;
7277 }
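
/* A sketch (illustrative only, not part of GCC) of the factorings
   above: identical multiplicands, a multiplicand equal to one of the
   operands, and a common power-of-two factor pulled out of two
   different constants.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = -100; a <= 100; a++)
    for (int b = -100; b <= 100; b++)
      {
	/* (A * C) + (B * C) -> (A + B) * C.  */
	assert (a * 6 + b * 6 == (a + b) * 6);
	/* (A * C) - A -> A * (C - 1).  */
	assert (a * 6 - a == a * 5);
	/* Common power-of-two factor: a * 4 + b * 2
	   -> (a * 2 + b) * 2.  */
	assert (a * 4 + b * 2 == (a * 2 + b) * 2);
      }
  return 0;
}
#endif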
7278
7279 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7280 specified by EXPR into the buffer PTR of length LEN bytes.
7281 Return the number of bytes placed in the buffer, or zero
7282 upon failure. */
7283
7284 static int
7285 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7286 {
7287 tree type = TREE_TYPE (expr);
7288 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7289 int byte, offset, word, words;
7290 unsigned char value;
7291
7292 if ((off == -1 && total_bytes > len)
7293 || off >= total_bytes)
7294 return 0;
7295 if (off == -1)
7296 off = 0;
7297 words = total_bytes / UNITS_PER_WORD;
7298
7299 for (byte = 0; byte < total_bytes; byte++)
7300 {
7301 int bitpos = byte * BITS_PER_UNIT;
7302 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7303 number of bytes. */
7304 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7305
7306 if (total_bytes > UNITS_PER_WORD)
7307 {
7308 word = byte / UNITS_PER_WORD;
7309 if (WORDS_BIG_ENDIAN)
7310 word = (words - 1) - word;
7311 offset = word * UNITS_PER_WORD;
7312 if (BYTES_BIG_ENDIAN)
7313 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7314 else
7315 offset += byte % UNITS_PER_WORD;
7316 }
7317 else
7318 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7319 if (offset >= off
7320 && offset - off < len)
7321 ptr[offset - off] = value;
7322 }
7323 return MIN (len, total_bytes - off);
7324 }
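
/* A sketch (illustrative only, not part of GCC) of the layout the
   loop above produces on a little-endian target with a word size of
   at least four bytes: byte N of the buffer holds value bits
   [8*N, 8*N + 8).  */
#if 0
#include <assert.h>
#include <stdint.h>

static void
encode_le32 (uint32_t val, unsigned char *ptr)
{
  for (int byte = 0; byte < 4; byte++)
    ptr[byte] = (val >> (byte * 8)) & 0xff;
}

int
main (void)
{
  unsigned char buf[4];
  encode_le32 (0x11223344u, buf);
  /* Least significant byte first; a big-endian target would
     reverse the order via the BYTES_BIG_ENDIAN branch above.  */
  assert (buf[0] == 0x44 && buf[1] == 0x33
	  && buf[2] == 0x22 && buf[3] == 0x11);
  return 0;
}
#endif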
7325
7326
7327 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7328 specified by EXPR into the buffer PTR of length LEN bytes.
7329 Return the number of bytes placed in the buffer, or zero
7330 upon failure. */
7331
7332 static int
7333 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7334 {
7335 tree type = TREE_TYPE (expr);
7336 machine_mode mode = TYPE_MODE (type);
7337 int total_bytes = GET_MODE_SIZE (mode);
7338 FIXED_VALUE_TYPE value;
7339 tree i_value, i_type;
7340
7341 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7342 return 0;
7343
7344 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7345
7346 if (NULL_TREE == i_type
7347 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7348 return 0;
7349
7350 value = TREE_FIXED_CST (expr);
7351 i_value = double_int_to_tree (i_type, value.data);
7352
7353 return native_encode_int (i_value, ptr, len, off);
7354 }
7355
7356
7357 /* Subroutine of native_encode_expr. Encode the REAL_CST
7358 specified by EXPR into the buffer PTR of length LEN bytes.
7359 Return the number of bytes placed in the buffer, or zero
7360 upon failure. */
7361
7362 static int
7363 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7364 {
7365 tree type = TREE_TYPE (expr);
7366 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7367 int byte, offset, word, words, bitpos;
7368 unsigned char value;
7369
7370 /* There are always 32 bits in each long, no matter the size of
7371 the host's long. We handle floating point representations with
7372 up to 192 bits. */
7373 long tmp[6];
7374
7375 if ((off == -1 && total_bytes > len)
7376 || off >= total_bytes)
7377 return 0;
7378 if (off == -1)
7379 off = 0;
7380 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7381
7382 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7383
7384 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7385 bitpos += BITS_PER_UNIT)
7386 {
7387 byte = (bitpos / BITS_PER_UNIT) & 3;
7388 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7389
7390 if (UNITS_PER_WORD < 4)
7391 {
7392 word = byte / UNITS_PER_WORD;
7393 if (WORDS_BIG_ENDIAN)
7394 word = (words - 1) - word;
7395 offset = word * UNITS_PER_WORD;
7396 if (BYTES_BIG_ENDIAN)
7397 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7398 else
7399 offset += byte % UNITS_PER_WORD;
7400 }
7401 else
7402 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7403 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7404 if (offset >= off
7405 && offset - off < len)
7406 ptr[offset - off] = value;
7407 }
7408 return MIN (len, total_bytes - off);
7409 }
7410
7411 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7412 specified by EXPR into the buffer PTR of length LEN bytes.
7413 Return the number of bytes placed in the buffer, or zero
7414 upon failure. */
7415
7416 static int
7417 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7418 {
7419 int rsize, isize;
7420 tree part;
7421
7422 part = TREE_REALPART (expr);
7423 rsize = native_encode_expr (part, ptr, len, off);
7424 if (off == -1
7425 && rsize == 0)
7426 return 0;
7427 part = TREE_IMAGPART (expr);
7428 if (off != -1)
7429 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7430 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7431 if (off == -1
7432 && isize != rsize)
7433 return 0;
7434 return rsize + isize;
7435 }
7436
7437
7438 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7439 specified by EXPR into the buffer PTR of length LEN bytes.
7440 Return the number of bytes placed in the buffer, or zero
7441 upon failure. */
7442
7443 static int
7444 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7445 {
7446 unsigned i, count;
7447 int size, offset;
7448 tree itype, elem;
7449
7450 offset = 0;
7451 count = VECTOR_CST_NELTS (expr);
7452 itype = TREE_TYPE (TREE_TYPE (expr));
7453 size = GET_MODE_SIZE (TYPE_MODE (itype));
7454 for (i = 0; i < count; i++)
7455 {
7456 if (off >= size)
7457 {
7458 off -= size;
7459 continue;
7460 }
7461 elem = VECTOR_CST_ELT (expr, i);
7462 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7463 if ((off == -1 && res != size)
7464 || res == 0)
7465 return 0;
7466 offset += res;
7467 if (offset >= len)
7468 return offset;
7469 if (off != -1)
7470 off = 0;
7471 }
7472 return offset;
7473 }
7474
7475
7476 /* Subroutine of native_encode_expr. Encode the STRING_CST
7477 specified by EXPR into the buffer PTR of length LEN bytes.
7478 Return the number of bytes placed in the buffer, or zero
7479 upon failure. */
7480
7481 static int
7482 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7483 {
7484 tree type = TREE_TYPE (expr);
7485 HOST_WIDE_INT total_bytes;
7486
7487 if (TREE_CODE (type) != ARRAY_TYPE
7488 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7489 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7490 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7491 return 0;
7492 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7493 if ((off == -1 && total_bytes > len)
7494 || off >= total_bytes)
7495 return 0;
7496 if (off == -1)
7497 off = 0;
7498 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7499 {
7500 int written = 0;
7501 if (off < TREE_STRING_LENGTH (expr))
7502 {
7503 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7504 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7505 }
7506 memset (ptr + written, 0,
7507 MIN (total_bytes - written, len - written));
7508 }
7509 else
7510 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7511 return MIN (total_bytes - off, len);
7512 }
7513
7514
7515 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7516 REAL_CST, FIXED_CST, COMPLEX_CST, VECTOR_CST or STRING_CST specified by EXPR into the
7517 buffer PTR of length LEN bytes. If OFF is not -1 then start
7518 the encoding at byte offset OFF and encode at most LEN bytes.
7519 Return the number of bytes placed in the buffer, or zero upon failure. */
7520
7521 int
7522 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7523 {
7524 switch (TREE_CODE (expr))
7525 {
7526 case INTEGER_CST:
7527 return native_encode_int (expr, ptr, len, off);
7528
7529 case REAL_CST:
7530 return native_encode_real (expr, ptr, len, off);
7531
7532 case FIXED_CST:
7533 return native_encode_fixed (expr, ptr, len, off);
7534
7535 case COMPLEX_CST:
7536 return native_encode_complex (expr, ptr, len, off);
7537
7538 case VECTOR_CST:
7539 return native_encode_vector (expr, ptr, len, off);
7540
7541 case STRING_CST:
7542 return native_encode_string (expr, ptr, len, off);
7543
7544 default:
7545 return 0;
7546 }
7547 }
7548
7549
7550 /* Subroutine of native_interpret_expr. Interpret the contents of
7551 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7552 If the buffer cannot be interpreted, return NULL_TREE. */
7553
7554 static tree
7555 native_interpret_int (tree type, const unsigned char *ptr, int len)
7556 {
7557 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7558
7559 if (total_bytes > len
7560 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7561 return NULL_TREE;
7562
7563 wide_int result = wi::from_buffer (ptr, total_bytes);
7564
7565 return wide_int_to_tree (type, result);
7566 }
7567
7568
7569 /* Subroutine of native_interpret_expr. Interpret the contents of
7570 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7571 If the buffer cannot be interpreted, return NULL_TREE. */
7572
7573 static tree
7574 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7575 {
7576 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7577 double_int result;
7578 FIXED_VALUE_TYPE fixed_value;
7579
7580 if (total_bytes > len
7581 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7582 return NULL_TREE;
7583
7584 result = double_int::from_buffer (ptr, total_bytes);
7585 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7586
7587 return build_fixed (type, fixed_value);
7588 }
7589
7590
7591 /* Subroutine of native_interpret_expr. Interpret the contents of
7592 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7593 If the buffer cannot be interpreted, return NULL_TREE. */
7594
7595 static tree
7596 native_interpret_real (tree type, const unsigned char *ptr, int len)
7597 {
7598 machine_mode mode = TYPE_MODE (type);
7599 int total_bytes = GET_MODE_SIZE (mode);
7600 int byte, offset, word, words, bitpos;
7601 unsigned char value;
7602 /* There are always 32 bits in each long, no matter the size of
7603 the host's long. We handle floating point representations with
7604 up to 192 bits. */
7605 REAL_VALUE_TYPE r;
7606 long tmp[6];
7607
7609 if (total_bytes > len || total_bytes > 24)
7610 return NULL_TREE;
7611 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7612
7613 memset (tmp, 0, sizeof (tmp));
7614 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7615 bitpos += BITS_PER_UNIT)
7616 {
7617 byte = (bitpos / BITS_PER_UNIT) & 3;
7618 if (UNITS_PER_WORD < 4)
7619 {
7620 word = byte / UNITS_PER_WORD;
7621 if (WORDS_BIG_ENDIAN)
7622 word = (words - 1) - word;
7623 offset = word * UNITS_PER_WORD;
7624 if (BYTES_BIG_ENDIAN)
7625 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7626 else
7627 offset += byte % UNITS_PER_WORD;
7628 }
7629 else
7630 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7631 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7632
7633 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7634 }
7635
7636 real_from_target (&r, tmp, mode);
7637 return build_real (type, r);
7638 }
7639
7640
7641 /* Subroutine of native_interpret_expr. Interpret the contents of
7642 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7643 If the buffer cannot be interpreted, return NULL_TREE. */
7644
7645 static tree
7646 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7647 {
7648 tree etype, rpart, ipart;
7649 int size;
7650
7651 etype = TREE_TYPE (type);
7652 size = GET_MODE_SIZE (TYPE_MODE (etype));
7653 if (size * 2 > len)
7654 return NULL_TREE;
7655 rpart = native_interpret_expr (etype, ptr, size);
7656 if (!rpart)
7657 return NULL_TREE;
7658 ipart = native_interpret_expr (etype, ptr+size, size);
7659 if (!ipart)
7660 return NULL_TREE;
7661 return build_complex (type, rpart, ipart);
7662 }
7663
7664
7665 /* Subroutine of native_interpret_expr. Interpret the contents of
7666 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7667 If the buffer cannot be interpreted, return NULL_TREE. */
7668
7669 static tree
7670 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7671 {
7672 tree etype, elem;
7673 int i, size, count;
7674 tree *elements;
7675
7676 etype = TREE_TYPE (type);
7677 size = GET_MODE_SIZE (TYPE_MODE (etype));
7678 count = TYPE_VECTOR_SUBPARTS (type);
7679 if (size * count > len)
7680 return NULL_TREE;
7681
7682 elements = XALLOCAVEC (tree, count);
7683 for (i = count - 1; i >= 0; i--)
7684 {
7685 elem = native_interpret_expr (etype, ptr+(i*size), size);
7686 if (!elem)
7687 return NULL_TREE;
7688 elements[i] = elem;
7689 }
7690 return build_vector (type, elements);
7691 }
7692
7693
7694 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7695 the buffer PTR of length LEN as a constant of type TYPE. For
7696 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7697 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7698 return NULL_TREE. */
7699
7700 tree
7701 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7702 {
7703 switch (TREE_CODE (type))
7704 {
7705 case INTEGER_TYPE:
7706 case ENUMERAL_TYPE:
7707 case BOOLEAN_TYPE:
7708 case POINTER_TYPE:
7709 case REFERENCE_TYPE:
7710 return native_interpret_int (type, ptr, len);
7711
7712 case REAL_TYPE:
7713 return native_interpret_real (type, ptr, len);
7714
7715 case FIXED_POINT_TYPE:
7716 return native_interpret_fixed (type, ptr, len);
7717
7718 case COMPLEX_TYPE:
7719 return native_interpret_complex (type, ptr, len);
7720
7721 case VECTOR_TYPE:
7722 return native_interpret_vector (type, ptr, len);
7723
7724 default:
7725 return NULL_TREE;
7726 }
7727 }
7728
7729 /* Returns true if we can interpret the contents of a native encoding
7730 as TYPE. */
7731
7732 static bool
7733 can_native_interpret_type_p (tree type)
7734 {
7735 switch (TREE_CODE (type))
7736 {
7737 case INTEGER_TYPE:
7738 case ENUMERAL_TYPE:
7739 case BOOLEAN_TYPE:
7740 case POINTER_TYPE:
7741 case REFERENCE_TYPE:
7742 case FIXED_POINT_TYPE:
7743 case REAL_TYPE:
7744 case COMPLEX_TYPE:
7745 case VECTOR_TYPE:
7746 return true;
7747 default:
7748 return false;
7749 }
7750 }
7751
7752 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7753 TYPE at compile-time. If we're unable to perform the conversion
7754 return NULL_TREE. */
7755
7756 static tree
7757 fold_view_convert_expr (tree type, tree expr)
7758 {
7759 /* We support up to 512-bit values (for V8DFmode). */
7760 unsigned char buffer[64];
7761 int len;
7762
7763 /* Check that the host and target are sane. */
7764 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7765 return NULL_TREE;
7766
7767 len = native_encode_expr (expr, buffer, sizeof (buffer));
7768 if (len == 0)
7769 return NULL_TREE;
7770
7771 return native_interpret_expr (type, buffer, len);
7772 }
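
/* A sketch (illustrative only, not part of GCC) of what the
   encode/interpret round trip above computes at compile time: a
   bit-for-bit reinterpretation, the run-time equivalent of memcpy
   between objects.  Assumes the host's float is IEEE-754 single
   precision.  */
#if 0
#include <assert.h>
#include <stdint.h>
#include <string.h>

int
main (void)
{
  float f = 1.0f;
  uint32_t bits;
  memcpy (&bits, &f, sizeof bits);
  /* VIEW_CONVERT_EXPR <uint32_t> (1.0f) folds to 0x3f800000.  */
  assert (bits == 0x3f800000u);
  return 0;
}
#endif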
7773
7774 /* Build an expression for the address of T. Folds away INDIRECT_REF
7775 to avoid confusing the gimplify process. */
7776
7777 tree
7778 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7779 {
7780 /* The size of the object is not relevant when talking about its address. */
7781 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7782 t = TREE_OPERAND (t, 0);
7783
7784 if (TREE_CODE (t) == INDIRECT_REF)
7785 {
7786 t = TREE_OPERAND (t, 0);
7787
7788 if (TREE_TYPE (t) != ptrtype)
7789 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7790 }
7791 else if (TREE_CODE (t) == MEM_REF
7792 && integer_zerop (TREE_OPERAND (t, 1)))
7793 return TREE_OPERAND (t, 0);
7794 else if (TREE_CODE (t) == MEM_REF
7795 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7796 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7797 TREE_OPERAND (t, 0),
7798 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7799 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7800 {
7801 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7802
7803 if (TREE_TYPE (t) != ptrtype)
7804 t = fold_convert_loc (loc, ptrtype, t);
7805 }
7806 else
7807 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7808
7809 return t;
7810 }
7811
7812 /* Build an expression for the address of T. */
7813
7814 tree
7815 build_fold_addr_expr_loc (location_t loc, tree t)
7816 {
7817 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7818
7819 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7820 }
7821
7822 /* Fold a unary expression of code CODE and type TYPE with operand
7823 OP0. Return the folded expression if folding is successful.
7824 Otherwise, return NULL_TREE. */
7825
7826 tree
7827 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7828 {
7829 tree tem;
7830 tree arg0;
7831 enum tree_code_class kind = TREE_CODE_CLASS (code);
7832
7833 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7834 && TREE_CODE_LENGTH (code) == 1);
7835
7836 arg0 = op0;
7837 if (arg0)
7838 {
7839 if (CONVERT_EXPR_CODE_P (code)
7840 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7841 {
7842 /* Don't use STRIP_NOPS, because signedness of argument type
7843 matters. */
7844 STRIP_SIGN_NOPS (arg0);
7845 }
7846 else
7847 {
7848 /* Strip any conversions that don't change the mode. This
7849 is safe for every expression, except for a comparison
7850 expression because its signedness is derived from its
7851 operands.
7852
7853 Note that this is done as an internal manipulation within
7854 the constant folder, in order to find the simplest
7855 representation of the arguments so that their form can be
7856 studied. In any case, the appropriate type conversions
7857 should be put back in the tree that will get out of the
7858 constant folder. */
7859 STRIP_NOPS (arg0);
7860 }
7861
7862 if (CONSTANT_CLASS_P (arg0))
7863 {
7864 tree tem = const_unop (code, type, arg0);
7865 if (tem)
7866 {
7867 if (TREE_TYPE (tem) != type)
7868 tem = fold_convert_loc (loc, type, tem);
7869 return tem;
7870 }
7871 }
7872 }
7873
7874 tem = generic_simplify (loc, code, type, op0);
7875 if (tem)
7876 return tem;
7877
7878 if (TREE_CODE_CLASS (code) == tcc_unary)
7879 {
7880 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7881 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7882 fold_build1_loc (loc, code, type,
7883 fold_convert_loc (loc, TREE_TYPE (op0),
7884 TREE_OPERAND (arg0, 1))));
7885 else if (TREE_CODE (arg0) == COND_EXPR)
7886 {
7887 tree arg01 = TREE_OPERAND (arg0, 1);
7888 tree arg02 = TREE_OPERAND (arg0, 2);
7889 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7890 arg01 = fold_build1_loc (loc, code, type,
7891 fold_convert_loc (loc,
7892 TREE_TYPE (op0), arg01));
7893 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7894 arg02 = fold_build1_loc (loc, code, type,
7895 fold_convert_loc (loc,
7896 TREE_TYPE (op0), arg02));
7897 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7898 arg01, arg02);
7899
7900 /* If this was a conversion, and all we did was to move it
7901 inside the COND_EXPR, bring it back out. But leave it if
7902 it is a conversion from integer to integer and the
7903 result precision is no wider than a word since such a
7904 conversion is cheap and may be optimized away by combine,
7905 while it couldn't if it were outside the COND_EXPR. Then return
7906 so we don't get into an infinite recursion loop taking the
7907 conversion out and then back in. */
7908
7909 if ((CONVERT_EXPR_CODE_P (code)
7910 || code == NON_LVALUE_EXPR)
7911 && TREE_CODE (tem) == COND_EXPR
7912 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7913 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7914 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7915 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7916 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7917 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7918 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7919 && (INTEGRAL_TYPE_P
7920 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7921 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7922 || flag_syntax_only))
7923 tem = build1_loc (loc, code, type,
7924 build3 (COND_EXPR,
7925 TREE_TYPE (TREE_OPERAND
7926 (TREE_OPERAND (tem, 1), 0)),
7927 TREE_OPERAND (tem, 0),
7928 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7929 TREE_OPERAND (TREE_OPERAND (tem, 2),
7930 0)));
7931 return tem;
7932 }
7933 }
7934
7935 switch (code)
7936 {
7937 case NON_LVALUE_EXPR:
7938 if (!maybe_lvalue_p (op0))
7939 return fold_convert_loc (loc, type, op0);
7940 return NULL_TREE;
7941
7942 CASE_CONVERT:
7943 case FLOAT_EXPR:
7944 case FIX_TRUNC_EXPR:
7945 if (COMPARISON_CLASS_P (op0))
7946 {
7947 /* If we have (type) (a CMP b) and type is an integral type, return
7948 a new expression involving the new type. Canonicalize
7949 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7950 non-integral type.
7951 Do not fold the result as that would not simplify further; also,
7952 folding again results in infinite recursion. */
7953 if (TREE_CODE (type) == BOOLEAN_TYPE)
7954 return build2_loc (loc, TREE_CODE (op0), type,
7955 TREE_OPERAND (op0, 0),
7956 TREE_OPERAND (op0, 1));
7957 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7958 && TREE_CODE (type) != VECTOR_TYPE)
7959 return build3_loc (loc, COND_EXPR, type, op0,
7960 constant_boolean_node (true, type),
7961 constant_boolean_node (false, type));
7962 }
7963
7964 /* Handle (T *)&A.B.C for A being of type T and B and C
7965 living at offset zero. This occurs frequently in
7966 C++ upcasting and then accessing the base. */
7967 if (TREE_CODE (op0) == ADDR_EXPR
7968 && POINTER_TYPE_P (type)
7969 && handled_component_p (TREE_OPERAND (op0, 0)))
7970 {
7971 HOST_WIDE_INT bitsize, bitpos;
7972 tree offset;
7973 machine_mode mode;
7974 int unsignedp, volatilep;
7975 tree base = TREE_OPERAND (op0, 0);
7976 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7977 &mode, &unsignedp, &volatilep, false);
7978 /* If the reference was to a (constant) zero offset, we can use
7979 the address of the base if it has the same base type
7980 as the result type and the pointer type is unqualified. */
7981 if (! offset && bitpos == 0
7982 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7983 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7984 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7985 return fold_convert_loc (loc, type,
7986 build_fold_addr_expr_loc (loc, base));
7987 }
7988
7989 if (TREE_CODE (op0) == MODIFY_EXPR
7990 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7991 /* Detect assigning a bitfield. */
7992 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7993 && DECL_BIT_FIELD
7994 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7995 {
7996 /* Don't leave an assignment inside a conversion
7997 unless assigning a bitfield. */
7998 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7999 /* First do the assignment, then return converted constant. */
8000 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
8001 TREE_NO_WARNING (tem) = 1;
8002 TREE_USED (tem) = 1;
8003 return tem;
8004 }
8005
8006 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8007 constant (if x has signed type, the sign bit cannot be set
8008 in c). This folds extension into the BIT_AND_EXPR.
8009 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8010 very likely don't have maximal range for their precision and this
8011 transformation effectively doesn't preserve non-maximal ranges. */
8012 if (TREE_CODE (type) == INTEGER_TYPE
8013 && TREE_CODE (op0) == BIT_AND_EXPR
8014 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8015 {
8016 tree and_expr = op0;
8017 tree and0 = TREE_OPERAND (and_expr, 0);
8018 tree and1 = TREE_OPERAND (and_expr, 1);
8019 int change = 0;
8020
8021 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8022 || (TYPE_PRECISION (type)
8023 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8024 change = 1;
8025 else if (TYPE_PRECISION (TREE_TYPE (and1))
8026 <= HOST_BITS_PER_WIDE_INT
8027 && tree_fits_uhwi_p (and1))
8028 {
8029 unsigned HOST_WIDE_INT cst;
8030
8031 cst = tree_to_uhwi (and1);
8032 cst &= HOST_WIDE_INT_M1U
8033 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8034 change = (cst == 0);
8035 #ifdef LOAD_EXTEND_OP
8036 if (change
8037 && !flag_syntax_only
8038 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8039 == ZERO_EXTEND))
8040 {
8041 tree uns = unsigned_type_for (TREE_TYPE (and0));
8042 and0 = fold_convert_loc (loc, uns, and0);
8043 and1 = fold_convert_loc (loc, uns, and1);
8044 }
8045 #endif
8046 }
8047 if (change)
8048 {
8049 tem = force_fit_type (type, wi::to_widest (and1), 0,
8050 TREE_OVERFLOW (and1));
8051 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8052 fold_convert_loc (loc, type, and0), tem);
8053 }
8054 }
8055
8056 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8057 when one of the new casts will fold away. Conservatively we assume
8058 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8059 if (POINTER_TYPE_P (type)
8060 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8061 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8062 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8063 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8064 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8065 {
8066 tree arg00 = TREE_OPERAND (arg0, 0);
8067 tree arg01 = TREE_OPERAND (arg0, 1);
8068
8069 return fold_build_pointer_plus_loc
8070 (loc, fold_convert_loc (loc, type, arg00), arg01);
8071 }
8072
8073 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8074 of the same precision, and X has an integer type not narrower than
8075 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8076 if (INTEGRAL_TYPE_P (type)
8077 && TREE_CODE (op0) == BIT_NOT_EXPR
8078 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8079 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8080 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8081 {
8082 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8083 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8084 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8085 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8086 fold_convert_loc (loc, type, tem));
8087 }
8088
8089 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8090 type of X and Y (integer types only). */
8091 if (INTEGRAL_TYPE_P (type)
8092 && TREE_CODE (op0) == MULT_EXPR
8093 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8094 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8095 {
8096 /* Be careful not to introduce new overflows. */
8097 tree mult_type;
8098 if (TYPE_OVERFLOW_WRAPS (type))
8099 mult_type = type;
8100 else
8101 mult_type = unsigned_type_for (type);
8102
8103 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8104 {
8105 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8106 fold_convert_loc (loc, mult_type,
8107 TREE_OPERAND (op0, 0)),
8108 fold_convert_loc (loc, mult_type,
8109 TREE_OPERAND (op0, 1)));
8110 return fold_convert_loc (loc, type, tem);
8111 }
8112 }
8113
8114 return NULL_TREE;
8115
8116 case VIEW_CONVERT_EXPR:
8117 if (TREE_CODE (op0) == MEM_REF)
8118 return fold_build2_loc (loc, MEM_REF, type,
8119 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8120
8121 return NULL_TREE;
8122
8123 case NEGATE_EXPR:
8124 tem = fold_negate_expr (loc, arg0);
8125 if (tem)
8126 return fold_convert_loc (loc, type, tem);
8127 return NULL_TREE;
8128
8129 case ABS_EXPR:
8130 /* Convert fabs((double)float) into (double)fabsf(float). */
8131 if (TREE_CODE (arg0) == NOP_EXPR
8132 && TREE_CODE (type) == REAL_TYPE)
8133 {
8134 tree targ0 = strip_float_extensions (arg0);
8135 if (targ0 != arg0)
8136 return fold_convert_loc (loc, type,
8137 fold_build1_loc (loc, ABS_EXPR,
8138 TREE_TYPE (targ0),
8139 targ0));
8140 }
8141 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8142 else if (TREE_CODE (arg0) == ABS_EXPR)
8143 return arg0;
8144
8145 /* Strip sign ops from argument. */
8146 if (TREE_CODE (type) == REAL_TYPE)
8147 {
8148 tem = fold_strip_sign_ops (arg0);
8149 if (tem)
8150 return fold_build1_loc (loc, ABS_EXPR, type,
8151 fold_convert_loc (loc, type, tem));
8152 }
8153 return NULL_TREE;
8154
8155 case CONJ_EXPR:
8156 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8157 return fold_convert_loc (loc, type, arg0);
8158 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8159 {
8160 tree itype = TREE_TYPE (type);
8161 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8162 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8163 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8164 negate_expr (ipart));
8165 }
8166 if (TREE_CODE (arg0) == CONJ_EXPR)
8167 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8168 return NULL_TREE;
8169
8170 case BIT_NOT_EXPR:
8171 /* Convert ~ (-A) to A - 1. */
8172 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8173 return fold_build2_loc (loc, MINUS_EXPR, type,
8174 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8175 build_int_cst (type, 1));
8176 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8177 else if (INTEGRAL_TYPE_P (type)
8178 && ((TREE_CODE (arg0) == MINUS_EXPR
8179 && integer_onep (TREE_OPERAND (arg0, 1)))
8180 || (TREE_CODE (arg0) == PLUS_EXPR
8181 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8182 {
8183 /* Perform the negation in ARG0's type and only then convert
8184 to TYPE so as to avoid introducing undefined behavior. */
8185 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8186 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8187 TREE_OPERAND (arg0, 0));
8188 return fold_convert_loc (loc, type, t);
8189 }
8190 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8191 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8192 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8193 fold_convert_loc (loc, type,
8194 TREE_OPERAND (arg0, 0)))))
8195 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8196 fold_convert_loc (loc, type,
8197 TREE_OPERAND (arg0, 1)));
8198 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8199 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8200 fold_convert_loc (loc, type,
8201 TREE_OPERAND (arg0, 1)))))
8202 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8203 fold_convert_loc (loc, type,
8204 TREE_OPERAND (arg0, 0)), tem);
8205
8206 return NULL_TREE;
8207
8208 case TRUTH_NOT_EXPR:
8209 /* Note that the operand of this must be an int
8210 and its values must be 0 or 1.
8211 ("true" is a fixed value perhaps depending on the language,
8212 but we don't handle values other than 1 correctly yet.) */
8213 tem = fold_truth_not_expr (loc, arg0);
8214 if (!tem)
8215 return NULL_TREE;
8216 return fold_convert_loc (loc, type, tem);
8217
8218 case REALPART_EXPR:
8219 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8220 return fold_convert_loc (loc, type, arg0);
8221 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8222 {
8223 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8224 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8225 fold_build1_loc (loc, REALPART_EXPR, itype,
8226 TREE_OPERAND (arg0, 0)),
8227 fold_build1_loc (loc, REALPART_EXPR, itype,
8228 TREE_OPERAND (arg0, 1)));
8229 return fold_convert_loc (loc, type, tem);
8230 }
8231 if (TREE_CODE (arg0) == CONJ_EXPR)
8232 {
8233 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8234 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8235 TREE_OPERAND (arg0, 0));
8236 return fold_convert_loc (loc, type, tem);
8237 }
8238 if (TREE_CODE (arg0) == CALL_EXPR)
8239 {
8240 tree fn = get_callee_fndecl (arg0);
8241 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8242 switch (DECL_FUNCTION_CODE (fn))
8243 {
8244 CASE_FLT_FN (BUILT_IN_CEXPI):
8245 fn = mathfn_built_in (type, BUILT_IN_COS);
8246 if (fn)
8247 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8248 break;
8249
8250 default:
8251 break;
8252 }
8253 }
8254 return NULL_TREE;
8255
8256 case IMAGPART_EXPR:
8257 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8258 return build_zero_cst (type);
8259 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8260 {
8261 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8262 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8263 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8264 TREE_OPERAND (arg0, 0)),
8265 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8266 TREE_OPERAND (arg0, 1)));
8267 return fold_convert_loc (loc, type, tem);
8268 }
8269 if (TREE_CODE (arg0) == CONJ_EXPR)
8270 {
8271 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8272 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8273 return fold_convert_loc (loc, type, negate_expr (tem));
8274 }
8275 if (TREE_CODE (arg0) == CALL_EXPR)
8276 {
8277 tree fn = get_callee_fndecl (arg0);
8278 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8279 switch (DECL_FUNCTION_CODE (fn))
8280 {
8281 CASE_FLT_FN (BUILT_IN_CEXPI):
8282 fn = mathfn_built_in (type, BUILT_IN_SIN);
8283 if (fn)
8284 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8285 break;
8286
8287 default:
8288 break;
8289 }
8290 }
8291 return NULL_TREE;
8292
8293 case INDIRECT_REF:
8294 /* Fold *&X to X if X is an lvalue. */
8295 if (TREE_CODE (op0) == ADDR_EXPR)
8296 {
8297 tree op00 = TREE_OPERAND (op0, 0);
8298 if ((TREE_CODE (op00) == VAR_DECL
8299 || TREE_CODE (op00) == PARM_DECL
8300 || TREE_CODE (op00) == RESULT_DECL)
8301 && !TREE_READONLY (op00))
8302 return op00;
8303 }
8304 return NULL_TREE;
8305
8306 default:
8307 return NULL_TREE;
8308 } /* switch (code) */
8309 }
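
/* A sketch (illustrative only, not part of GCC) of the BIT_NOT_EXPR
   identities handled in the case above, checked on a wrapping
   unsigned type where ~x == -x - 1 holds unconditionally.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (unsigned a = 0; a < 4096; a++)
    {
      assert (~(-a) == a - 1);		/* ~(-A) -> A - 1.  */
      assert (~(a - 1) == -a);		/* ~(A - 1) -> -A.  */
      assert (~(a + -1u) == -a);	/* ~(A + -1) -> -A.  */
    }
  return 0;
}
#endif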
8310
8311
8312 /* If the operation was a conversion do _not_ mark a resulting constant
8313 with TREE_OVERFLOW if the original constant was not. These conversions
8314 have implementation defined behavior and retaining the TREE_OVERFLOW
8315 flag here would confuse later passes such as VRP. */
8316 tree
8317 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8318 tree type, tree op0)
8319 {
8320 tree res = fold_unary_loc (loc, code, type, op0);
8321 if (res
8322 && TREE_CODE (res) == INTEGER_CST
8323 && TREE_CODE (op0) == INTEGER_CST
8324 && CONVERT_EXPR_CODE_P (code))
8325 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8326
8327 return res;
8328 }
8329
8330 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8331 operands OP0 and OP1. LOC is the location of the resulting expression.
8332 ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped (via STRIP_NOPS).
8333 Return the folded expression if folding is successful. Otherwise,
8334 return NULL_TREE. */
8335 static tree
8336 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8337 tree arg0, tree arg1, tree op0, tree op1)
8338 {
8339 tree tem;
8340
8341 /* We only do these simplifications if we are optimizing. */
8342 if (!optimize)
8343 return NULL_TREE;
8344
8345 /* Check for things like (A || B) && (A || C). We can convert this
8346 to A || (B && C). Note that either operator can be any of the four
8347 truth and/or operations and the transformation will still be
8348 valid. Also note that we only care about order for the
8349 ANDIF and ORIF operators. If B contains side effects, this
8350 might change the truth-value of A. */
8351 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8352 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8353 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8354 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8355 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8356 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8357 {
8358 tree a00 = TREE_OPERAND (arg0, 0);
8359 tree a01 = TREE_OPERAND (arg0, 1);
8360 tree a10 = TREE_OPERAND (arg1, 0);
8361 tree a11 = TREE_OPERAND (arg1, 1);
8362 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8363 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8364 && (code == TRUTH_AND_EXPR
8365 || code == TRUTH_OR_EXPR));
8366
8367 if (operand_equal_p (a00, a10, 0))
8368 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8369 fold_build2_loc (loc, code, type, a01, a11));
8370 else if (commutative && operand_equal_p (a00, a11, 0))
8371 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8372 fold_build2_loc (loc, code, type, a01, a10));
8373 else if (commutative && operand_equal_p (a01, a10, 0))
8374 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8375 fold_build2_loc (loc, code, type, a00, a11));
8376
8377 /* This case is tricky because we must either have commutative
8378 operators or else A10 must not have side-effects. */
8379
8380 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8381 && operand_equal_p (a01, a11, 0))
8382 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8383 fold_build2_loc (loc, code, type, a00, a10),
8384 a01);
8385 }
8386
8387 /* See if we can build a range comparison. */
8388 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8389 return tem;
8390
8391 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8392 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8393 {
8394 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8395 if (tem)
8396 return fold_build2_loc (loc, code, type, tem, arg1);
8397 }
8398
8399 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8400 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8401 {
8402 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8403 if (tem)
8404 return fold_build2_loc (loc, code, type, arg0, tem);
8405 }
8406
8407 /* Check for the possibility of merging component references. If our
8408 lhs is another similar operation, try to merge its rhs with our
8409 rhs. Then try to merge our lhs and rhs. */
8410 if (TREE_CODE (arg0) == code
8411 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8412 TREE_OPERAND (arg0, 1), arg1)))
8413 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8414
8415 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8416 return tem;
8417
8418 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8419 && (code == TRUTH_AND_EXPR
8420 || code == TRUTH_ANDIF_EXPR
8421 || code == TRUTH_OR_EXPR
8422 || code == TRUTH_ORIF_EXPR))
8423 {
8424 enum tree_code ncode, icode;
8425
8426 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8427 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8428 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8429
8430 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8431 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8432 We don't want to pack more than two leaves into a non-IF AND/OR
8433 expression.
8434 If the tree code of the left-hand operand isn't an AND/OR-IF code
8435 and is not equal to IF-CODE, then we don't want to add the
8436 right-hand operand.
8437 If the inner right-hand side of the left-hand operand has
8438 side-effects, or isn't simple, then we can't add to it, as
8439 otherwise we might destroy the if-sequence. */
8439 if (TREE_CODE (arg0) == icode
8440 && simple_operand_p_2 (arg1)
8441 /* Needed for sequence points, to handle traps and
8442 side effects. */
8443 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8444 {
8445 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8446 arg1);
8447 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8448 tem);
8449 }
8450 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8451 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8452 else if (TREE_CODE (arg1) == icode
8453 && simple_operand_p_2 (arg0)
8454 /* Needed for sequence points, to handle traps and
8455 side effects. */
8456 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8457 {
8458 tem = fold_build2_loc (loc, ncode, type,
8459 arg0, TREE_OPERAND (arg1, 0));
8460 return fold_build2_loc (loc, icode, type, tem,
8461 TREE_OPERAND (arg1, 1));
8462 }
8463 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8464 into (A OR B).
8465 For sequence point consistency, we need to check for trapping
8466 and side effects. */
8467 else if (code == icode && simple_operand_p_2 (arg0)
8468 && simple_operand_p_2 (arg1))
8469 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8470 }
8471
8472 return NULL_TREE;
8473 }
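
/* A truth-table sketch (illustrative only, not part of GCC) of the
   distribution performed above; it is a pure value identity, which
   is why the code first checks that the shared subexpressions have
   no side effects.  */
#if 0
#include <assert.h>

int
main (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      for (int c = 0; c <= 1; c++)
	{
	  /* (A || B) && (A || C) -> A || (B && C).  */
	  assert (((a || b) && (a || c)) == (a || (b && c)));
	  /* (A && B) || (A && C) -> A && (B || C).  */
	  assert (((a && b) || (a && c)) == (a && (b || c)));
	}
  return 0;
}
#endif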
8474
8475 /* Fold a binary expression of code CODE and type TYPE with operands
8476 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8477 Return the folded expression if folding is successful. Otherwise,
8478 return NULL_TREE. */
8479
8480 static tree
8481 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8482 {
8483 enum tree_code compl_code;
8484
8485 if (code == MIN_EXPR)
8486 compl_code = MAX_EXPR;
8487 else if (code == MAX_EXPR)
8488 compl_code = MIN_EXPR;
8489 else
8490 gcc_unreachable ();
8491
8492 /* MIN (MAX (a, b), b) == b. */
8493 if (TREE_CODE (op0) == compl_code
8494 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8495 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8496
8497 /* MIN (MAX (b, a), b) == b. */
8498 if (TREE_CODE (op0) == compl_code
8499 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8500 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8501 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8502
8503 /* MIN (a, MAX (a, b)) == a. */
8504 if (TREE_CODE (op1) == compl_code
8505 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8506 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8507 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8508
8509 /* MIN (a, MAX (b, a)) == a. */
8510 if (TREE_CODE (op1) == compl_code
8511 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8512 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8513 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8514
8515 return NULL_TREE;
8516 }
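
/* A sketch (illustrative only, not part of GCC) of the MIN/MAX
   absorption rules above, using local helper macros.  */
#if 0
#include <assert.h>

#define MIN(x, y) ((x) < (y) ? (x) : (y))
#define MAX(x, y) ((x) > (y) ? (x) : (y))

int
main (void)
{
  for (int a = -50; a <= 50; a++)
    for (int b = -50; b <= 50; b++)
      {
	assert (MIN (MAX (a, b), b) == b);
	assert (MIN (MAX (b, a), b) == b);
	assert (MIN (a, MAX (a, b)) == a);
	assert (MIN (a, MAX (b, a)) == a);
      }
  return 0;
}
#endif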
8517
8518 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8519 by changing CODE to reduce the magnitude of constants involved in
8520 ARG0 of the comparison.
8521 Returns a canonicalized comparison tree if a simplification was
8522 possible, otherwise returns NULL_TREE.
8523 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8524 valid if signed overflow is undefined. */
8525
8526 static tree
8527 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8528 tree arg0, tree arg1,
8529 bool *strict_overflow_p)
8530 {
8531 enum tree_code code0 = TREE_CODE (arg0);
8532 tree t, cst0 = NULL_TREE;
8533 int sgn0;
8534 bool swap = false;
8535
8536 /* Match A +- CST code arg1 and CST code arg1. We can change the
8537 first form only if overflow is undefined. */
8538 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8539 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8540 /* In principle pointers also have undefined overflow behavior,
8541 but that causes problems elsewhere. */
8542 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8543 && (code0 == MINUS_EXPR
8544 || code0 == PLUS_EXPR)
8545 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8546 || code0 == INTEGER_CST))
8547 return NULL_TREE;
8548
8549 /* Identify the constant in arg0 and its sign. */
8550 if (code0 == INTEGER_CST)
8551 cst0 = arg0;
8552 else
8553 cst0 = TREE_OPERAND (arg0, 1);
8554 sgn0 = tree_int_cst_sgn (cst0);
8555
8556 /* Overflowed constants and zero will cause problems. */
8557 if (integer_zerop (cst0)
8558 || TREE_OVERFLOW (cst0))
8559 return NULL_TREE;
8560
8561 /* See if we can reduce the magnitude of the constant in
8562 arg0 by changing the comparison code. */
8563 if (code0 == INTEGER_CST)
8564 {
8565 /* CST <= arg1 -> CST-1 < arg1. */
8566 if (code == LE_EXPR && sgn0 == 1)
8567 code = LT_EXPR;
8568 /* -CST < arg1 -> -CST-1 <= arg1. */
8569 else if (code == LT_EXPR && sgn0 == -1)
8570 code = LE_EXPR;
8571 /* CST > arg1 -> CST-1 >= arg1. */
8572 else if (code == GT_EXPR && sgn0 == 1)
8573 code = GE_EXPR;
8574 /* -CST >= arg1 -> -CST-1 > arg1. */
8575 else if (code == GE_EXPR && sgn0 == -1)
8576 code = GT_EXPR;
8577 else
8578 return NULL_TREE;
8579 /* arg1 code' CST' might be more canonical. */
8580 swap = true;
8581 }
8582 else
8583 {
8584 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8585 if (code == LT_EXPR
8586 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8587 code = LE_EXPR;
8588 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8589 else if (code == GT_EXPR
8590 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8591 code = GE_EXPR;
8592 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8593 else if (code == LE_EXPR
8594 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8595 code = LT_EXPR;
8596 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8597 else if (code == GE_EXPR
8598 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8599 code = GT_EXPR;
8600 else
8601 return NULL_TREE;
8602 *strict_overflow_p = true;
8603 }
8604
8605 /* Now build the constant reduced in magnitude. But not if that
8606 would produce one outside of its type's range. */
8607 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8608 && ((sgn0 == 1
8609 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8610 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8611 || (sgn0 == -1
8612 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8613 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8614 /* We cannot swap the comparison here as that would cause us to
8615 endlessly recurse. */
8616 return NULL_TREE;
8617
8618 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8619 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8620 if (code0 != INTEGER_CST)
8621 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8622 t = fold_convert (TREE_TYPE (arg1), t);
8623
8624 /* If swapping might yield a more canonical form, do so. */
8625 if (swap)
8626 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8627 else
8628 return fold_build2_loc (loc, code, type, t, arg1);
8629 }
8630
8631 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8632 overflow further. Try to decrease the magnitude of constants involved
8633 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8634 and put sole constants at the second argument position.
8635 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
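/* For example (a sketch, assuming a signed int X where overflow is
   undefined): maybe_canonicalize_comparison_1 rewrites
     X - 5 < Y   as   X - 4 <= Y
   and the sole-constant form
     5 <= Y      as   Y > 4,
   reducing the magnitude of the constant from 5 to 4 in both cases.  */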
8636
8637 static tree
8638 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8639 tree arg0, tree arg1)
8640 {
8641 tree t;
8642 bool strict_overflow_p;
8643 const char * const warnmsg = G_("assuming signed overflow does not occur "
8644 "when reducing constant in comparison");
8645
8646 /* Try canonicalization by simplifying arg0. */
8647 strict_overflow_p = false;
8648 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8649 &strict_overflow_p);
8650 if (t)
8651 {
8652 if (strict_overflow_p)
8653 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8654 return t;
8655 }
8656
8657 /* Try canonicalization by simplifying arg1 using the swapped
8658 comparison. */
8659 code = swap_tree_comparison (code);
8660 strict_overflow_p = false;
8661 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8662 &strict_overflow_p);
8663 if (t && strict_overflow_p)
8664 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8665 return t;
8666 }
8667
8668 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8669 space. This is used to avoid issuing overflow warnings for
8670 expressions like &p->x, which cannot wrap. */
8671
8672 static bool
8673 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8674 {
8675 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8676 return true;
8677
8678 if (bitpos < 0)
8679 return true;
8680
8681 wide_int wi_offset;
8682 int precision = TYPE_PRECISION (TREE_TYPE (base));
8683 if (offset == NULL_TREE)
8684 wi_offset = wi::zero (precision);
8685 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8686 return true;
8687 else
8688 wi_offset = offset;
8689
8690 bool overflow;
8691 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8692 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8693 if (overflow)
8694 return true;
8695
8696 if (!wi::fits_uhwi_p (total))
8697 return true;
8698
8699 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8700 if (size <= 0)
8701 return true;
8702
8703 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8704 array. */
8705 if (TREE_CODE (base) == ADDR_EXPR)
8706 {
8707 HOST_WIDE_INT base_size;
8708
8709 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8710 if (base_size > 0 && size < base_size)
8711 size = base_size;
8712 }
8713
8714 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8715 }
8716
8717 /* Return the HOST_WIDE_INT least significant bits of T, an
8718 INTEGER_CST of sizetype kind. This makes sure to properly
8719 sign-extend the constant. */
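/* Illustrative example: on a host with 64-bit HOST_WIDE_INT, if T has
   32-bit precision and its low element is 0xffffffff, the result is
   sign-extended to -1 rather than left as 4294967295.  */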
8720
8721 static HOST_WIDE_INT
8722 size_low_cst (const_tree t)
8723 {
8724 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8725 int prec = TYPE_PRECISION (TREE_TYPE (t));
8726 if (prec < HOST_BITS_PER_WIDE_INT)
8727 return sext_hwi (w, prec);
8728 return w;
8729 }
8730
8731 /* Subroutine of fold_binary. This routine performs all of the
8732 transformations that are common to the equality/inequality
8733 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8734 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8735 fold_binary should call fold_binary rather than this routine directly. Fold a comparison with
8736 tree code CODE and type TYPE with operands OP0 and OP1. Return
8737 the folded comparison or NULL_TREE. */
8738
8739 static tree
8740 fold_comparison (location_t loc, enum tree_code code, tree type,
8741 tree op0, tree op1)
8742 {
8743 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8744 tree arg0, arg1, tem;
8745
8746 arg0 = op0;
8747 arg1 = op1;
8748
8749 STRIP_SIGN_NOPS (arg0);
8750 STRIP_SIGN_NOPS (arg1);
8751
8752 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8753 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8754 && (equality_code
8755 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8756 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8757 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8758 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8759 && TREE_CODE (arg1) == INTEGER_CST
8760 && !TREE_OVERFLOW (arg1))
8761 {
8762 const enum tree_code
8763 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8764 tree const1 = TREE_OPERAND (arg0, 1);
8765 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8766 tree variable = TREE_OPERAND (arg0, 0);
8767 tree new_const = int_const_binop (reverse_op, const2, const1);
8768
8769 /* If the constant operation overflowed this can be
8770 simplified as a comparison against INT_MAX/INT_MIN. */
8771 if (TREE_OVERFLOW (new_const)
8772 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8773 {
8774 int const1_sgn = tree_int_cst_sgn (const1);
8775 enum tree_code code2 = code;
8776
8777 /* Get the sign of the constant on the lhs if the
8778 operation were VARIABLE + CONST1. */
8779 if (TREE_CODE (arg0) == MINUS_EXPR)
8780 const1_sgn = -const1_sgn;
8781
8782 /* The sign of the constant determines if we overflowed
8783 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8784 Canonicalize to the INT_MIN overflow by swapping the comparison
8785 if necessary. */
8786 if (const1_sgn == -1)
8787 code2 = swap_tree_comparison (code);
8788
8789 /* We now can look at the canonicalized case
8790 VARIABLE + 1 CODE2 INT_MIN
8791 and decide on the result. */
8792 switch (code2)
8793 {
8794 case EQ_EXPR:
8795 case LT_EXPR:
8796 case LE_EXPR:
8797 return
8798 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8799
8800 case NE_EXPR:
8801 case GE_EXPR:
8802 case GT_EXPR:
8803 return
8804 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8805
8806 default:
8807 gcc_unreachable ();
8808 }
8809 }
8810 else
8811 {
8812 if (!equality_code)
8813 fold_overflow_warning ("assuming signed overflow does not occur "
8814 "when changing X +- C1 cmp C2 to "
8815 "X cmp C2 -+ C1",
8816 WARN_STRICT_OVERFLOW_COMPARISON);
8817 return fold_build2_loc (loc, code, type, variable, new_const);
8818 }
8819 }
8820
8821 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8822 if (TREE_CODE (arg0) == MINUS_EXPR
8823 && equality_code
8824 && integer_zerop (arg1))
8825 {
8826 /* ??? The transformation is valid for the other operators if overflow
8827 is undefined for the type, but performing it here badly interacts
8828 with the transformation in fold_cond_expr_with_comparison which
8829 attempts to synthesize ABS_EXPR. */
8830 if (!equality_code)
8831 fold_overflow_warning ("assuming signed overflow does not occur "
8832 "when changing X - Y cmp 0 to X cmp Y",
8833 WARN_STRICT_OVERFLOW_COMPARISON);
8834 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8835 TREE_OPERAND (arg0, 1));
8836 }
8837
8838 /* For comparisons of pointers we can decompose them to a compile-time
8839 comparison of the base objects and the offsets into the object.
8840 This requires at least one operand being an ADDR_EXPR or a
8841 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8842 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8843 && (TREE_CODE (arg0) == ADDR_EXPR
8844 || TREE_CODE (arg1) == ADDR_EXPR
8845 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8846 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8847 {
8848 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8849 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8850 machine_mode mode;
8851 int volatilep, unsignedp;
8852 bool indirect_base0 = false, indirect_base1 = false;
8853
8854 /* Get base and offset for the access. Strip ADDR_EXPR for
8855 get_inner_reference, but put it back by stripping INDIRECT_REF
8856 off the base object if possible. indirect_baseN will be true
8857 if baseN is not an address but refers to the object itself. */
8858 base0 = arg0;
8859 if (TREE_CODE (arg0) == ADDR_EXPR)
8860 {
8861 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8862 &bitsize, &bitpos0, &offset0, &mode,
8863 &unsignedp, &volatilep, false);
8864 if (TREE_CODE (base0) == INDIRECT_REF)
8865 base0 = TREE_OPERAND (base0, 0);
8866 else
8867 indirect_base0 = true;
8868 }
8869 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8870 {
8871 base0 = TREE_OPERAND (arg0, 0);
8872 STRIP_SIGN_NOPS (base0);
8873 if (TREE_CODE (base0) == ADDR_EXPR)
8874 {
8875 base0 = TREE_OPERAND (base0, 0);
8876 indirect_base0 = true;
8877 }
8878 offset0 = TREE_OPERAND (arg0, 1);
8879 if (tree_fits_shwi_p (offset0))
8880 {
8881 HOST_WIDE_INT off = size_low_cst (offset0);
8882 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8883 * BITS_PER_UNIT)
8884 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8885 {
8886 bitpos0 = off * BITS_PER_UNIT;
8887 offset0 = NULL_TREE;
8888 }
8889 }
8890 }
8891
8892 base1 = arg1;
8893 if (TREE_CODE (arg1) == ADDR_EXPR)
8894 {
8895 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8896 &bitsize, &bitpos1, &offset1, &mode,
8897 &unsignedp, &volatilep, false);
8898 if (TREE_CODE (base1) == INDIRECT_REF)
8899 base1 = TREE_OPERAND (base1, 0);
8900 else
8901 indirect_base1 = true;
8902 }
8903 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8904 {
8905 base1 = TREE_OPERAND (arg1, 0);
8906 STRIP_SIGN_NOPS (base1);
8907 if (TREE_CODE (base1) == ADDR_EXPR)
8908 {
8909 base1 = TREE_OPERAND (base1, 0);
8910 indirect_base1 = true;
8911 }
8912 offset1 = TREE_OPERAND (arg1, 1);
8913 if (tree_fits_shwi_p (offset1))
8914 {
8915 HOST_WIDE_INT off = size_low_cst (offset1);
8916 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8917 * BITS_PER_UNIT)
8918 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8919 {
8920 bitpos1 = off * BITS_PER_UNIT;
8921 offset1 = NULL_TREE;
8922 }
8923 }
8924 }
8925
8926 /* A local variable can never be pointed to by
8927 the default SSA name of an incoming parameter. */
8928 if ((TREE_CODE (arg0) == ADDR_EXPR
8929 && indirect_base0
8930 && TREE_CODE (base0) == VAR_DECL
8931 && auto_var_in_fn_p (base0, current_function_decl)
8932 && !indirect_base1
8933 && TREE_CODE (base1) == SSA_NAME
8934 && SSA_NAME_IS_DEFAULT_DEF (base1)
8935 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8936 || (TREE_CODE (arg1) == ADDR_EXPR
8937 && indirect_base1
8938 && TREE_CODE (base1) == VAR_DECL
8939 && auto_var_in_fn_p (base1, current_function_decl)
8940 && !indirect_base0
8941 && TREE_CODE (base0) == SSA_NAME
8942 && SSA_NAME_IS_DEFAULT_DEF (base0)
8943 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8944 {
8945 if (code == NE_EXPR)
8946 return constant_boolean_node (1, type);
8947 else if (code == EQ_EXPR)
8948 return constant_boolean_node (0, type);
8949 }
8950 /* If we have equivalent bases we might be able to simplify. */
8951 else if (indirect_base0 == indirect_base1
8952 && operand_equal_p (base0, base1, 0))
8953 {
8954 /* We can fold this expression to a constant if the non-constant
8955 offset parts are equal. */
8956 if ((offset0 == offset1
8957 || (offset0 && offset1
8958 && operand_equal_p (offset0, offset1, 0)))
8959 && (code == EQ_EXPR
8960 || code == NE_EXPR
8961 || (indirect_base0 && DECL_P (base0))
8962 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8964 {
8965 if (!equality_code
8966 && bitpos0 != bitpos1
8967 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8968 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8969 fold_overflow_warning (("assuming pointer wraparound does not "
8970 "occur when comparing P +- C1 with "
8971 "P +- C2"),
8972 WARN_STRICT_OVERFLOW_CONDITIONAL);
8973
8974 switch (code)
8975 {
8976 case EQ_EXPR:
8977 return constant_boolean_node (bitpos0 == bitpos1, type);
8978 case NE_EXPR:
8979 return constant_boolean_node (bitpos0 != bitpos1, type);
8980 case LT_EXPR:
8981 return constant_boolean_node (bitpos0 < bitpos1, type);
8982 case LE_EXPR:
8983 return constant_boolean_node (bitpos0 <= bitpos1, type);
8984 case GE_EXPR:
8985 return constant_boolean_node (bitpos0 >= bitpos1, type);
8986 case GT_EXPR:
8987 return constant_boolean_node (bitpos0 > bitpos1, type);
8988 default:;
8989 }
8990 }
8991 /* We can simplify the comparison to a comparison of the variable
8992 offset parts if the constant offset parts are equal.
8993 Be careful to use signed sizetype here because otherwise we
8994 mess with array offsets in the wrong way. This is possible
8995 because pointer arithmetic is restricted to remain within an
8996 object and overflow on pointer differences is undefined as of
8997 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8998 else if (bitpos0 == bitpos1
8999 && (equality_code
9000 || (indirect_base0 && DECL_P (base0))
9001 || POINTER_TYPE_OVERFLOW_UNDEFINED))
9002 {
9003 /* By converting to signed sizetype we cover middle-end pointer
9004 arithmetic which operates on unsigned pointer types of size
9005 type size and ARRAY_REF offsets which are properly sign or
9006 zero extended from their type in case it is narrower than
9007 sizetype. */
9008 if (offset0 == NULL_TREE)
9009 offset0 = build_int_cst (ssizetype, 0);
9010 else
9011 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9012 if (offset1 == NULL_TREE)
9013 offset1 = build_int_cst (ssizetype, 0);
9014 else
9015 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9016
9017 if (!equality_code
9018 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9019 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9020 fold_overflow_warning (("assuming pointer wraparound does not "
9021 "occur when comparing P +- C1 with "
9022 "P +- C2"),
9023 WARN_STRICT_OVERFLOW_COMPARISON);
9024
9025 return fold_build2_loc (loc, code, type, offset0, offset1);
9026 }
9027 }
9028 /* For non-equal bases we can simplify if they are addresses of
9029 declarations with different addresses. */
9030 else if (indirect_base0 && indirect_base1
9031 /* We know that !operand_equal_p (base0, base1, 0)
9032 because the if condition was false. But make
9033 sure two decls are not the same. */
9034 && base0 != base1
9035 && TREE_CODE (arg0) == ADDR_EXPR
9036 && TREE_CODE (arg1) == ADDR_EXPR
9037 && DECL_P (base0)
9038 && DECL_P (base1)
9039 /* Watch for aliases. */
9040 && (!decl_in_symtab_p (base0)
9041 || !decl_in_symtab_p (base1)
9042 || !symtab_node::get_create (base0)->equal_address_to
9043 (symtab_node::get_create (base1))))
9044 {
9045 if (code == EQ_EXPR)
9046 return omit_two_operands_loc (loc, type, boolean_false_node,
9047 arg0, arg1);
9048 else if (code == NE_EXPR)
9049 return omit_two_operands_loc (loc, type, boolean_true_node,
9050 arg0, arg1);
9051 }
9052 /* For equal offsets we can simplify to a comparison of the
9053 base addresses. */
9054 else if (bitpos0 == bitpos1
9055 && (indirect_base0
9056 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9057 && (indirect_base1
9058 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9059 && ((offset0 == offset1)
9060 || (offset0 && offset1
9061 && operand_equal_p (offset0, offset1, 0))))
9062 {
9063 if (indirect_base0)
9064 base0 = build_fold_addr_expr_loc (loc, base0);
9065 if (indirect_base1)
9066 base1 = build_fold_addr_expr_loc (loc, base1);
9067 return fold_build2_loc (loc, code, type, base0, base1);
9068 }
9069 }
9070
9071 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9072 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9073 the resulting offset is smaller in absolute value than the
9074 original one and has the same sign. */
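/* A hedged example with signed ints: X + 5 < Y + 2 is rewritten as
   X + 3 < Y, since the combined constant 3 is smaller in absolute
   value than the original 5 and has the same sign.  */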
9075 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9076 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9077 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9078 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9079 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9080 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9081 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9082 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9083 {
9084 tree const1 = TREE_OPERAND (arg0, 1);
9085 tree const2 = TREE_OPERAND (arg1, 1);
9086 tree variable1 = TREE_OPERAND (arg0, 0);
9087 tree variable2 = TREE_OPERAND (arg1, 0);
9088 tree cst;
9089 const char * const warnmsg = G_("assuming signed overflow does not "
9090 "occur when combining constants around "
9091 "a comparison");
9092
9093 /* Put the constant on the side where it doesn't overflow and is
9094 of lower absolute value and of the same sign as before. */
9095 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9096 ? MINUS_EXPR : PLUS_EXPR,
9097 const2, const1);
9098 if (!TREE_OVERFLOW (cst)
9099 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9100 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9101 {
9102 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9103 return fold_build2_loc (loc, code, type,
9104 variable1,
9105 fold_build2_loc (loc, TREE_CODE (arg1),
9106 TREE_TYPE (arg1),
9107 variable2, cst));
9108 }
9109
9110 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9111 ? MINUS_EXPR : PLUS_EXPR,
9112 const1, const2);
9113 if (!TREE_OVERFLOW (cst)
9114 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9115 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9116 {
9117 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9118 return fold_build2_loc (loc, code, type,
9119 fold_build2_loc (loc, TREE_CODE (arg0),
9120 TREE_TYPE (arg0),
9121 variable1, cst),
9122 variable2);
9123 }
9124 }
9125
9126 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9127 signed arithmetic case. That form is created by the compiler
9128 often enough for folding it to be of value. One example is in
9129 computing loop trip counts after Operator Strength Reduction. */
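/* For example, with signed X: X * 4 > 0 folds to X > 0, while
   X * -4 > 0 folds to X < 0 because a negative constant swaps the
   sense of the comparison.  */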
9130 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9131 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9132 && TREE_CODE (arg0) == MULT_EXPR
9133 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9134 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9135 && integer_zerop (arg1))
9136 {
9137 tree const1 = TREE_OPERAND (arg0, 1);
9138 tree const2 = arg1; /* zero */
9139 tree variable1 = TREE_OPERAND (arg0, 0);
9140 enum tree_code cmp_code = code;
9141
9142 /* Handle unfolded multiplication by zero. */
9143 if (integer_zerop (const1))
9144 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9145
9146 fold_overflow_warning (("assuming signed overflow does not occur when "
9147 "eliminating multiplication in comparison "
9148 "with zero"),
9149 WARN_STRICT_OVERFLOW_COMPARISON);
9150
9151 /* If const1 is negative we swap the sense of the comparison. */
9152 if (tree_int_cst_sgn (const1) < 0)
9153 cmp_code = swap_tree_comparison (cmp_code);
9154
9155 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9156 }
9157
9158 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9159 if (tem)
9160 return tem;
9161
9162 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9163 {
9164 tree targ0 = strip_float_extensions (arg0);
9165 tree targ1 = strip_float_extensions (arg1);
9166 tree newtype = TREE_TYPE (targ0);
9167
9168 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9169 newtype = TREE_TYPE (targ1);
9170
9171 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9172 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9173 return fold_build2_loc (loc, code, type,
9174 fold_convert_loc (loc, newtype, targ0),
9175 fold_convert_loc (loc, newtype, targ1));
9176
9177 /* (-a) CMP (-b) -> b CMP a */
9178 if (TREE_CODE (arg0) == NEGATE_EXPR
9179 && TREE_CODE (arg1) == NEGATE_EXPR)
9180 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg1, 0),
9181 TREE_OPERAND (arg0, 0));
9182
9183 if (TREE_CODE (arg1) == REAL_CST)
9184 {
9185 REAL_VALUE_TYPE cst;
9186 cst = TREE_REAL_CST (arg1);
9187
9188 /* (-a) CMP CST -> a swap(CMP) (-CST) */
9189 if (TREE_CODE (arg0) == NEGATE_EXPR)
9190 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9191 TREE_OPERAND (arg0, 0),
9192 build_real (TREE_TYPE (arg1),
9193 real_value_negate (&cst)));
9194
9195 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9196 /* a CMP (-0) -> a CMP 0 */
9197 if (REAL_VALUE_MINUS_ZERO (cst))
9198 return fold_build2_loc (loc, code, type, arg0,
9199 build_real (TREE_TYPE (arg1), dconst0));
9200
9201 /* x != NaN is always true, other ops are always false. */
9202 if (REAL_VALUE_ISNAN (cst)
9203 && ! HONOR_SNANS (arg1))
9204 {
9205 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9206 return omit_one_operand_loc (loc, type, tem, arg0);
9207 }
9208
9209 /* Fold comparisons against infinity. */
9210 if (REAL_VALUE_ISINF (cst)
9211 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9212 {
9213 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9214 if (tem != NULL_TREE)
9215 return tem;
9216 }
9217 }
9218
9219 /* If this is a comparison of a real constant with a PLUS_EXPR
9220 or a MINUS_EXPR of a real constant, we can convert it into a
9221 comparison with a revised real constant as long as no overflow
9222 occurs when unsafe_math_optimizations are enabled. */
9223 if (flag_unsafe_math_optimizations
9224 && TREE_CODE (arg1) == REAL_CST
9225 && (TREE_CODE (arg0) == PLUS_EXPR
9226 || TREE_CODE (arg0) == MINUS_EXPR)
9227 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9228 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9229 ? MINUS_EXPR : PLUS_EXPR,
9230 arg1, TREE_OPERAND (arg0, 1)))
9231 && !TREE_OVERFLOW (tem))
9232 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9233
9234 /* Likewise, we can simplify a comparison of a real constant with
9235 a MINUS_EXPR whose first operand is also a real constant, i.e.
9236 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9237 floating-point types only if -fassociative-math is set. */
9238 if (flag_associative_math
9239 && TREE_CODE (arg1) == REAL_CST
9240 && TREE_CODE (arg0) == MINUS_EXPR
9241 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9242 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9243 arg1))
9244 && !TREE_OVERFLOW (tem))
9245 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9246 TREE_OPERAND (arg0, 1), tem);
9247
9248 /* Fold comparisons against built-in math functions. */
9249 if (TREE_CODE (arg1) == REAL_CST
9250 && flag_unsafe_math_optimizations
9251 && ! flag_errno_math)
9252 {
9253 enum built_in_function fcode = builtin_mathfn_code (arg0);
9254
9255 if (fcode != END_BUILTINS)
9256 {
9257 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9258 if (tem != NULL_TREE)
9259 return tem;
9260 }
9261 }
9262 }
9263
9264 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9265 && CONVERT_EXPR_P (arg0))
9266 {
9267 /* If we are widening one operand of an integer comparison,
9268 see if the other operand is similarly being widened. Perhaps we
9269 can do the comparison in the narrower type. */
9270 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9271 if (tem)
9272 return tem;
9273
9274 /* Or if we are changing signedness. */
9275 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9276 if (tem)
9277 return tem;
9278 }
9279
9280 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9281 constant, we can simplify it. */
9282 if (TREE_CODE (arg1) == INTEGER_CST
9283 && (TREE_CODE (arg0) == MIN_EXPR
9284 || TREE_CODE (arg0) == MAX_EXPR)
9285 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9286 {
9287 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9288 if (tem)
9289 return tem;
9290 }
9291
9292 /* Simplify comparison of something with itself. (For IEEE
9293 floating-point, we can only do some of these simplifications.) */
9294 if (operand_equal_p (arg0, arg1, 0))
9295 {
9296 switch (code)
9297 {
9298 case EQ_EXPR:
9299 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9300 || ! HONOR_NANS (arg0))
9301 return constant_boolean_node (1, type);
9302 break;
9303
9304 case GE_EXPR:
9305 case LE_EXPR:
9306 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9307 || ! HONOR_NANS (arg0))
9308 return constant_boolean_node (1, type);
9309 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9310
9311 case NE_EXPR:
9312 /* For NE, we can only do this simplification if the type is
9313 integral or we don't honor IEEE floating-point NaNs. */
9314 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9315 && HONOR_NANS (arg0))
9316 break;
9317 /* ... fall through ... */
9318 case GT_EXPR:
9319 case LT_EXPR:
9320 return constant_boolean_node (0, type);
9321 default:
9322 gcc_unreachable ();
9323 }
9324 }
9325
9326 /* If we are comparing an expression that just has comparisons
9327 of two integer values, arithmetic expressions of those comparisons,
9328 and constants, we can simplify it. There are only three cases
9329 to check: the two values can either be equal, the first can be
9330 greater, or the second can be greater. Fold the expression for
9331 those three values. Since each value must be 0 or 1, we have
9332 eight possibilities, each of which corresponds to the constant 0
9333 or 1 or one of the six possible comparisons.
9334
9335 This handles common cases like (a > b) == 0 but also handles
9336 expressions like ((x > y) - (y > x)) > 0, which supposedly
9337 occur in macroized code. */
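/* For instance, for (a > b) == 0 the three substitutions (max,min),
   (max,max) and (min,max) for (a,b) evaluate to 0, 1 and 1, giving
   the 3-bit mask 3 below, which selects LE_EXPR: the whole
   expression folds to a <= b.  */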
9338
9339 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9340 {
9341 tree cval1 = 0, cval2 = 0;
9342 int save_p = 0;
9343
9344 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9345 /* Don't handle degenerate cases here; they should already
9346 have been handled anyway. */
9347 && cval1 != 0 && cval2 != 0
9348 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9349 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9350 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9351 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9352 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9353 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9354 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9355 {
9356 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9357 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9358
9359 /* We can't just pass T to eval_subst in case cval1 or cval2
9360 was the same as ARG1. */
9361
9362 tree high_result
9363 = fold_build2_loc (loc, code, type,
9364 eval_subst (loc, arg0, cval1, maxval,
9365 cval2, minval),
9366 arg1);
9367 tree equal_result
9368 = fold_build2_loc (loc, code, type,
9369 eval_subst (loc, arg0, cval1, maxval,
9370 cval2, maxval),
9371 arg1);
9372 tree low_result
9373 = fold_build2_loc (loc, code, type,
9374 eval_subst (loc, arg0, cval1, minval,
9375 cval2, maxval),
9376 arg1);
9377
9378 /* All three of these results should be 0 or 1. Confirm they are.
9379 Then use those values to select the proper code to use. */
9380
9381 if (TREE_CODE (high_result) == INTEGER_CST
9382 && TREE_CODE (equal_result) == INTEGER_CST
9383 && TREE_CODE (low_result) == INTEGER_CST)
9384 {
9385 /* Make a 3-bit mask with the high-order bit being the
9386 value for `>', the next for `=', and the low for `<'. */
9387 switch ((integer_onep (high_result) * 4)
9388 + (integer_onep (equal_result) * 2)
9389 + integer_onep (low_result))
9390 {
9391 case 0:
9392 /* Always false. */
9393 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9394 case 1:
9395 code = LT_EXPR;
9396 break;
9397 case 2:
9398 code = EQ_EXPR;
9399 break;
9400 case 3:
9401 code = LE_EXPR;
9402 break;
9403 case 4:
9404 code = GT_EXPR;
9405 break;
9406 case 5:
9407 code = NE_EXPR;
9408 break;
9409 case 6:
9410 code = GE_EXPR;
9411 break;
9412 case 7:
9413 /* Always true. */
9414 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9415 }
9416
9417 if (save_p)
9418 {
9419 tem = save_expr (build2 (code, type, cval1, cval2));
9420 SET_EXPR_LOCATION (tem, loc);
9421 return tem;
9422 }
9423 return fold_build2_loc (loc, code, type, cval1, cval2);
9424 }
9425 }
9426 }
9427
9428 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9429 into a single range test. */
9430 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9431 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9432 && TREE_CODE (arg1) == INTEGER_CST
9433 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9434 && !integer_zerop (TREE_OPERAND (arg0, 1))
9435 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9436 && !TREE_OVERFLOW (arg1))
9437 {
9438 tem = fold_div_compare (loc, code, type, arg0, arg1);
9439 if (tem != NULL_TREE)
9440 return tem;
9441 }
9442
9443 /* Fold ~X op ~Y as Y op X. */
9444 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9445 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9446 {
9447 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9448 return fold_build2_loc (loc, code, type,
9449 fold_convert_loc (loc, cmp_type,
9450 TREE_OPERAND (arg1, 0)),
9451 TREE_OPERAND (arg0, 0));
9452 }
9453
9454 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9455 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9456 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9457 {
9458 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9459 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9460 TREE_OPERAND (arg0, 0),
9461 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9462 fold_convert_loc (loc, cmp_type, arg1)));
9463 }
9464
9465 return NULL_TREE;
9466 }
9467
9468
9469 /* Subroutine of fold_binary. Optimize complex multiplications of the
9470 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9471 argument EXPR represents the expression "z" of type TYPE. */
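/* Numerically, for z = 3 + 4i this yields (3*3 + 4*4) + 0i = 25,
   which matches (3 + 4i) * (3 - 4i).  */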
9472
9473 static tree
9474 fold_mult_zconjz (location_t loc, tree type, tree expr)
9475 {
9476 tree itype = TREE_TYPE (type);
9477 tree rpart, ipart, tem;
9478
9479 if (TREE_CODE (expr) == COMPLEX_EXPR)
9480 {
9481 rpart = TREE_OPERAND (expr, 0);
9482 ipart = TREE_OPERAND (expr, 1);
9483 }
9484 else if (TREE_CODE (expr) == COMPLEX_CST)
9485 {
9486 rpart = TREE_REALPART (expr);
9487 ipart = TREE_IMAGPART (expr);
9488 }
9489 else
9490 {
9491 expr = save_expr (expr);
9492 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9493 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9494 }
9495
9496 rpart = save_expr (rpart);
9497 ipart = save_expr (ipart);
9498 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9499 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9500 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9501 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9502 build_zero_cst (itype));
9503 }
9504
9505
9506 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9507 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9508 guarantees that P and N have the same least significant log2(M) bits.
9509 N is not otherwise constrained. In particular, N is not normalized to
9510 0 <= N < M as is common. In general, the precise value of P is unknown.
9511 M is chosen as large as possible such that constant N can be determined.
9512
9513 Returns M and sets *RESIDUE to N.
9514
9515 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9516 account. This is not always possible due to PR 35705.
9517 */
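/* A sketch of the intended result, assuming a hypothetical 16-byte
   aligned object "buf": for EXPR == &buf + 6 (a POINTER_PLUS_EXPR)
   this returns M == 16 and sets *RESIDUE to 6, i.e. the pointer is
   known to be congruent to 6 modulo 16.  */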
9518
9519 static unsigned HOST_WIDE_INT
9520 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9521 bool allow_func_align)
9522 {
9523 enum tree_code code;
9524
9525 *residue = 0;
9526
9527 code = TREE_CODE (expr);
9528 if (code == ADDR_EXPR)
9529 {
9530 unsigned int bitalign;
9531 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9532 *residue /= BITS_PER_UNIT;
9533 return bitalign / BITS_PER_UNIT;
9534 }
9535 else if (code == POINTER_PLUS_EXPR)
9536 {
9537 tree op0, op1;
9538 unsigned HOST_WIDE_INT modulus;
9539 enum tree_code inner_code;
9540
9541 op0 = TREE_OPERAND (expr, 0);
9542 STRIP_NOPS (op0);
9543 modulus = get_pointer_modulus_and_residue (op0, residue,
9544 allow_func_align);
9545
9546 op1 = TREE_OPERAND (expr, 1);
9547 STRIP_NOPS (op1);
9548 inner_code = TREE_CODE (op1);
9549 if (inner_code == INTEGER_CST)
9550 {
9551 *residue += TREE_INT_CST_LOW (op1);
9552 return modulus;
9553 }
9554 else if (inner_code == MULT_EXPR)
9555 {
9556 op1 = TREE_OPERAND (op1, 1);
9557 if (TREE_CODE (op1) == INTEGER_CST)
9558 {
9559 unsigned HOST_WIDE_INT align;
9560
9561 /* Compute the greatest power-of-2 divisor of op1. */
9562 align = TREE_INT_CST_LOW (op1);
9563 align &= -align;
9564
9565 /* If align is non-zero and less than modulus, replace
9566 modulus with align. If align is 0, then either op1 is 0
9567 or the greatest power-of-2 divisor of op1 doesn't fit in an
9568 unsigned HOST_WIDE_INT. In either case, no additional
9569 constraint is imposed. */
9570 if (align)
9571 modulus = MIN (modulus, align);
9572
9573 return modulus;
9574 }
9575 }
9576 }
9577
9578 /* If we get here, we were unable to determine anything useful about the
9579 expression. */
9580 return 1;
9581 }
9582
9583 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9584 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9585
9586 static bool
9587 vec_cst_ctor_to_array (tree arg, tree *elts)
9588 {
9589 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9590
9591 if (TREE_CODE (arg) == VECTOR_CST)
9592 {
9593 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9594 elts[i] = VECTOR_CST_ELT (arg, i);
9595 }
9596 else if (TREE_CODE (arg) == CONSTRUCTOR)
9597 {
9598 constructor_elt *elt;
9599
9600 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9601 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9602 return false;
9603 else
9604 elts[i] = elt->value;
9605 }
9606 else
9607 return false;
9608 for (; i < nelts; i++)
9609 elts[i]
9610 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9611 return true;
9612 }
9613
9614 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9615 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9616 NULL_TREE otherwise. */
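/* For example, with four-element vectors ARG0 = {a,b,c,d} and
   ARG1 = {e,f,g,h}, the selector {0,4,1,5} yields {a,e,b,f}:
   indices below the element count select from ARG0, the remaining
   ones from ARG1.  */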
9617
9618 static tree
9619 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9620 {
9621 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9622 tree *elts;
9623 bool need_ctor = false;
9624
9625 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9626 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9627 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9628 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9629 return NULL_TREE;
9630
9631 elts = XALLOCAVEC (tree, nelts * 3);
9632 if (!vec_cst_ctor_to_array (arg0, elts)
9633 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9634 return NULL_TREE;
9635
9636 for (i = 0; i < nelts; i++)
9637 {
9638 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9639 need_ctor = true;
9640 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9641 }
9642
9643 if (need_ctor)
9644 {
9645 vec<constructor_elt, va_gc> *v;
9646 vec_alloc (v, nelts);
9647 for (i = 0; i < nelts; i++)
9648 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9649 return build_constructor (type, v);
9650 }
9651 else
9652 return build_vector (type, &elts[2 * nelts]);
9653 }
9654
9655 /* Try to fold a pointer difference of type TYPE between two address expressions of
9656 array references AREF0 and AREF1 using location LOC. Return a
9657 simplified expression for the difference or NULL_TREE. */
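/* For instance, once the equal bases are recognized, &a[4] - &a[1]
   simplifies to (4 - 1) * sizeof (a[0]); for nested array references
   the routine first recurses on the bases.  */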
9658
9659 static tree
9660 fold_addr_of_array_ref_difference (location_t loc, tree type,
9661 tree aref0, tree aref1)
9662 {
9663 tree base0 = TREE_OPERAND (aref0, 0);
9664 tree base1 = TREE_OPERAND (aref1, 0);
9665 tree base_offset = build_int_cst (type, 0);
9666
9667 /* If the bases are array references as well, recurse. If the bases
9668 are pointer indirections compute the difference of the pointers.
9669 If the bases are equal, we are set. */
9670 if ((TREE_CODE (base0) == ARRAY_REF
9671 && TREE_CODE (base1) == ARRAY_REF
9672 && (base_offset
9673 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9674 || (INDIRECT_REF_P (base0)
9675 && INDIRECT_REF_P (base1)
9676 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9677 TREE_OPERAND (base0, 0),
9678 TREE_OPERAND (base1, 0))))
9679 || operand_equal_p (base0, base1, 0))
9680 {
9681 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9682 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9683 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9684 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9685 return fold_build2_loc (loc, PLUS_EXPR, type,
9686 base_offset,
9687 fold_build2_loc (loc, MULT_EXPR, type,
9688 diff, esz));
9689 }
9690 return NULL_TREE;
9691 }
9692
9693 /* If the real or vector real constant CST of type TYPE has an exact
9694 inverse, return it, else return NULL. */
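/* E.g. for the double constant 2.0 this returns 0.5, while 3.0
   returns NULL_TREE because 1/3 has no exact binary floating-point
   representation.  */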
9695
9696 tree
9697 exact_inverse (tree type, tree cst)
9698 {
9699 REAL_VALUE_TYPE r;
9700 tree unit_type, *elts;
9701 machine_mode mode;
9702 unsigned vec_nelts, i;
9703
9704 switch (TREE_CODE (cst))
9705 {
9706 case REAL_CST:
9707 r = TREE_REAL_CST (cst);
9708
9709 if (exact_real_inverse (TYPE_MODE (type), &r))
9710 return build_real (type, r);
9711
9712 return NULL_TREE;
9713
9714 case VECTOR_CST:
9715 vec_nelts = VECTOR_CST_NELTS (cst);
9716 elts = XALLOCAVEC (tree, vec_nelts);
9717 unit_type = TREE_TYPE (type);
9718 mode = TYPE_MODE (unit_type);
9719
9720 for (i = 0; i < vec_nelts; i++)
9721 {
9722 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9723 if (!exact_real_inverse (mode, &r))
9724 return NULL_TREE;
9725 elts[i] = build_real (unit_type, r);
9726 }
9727
9728 return build_vector (type, elts);
9729
9730 default:
9731 return NULL_TREE;
9732 }
9733 }
9734
9735 /* Mask out the tz least significant bits of X of type TYPE where
9736 tz is the number of trailing zeroes in Y. */
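/* For example, if Y is 8 (three trailing zeroes), X = 0b10111 becomes
   0b10000: the low three bits of X are cleared and the remaining bits
   are kept.  */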
9737 static wide_int
9738 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9739 {
9740 int tz = wi::ctz (y);
9741 if (tz > 0)
9742 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9743 return x;
9744 }
9745
9746 /* Return true when T is an address and is known to be nonzero.
9747 For floating point we further ensure that T is not denormal.
9748 Similar logic is present in nonzero_address in rtlanal.h.
9749
9750 If the return value is based on the assumption that signed overflow
9751 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9752 change *STRICT_OVERFLOW_P. */
9753
9754 static bool
9755 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9756 {
9757 tree type = TREE_TYPE (t);
9758 enum tree_code code;
9759
9760 /* Doing something useful for floating point would need more work. */
9761 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9762 return false;
9763
9764 code = TREE_CODE (t);
9765 switch (TREE_CODE_CLASS (code))
9766 {
9767 case tcc_unary:
9768 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9769 strict_overflow_p);
9770 case tcc_binary:
9771 case tcc_comparison:
9772 return tree_binary_nonzero_warnv_p (code, type,
9773 TREE_OPERAND (t, 0),
9774 TREE_OPERAND (t, 1),
9775 strict_overflow_p);
9776 case tcc_constant:
9777 case tcc_declaration:
9778 case tcc_reference:
9779 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9780
9781 default:
9782 break;
9783 }
9784
9785 switch (code)
9786 {
9787 case TRUTH_NOT_EXPR:
9788 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9789 strict_overflow_p);
9790
9791 case TRUTH_AND_EXPR:
9792 case TRUTH_OR_EXPR:
9793 case TRUTH_XOR_EXPR:
9794 return tree_binary_nonzero_warnv_p (code, type,
9795 TREE_OPERAND (t, 0),
9796 TREE_OPERAND (t, 1),
9797 strict_overflow_p);
9798
9799 case COND_EXPR:
9800 case CONSTRUCTOR:
9801 case OBJ_TYPE_REF:
9802 case ASSERT_EXPR:
9803 case ADDR_EXPR:
9804 case WITH_SIZE_EXPR:
9805 case SSA_NAME:
9806 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9807
9808 case COMPOUND_EXPR:
9809 case MODIFY_EXPR:
9810 case BIND_EXPR:
9811 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9812 strict_overflow_p);
9813
9814 case SAVE_EXPR:
9815 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9816 strict_overflow_p);
9817
9818 case CALL_EXPR:
9819 {
9820 tree fndecl = get_callee_fndecl (t);
9821 if (!fndecl) return false;
9822 if (flag_delete_null_pointer_checks && !flag_check_new
9823 && DECL_IS_OPERATOR_NEW (fndecl)
9824 && !TREE_NOTHROW (fndecl))
9825 return true;
9826 if (flag_delete_null_pointer_checks
9827 && lookup_attribute ("returns_nonnull",
9828 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9829 return true;
9830 return alloca_call_p (t);
9831 }
9832
9833 default:
9834 break;
9835 }
9836 return false;
9837 }
9838
9839 /* Return true when T is an address and is known to be nonzero.
9840 Handle warnings about undefined signed overflow. */
9841
9842 static bool
9843 tree_expr_nonzero_p (tree t)
9844 {
9845 bool ret, strict_overflow_p;
9846
9847 strict_overflow_p = false;
9848 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9849 if (strict_overflow_p)
9850 fold_overflow_warning (("assuming signed overflow does not occur when "
9851 "determining that expression is always "
9852 "non-zero"),
9853 WARN_STRICT_OVERFLOW_MISC);
9854 return ret;
9855 }
9856
9857 /* Fold a binary expression of code CODE and type TYPE with operands
9858 OP0 and OP1. LOC is the location of the resulting expression.
9859 Return the folded expression if folding is successful. Otherwise,
9860 return NULL_TREE. */
9861
9862 tree
9863 fold_binary_loc (location_t loc,
9864 enum tree_code code, tree type, tree op0, tree op1)
9865 {
9866 enum tree_code_class kind = TREE_CODE_CLASS (code);
9867 tree arg0, arg1, tem;
9868 tree t1 = NULL_TREE;
9869 bool strict_overflow_p;
9870 unsigned int prec;
9871
9872 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9873 && TREE_CODE_LENGTH (code) == 2
9874 && op0 != NULL_TREE
9875 && op1 != NULL_TREE);
9876
9877 arg0 = op0;
9878 arg1 = op1;
9879
9880 /* Strip any conversions that don't change the mode. This is
9881 safe for every expression, except for a comparison expression
9882 because its signedness is derived from its operands. So, in
9883 the latter case, only strip conversions that don't change the
9884 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9885 preserved.
9886
9887 Note that this is done as an internal manipulation within the
9888 constant folder, in order to find the simplest representation
9889 of the arguments so that their form can be studied. In any
9890 cases, the appropriate type conversions should be put back in
9891 the tree that will get out of the constant folder. */
9892
9893 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9894 {
9895 STRIP_SIGN_NOPS (arg0);
9896 STRIP_SIGN_NOPS (arg1);
9897 }
9898 else
9899 {
9900 STRIP_NOPS (arg0);
9901 STRIP_NOPS (arg1);
9902 }
9903
9904 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9905 constant but we can't do arithmetic on them. */
9906 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9907 {
9908 tem = const_binop (code, type, arg0, arg1);
9909 if (tem != NULL_TREE)
9910 {
9911 if (TREE_TYPE (tem) != type)
9912 tem = fold_convert_loc (loc, type, tem);
9913 return tem;
9914 }
9915 }
9916
9917 /* If this is a commutative operation, and ARG0 is a constant, move it
9918 to ARG1 to reduce the number of tests below. */
9919 if (commutative_tree_code (code)
9920 && tree_swap_operands_p (arg0, arg1, true))
9921 return fold_build2_loc (loc, code, type, op1, op0);
9922
9923 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9924 to ARG1 to reduce the number of tests below. */
9925 if (kind == tcc_comparison
9926 && tree_swap_operands_p (arg0, arg1, true))
9927 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9928
9929 tem = generic_simplify (loc, code, type, op0, op1);
9930 if (tem)
9931 return tem;
9932
9933 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9934
9935 First check for cases where an arithmetic operation is applied to a
9936 compound, conditional, or comparison operation. Push the arithmetic
9937 operation inside the compound or conditional to see if any folding
9938 can then be done. Convert comparison to conditional for this purpose.
9939 This also optimizes non-constant cases that used to be done in
9940 expand_expr.
9941
9942 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR
9943 where one of the operands is a truth value and the other is a truth
9944 value or a BIT_AND_EXPR with the constant 1. In that case, the
9945 code below would make the expression more complex. Change it to a
9946 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9947 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9948
9949 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9950 || code == EQ_EXPR || code == NE_EXPR)
9951 && TREE_CODE (type) != VECTOR_TYPE
9952 && ((truth_value_p (TREE_CODE (arg0))
9953 && (truth_value_p (TREE_CODE (arg1))
9954 || (TREE_CODE (arg1) == BIT_AND_EXPR
9955 && integer_onep (TREE_OPERAND (arg1, 1)))))
9956 || (truth_value_p (TREE_CODE (arg1))
9957 && (truth_value_p (TREE_CODE (arg0))
9958 || (TREE_CODE (arg0) == BIT_AND_EXPR
9959 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9960 {
9961 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9962 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9963 : TRUTH_XOR_EXPR,
9964 boolean_type_node,
9965 fold_convert_loc (loc, boolean_type_node, arg0),
9966 fold_convert_loc (loc, boolean_type_node, arg1));
9967
9968 if (code == EQ_EXPR)
9969 tem = invert_truthvalue_loc (loc, tem);
9970
9971 return fold_convert_loc (loc, type, tem);
9972 }
9973
9974 if (TREE_CODE_CLASS (code) == tcc_binary
9975 || TREE_CODE_CLASS (code) == tcc_comparison)
9976 {
9977 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9978 {
9979 tem = fold_build2_loc (loc, code, type,
9980 fold_convert_loc (loc, TREE_TYPE (op0),
9981 TREE_OPERAND (arg0, 1)), op1);
9982 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9983 tem);
9984 }
9985 if (TREE_CODE (arg1) == COMPOUND_EXPR
9986 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9987 {
9988 tem = fold_build2_loc (loc, code, type, op0,
9989 fold_convert_loc (loc, TREE_TYPE (op1),
9990 TREE_OPERAND (arg1, 1)));
9991 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9992 tem);
9993 }
9994
9995 if (TREE_CODE (arg0) == COND_EXPR
9996 || TREE_CODE (arg0) == VEC_COND_EXPR
9997 || COMPARISON_CLASS_P (arg0))
9998 {
9999 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10000 arg0, arg1,
10001 /*cond_first_p=*/1);
10002 if (tem != NULL_TREE)
10003 return tem;
10004 }
10005
10006 if (TREE_CODE (arg1) == COND_EXPR
10007 || TREE_CODE (arg1) == VEC_COND_EXPR
10008 || COMPARISON_CLASS_P (arg1))
10009 {
10010 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
10011 arg1, arg0,
10012 /*cond_first_p=*/0);
10013 if (tem != NULL_TREE)
10014 return tem;
10015 }
10016 }
10017
10018 switch (code)
10019 {
10020 case MEM_REF:
10021 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10022 if (TREE_CODE (arg0) == ADDR_EXPR
10023 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10024 {
10025 tree iref = TREE_OPERAND (arg0, 0);
10026 return fold_build2 (MEM_REF, type,
10027 TREE_OPERAND (iref, 0),
10028 int_const_binop (PLUS_EXPR, arg1,
10029 TREE_OPERAND (iref, 1)));
10030 }
10031
10032 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10033 if (TREE_CODE (arg0) == ADDR_EXPR
10034 && handled_component_p (TREE_OPERAND (arg0, 0)))
10035 {
10036 tree base;
10037 HOST_WIDE_INT coffset;
10038 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10039 &coffset);
10040 if (!base)
10041 return NULL_TREE;
10042 return fold_build2 (MEM_REF, type,
10043 build_fold_addr_expr (base),
10044 int_const_binop (PLUS_EXPR, arg1,
10045 size_int (coffset)));
10046 }
10047
10048 return NULL_TREE;
10049
10050 case POINTER_PLUS_EXPR:
10051 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10052 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10053 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10054 return fold_convert_loc (loc, type,
10055 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10056 fold_convert_loc (loc, sizetype,
10057 arg1),
10058 fold_convert_loc (loc, sizetype,
10059 arg0)));
10060
10061 return NULL_TREE;
10062
10063 case PLUS_EXPR:
10064 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10065 {
10066 /* X + (X / CST) * -CST is X % CST. */
10067 if (TREE_CODE (arg1) == MULT_EXPR
10068 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10069 && operand_equal_p (arg0,
10070 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10071 {
10072 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10073 tree cst1 = TREE_OPERAND (arg1, 1);
10074 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10075 cst1, cst0);
10076 if (sum && integer_zerop (sum))
10077 return fold_convert_loc (loc, type,
10078 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10079 TREE_TYPE (arg0), arg0,
10080 cst0));
10081 }
10082 }
10083
10084 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10085 one. Make sure the type is not saturating and has the signedness of
10086 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10087 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10088 if ((TREE_CODE (arg0) == MULT_EXPR
10089 || TREE_CODE (arg1) == MULT_EXPR)
10090 && !TYPE_SATURATING (type)
10091 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10092 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10093 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10094 {
10095 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10096 if (tem)
10097 return tem;
10098 }
10099
10100 if (! FLOAT_TYPE_P (type))
10101 {
10102 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10103 with a constant, and the two constants have no bits in common,
10104 we should treat this as a BIT_IOR_EXPR since this may produce more
10105 simplifications. */
10106 if (TREE_CODE (arg0) == BIT_AND_EXPR
10107 && TREE_CODE (arg1) == BIT_AND_EXPR
10108 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10109 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10110 && wi::bit_and (TREE_OPERAND (arg0, 1),
10111 TREE_OPERAND (arg1, 1)) == 0)
10112 {
10113 code = BIT_IOR_EXPR;
10114 goto bit_ior;
10115 }
10116
10117 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10118 (plus (plus (mult) (mult)) (foo)) so that we can
10119 take advantage of the factoring cases below. */
10120 if (ANY_INTEGRAL_TYPE_P (type)
10121 && TYPE_OVERFLOW_WRAPS (type)
10122 && (((TREE_CODE (arg0) == PLUS_EXPR
10123 || TREE_CODE (arg0) == MINUS_EXPR)
10124 && TREE_CODE (arg1) == MULT_EXPR)
10125 || ((TREE_CODE (arg1) == PLUS_EXPR
10126 || TREE_CODE (arg1) == MINUS_EXPR)
10127 && TREE_CODE (arg0) == MULT_EXPR)))
10128 {
10129 tree parg0, parg1, parg, marg;
10130 enum tree_code pcode;
10131
10132 if (TREE_CODE (arg1) == MULT_EXPR)
10133 parg = arg0, marg = arg1;
10134 else
10135 parg = arg1, marg = arg0;
10136 pcode = TREE_CODE (parg);
10137 parg0 = TREE_OPERAND (parg, 0);
10138 parg1 = TREE_OPERAND (parg, 1);
10139 STRIP_NOPS (parg0);
10140 STRIP_NOPS (parg1);
10141
10142 if (TREE_CODE (parg0) == MULT_EXPR
10143 && TREE_CODE (parg1) != MULT_EXPR)
10144 return fold_build2_loc (loc, pcode, type,
10145 fold_build2_loc (loc, PLUS_EXPR, type,
10146 fold_convert_loc (loc, type,
10147 parg0),
10148 fold_convert_loc (loc, type,
10149 marg)),
10150 fold_convert_loc (loc, type, parg1));
10151 if (TREE_CODE (parg0) != MULT_EXPR
10152 && TREE_CODE (parg1) == MULT_EXPR)
10153 return
10154 fold_build2_loc (loc, PLUS_EXPR, type,
10155 fold_convert_loc (loc, type, parg0),
10156 fold_build2_loc (loc, pcode, type,
10157 fold_convert_loc (loc, type, marg),
10158 fold_convert_loc (loc, type,
10159 parg1)));
10160 }
10161 }
10162 else
10163 {
10164 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10165 to __complex__ ( x, y ). This is not the same for SNaNs or
10166 if signed zeros are involved. */
10167 if (!HONOR_SNANS (element_mode (arg0))
10168 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10169 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10170 {
10171 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10172 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10173 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10174 bool arg0rz = false, arg0iz = false;
10175 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10176 || (arg0i && (arg0iz = real_zerop (arg0i))))
10177 {
10178 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10179 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10180 if (arg0rz && arg1i && real_zerop (arg1i))
10181 {
10182 tree rp = arg1r ? arg1r
10183 : build1 (REALPART_EXPR, rtype, arg1);
10184 tree ip = arg0i ? arg0i
10185 : build1 (IMAGPART_EXPR, rtype, arg0);
10186 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10187 }
10188 else if (arg0iz && arg1r && real_zerop (arg1r))
10189 {
10190 tree rp = arg0r ? arg0r
10191 : build1 (REALPART_EXPR, rtype, arg0);
10192 tree ip = arg1i ? arg1i
10193 : build1 (IMAGPART_EXPR, rtype, arg1);
10194 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10195 }
10196 }
10197 }
10198
10199 if (flag_unsafe_math_optimizations
10200 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10201 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10202 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10203 return tem;
10204
10205 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10206 We associate floats only if the user has specified
10207 -fassociative-math. */
10208 if (flag_associative_math
10209 && TREE_CODE (arg1) == PLUS_EXPR
10210 && TREE_CODE (arg0) != MULT_EXPR)
10211 {
10212 tree tree10 = TREE_OPERAND (arg1, 0);
10213 tree tree11 = TREE_OPERAND (arg1, 1);
10214 if (TREE_CODE (tree11) == MULT_EXPR
10215 && TREE_CODE (tree10) == MULT_EXPR)
10216 {
10217 tree tree0;
10218 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10219 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10220 }
10221 }
10222 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
10223 We associate floats only if the user has specified
10224 -fassociative-math. */
10225 if (flag_associative_math
10226 && TREE_CODE (arg0) == PLUS_EXPR
10227 && TREE_CODE (arg1) != MULT_EXPR)
10228 {
10229 tree tree00 = TREE_OPERAND (arg0, 0);
10230 tree tree01 = TREE_OPERAND (arg0, 1);
10231 if (TREE_CODE (tree01) == MULT_EXPR
10232 && TREE_CODE (tree00) == MULT_EXPR)
10233 {
10234 tree tree0;
10235 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10236 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10237 }
10238 }
10239 }
10240
10241 bit_rotate:
10242 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10243 is a rotate of A by C1 bits. */
10244 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10245 is a rotate of A by B bits. */
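/* Concretely, for a 32-bit unsigned A the expression
   (A << 3) + (A >> 29) matches the first pattern (3 + 29 == 32) and
   is turned into a left rotation of A by 3 bits.  */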
10246 {
10247 enum tree_code code0, code1;
10248 tree rtype;
10249 code0 = TREE_CODE (arg0);
10250 code1 = TREE_CODE (arg1);
10251 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10252 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10253 && operand_equal_p (TREE_OPERAND (arg0, 0),
10254 TREE_OPERAND (arg1, 0), 0)
10255 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10256 TYPE_UNSIGNED (rtype))
10257 /* Only create rotates in complete modes. Other cases are not
10258 expanded properly. */
10259 && (element_precision (rtype)
10260 == element_precision (TYPE_MODE (rtype))))
10261 {
10262 tree tree01, tree11;
10263 enum tree_code code01, code11;
10264
10265 tree01 = TREE_OPERAND (arg0, 1);
10266 tree11 = TREE_OPERAND (arg1, 1);
10267 STRIP_NOPS (tree01);
10268 STRIP_NOPS (tree11);
10269 code01 = TREE_CODE (tree01);
10270 code11 = TREE_CODE (tree11);
10271 if (code01 == INTEGER_CST
10272 && code11 == INTEGER_CST
10273 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10274 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10275 {
10276 tem = build2_loc (loc, LROTATE_EXPR,
10277 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10278 TREE_OPERAND (arg0, 0),
10279 code0 == LSHIFT_EXPR
10280 ? TREE_OPERAND (arg0, 1)
10281 : TREE_OPERAND (arg1, 1));
10282 return fold_convert_loc (loc, type, tem);
10283 }
10284 else if (code11 == MINUS_EXPR)
10285 {
10286 tree tree110, tree111;
10287 tree110 = TREE_OPERAND (tree11, 0);
10288 tree111 = TREE_OPERAND (tree11, 1);
10289 STRIP_NOPS (tree110);
10290 STRIP_NOPS (tree111);
10291 if (TREE_CODE (tree110) == INTEGER_CST
10292 && 0 == compare_tree_int (tree110,
10293 element_precision
10294 (TREE_TYPE (TREE_OPERAND
10295 (arg0, 0))))
10296 && operand_equal_p (tree01, tree111, 0))
10297 return
10298 fold_convert_loc (loc, type,
10299 build2 ((code0 == LSHIFT_EXPR
10300 ? LROTATE_EXPR
10301 : RROTATE_EXPR),
10302 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10303 TREE_OPERAND (arg0, 0),
10304 TREE_OPERAND (arg0, 1)));
10305 }
10306 else if (code01 == MINUS_EXPR)
10307 {
10308 tree tree010, tree011;
10309 tree010 = TREE_OPERAND (tree01, 0);
10310 tree011 = TREE_OPERAND (tree01, 1);
10311 STRIP_NOPS (tree010);
10312 STRIP_NOPS (tree011);
10313 if (TREE_CODE (tree010) == INTEGER_CST
10314 && 0 == compare_tree_int (tree010,
10315 element_precision
10316 (TREE_TYPE (TREE_OPERAND
10317 (arg0, 0))))
10318 && operand_equal_p (tree11, tree011, 0))
10319 return fold_convert_loc
10320 (loc, type,
10321 build2 ((code0 != LSHIFT_EXPR
10322 ? LROTATE_EXPR
10323 : RROTATE_EXPR),
10324 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10325 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10326 }
10327 }
10328 }
10329
10330 associate:
10331   /* In most languages, we can't associate operations on floats through
10332 parentheses. Rather than remember where the parentheses were, we
10333 don't associate floats at all, unless the user has specified
10334 -fassociative-math.
10335      And, we need to make sure the type is not saturating. */
10336
10337 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10338 && !TYPE_SATURATING (type))
10339 {
10340 tree var0, con0, lit0, minus_lit0;
10341 tree var1, con1, lit1, minus_lit1;
10342 tree atype = type;
10343 bool ok = true;
10344
10345 /* Split both trees into variables, constants, and literals. Then
10346 associate each group together, the constants with literals,
10347 then the result with variables. This increases the chances of
10348 literals being recombined later and of generating relocatable
10349 expressions for the sum of a constant and literal. */
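	 /* A worked sketch (example operands assumed): for
	      (x + &a) + (y + 12)
	    split_tree yields variables x and y, the non-literal constant
	    &a, and the literal 12; reassociating gives (x + y) + (&a + 12),
	    which keeps the relocatable part in a single constant
	    expression.  */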
10350 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10351 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10352 code == MINUS_EXPR);
10353
10354 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10355 if (code == MINUS_EXPR)
10356 code = PLUS_EXPR;
10357
10358 /* With undefined overflow prefer doing association in a type
10359 which wraps on overflow, if that is one of the operand types. */
10360 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10361 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10362 {
10363 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10364 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10365 atype = TREE_TYPE (arg0);
10366 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10367 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10368 atype = TREE_TYPE (arg1);
10369 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10370 }
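	  /* For instance (assumed scenario): when TYPE is a signed type
	     with undefined overflow but ARG0 was computed in the
	     corresponding unsigned type, ATYPE becomes that unsigned
	     type, so reassociating there cannot introduce new undefined
	     overflow.  */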
10371
10372 /* With undefined overflow we can only associate constants with one
10373 variable, and constants whose association doesn't overflow. */
10374 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10375 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10376 {
10377 if (var0 && var1)
10378 {
10379 tree tmp0 = var0;
10380 tree tmp1 = var1;
10381
10382 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10383 tmp0 = TREE_OPERAND (tmp0, 0);
10384 if (CONVERT_EXPR_P (tmp0)
10385 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10386 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10387 <= TYPE_PRECISION (atype)))
10388 tmp0 = TREE_OPERAND (tmp0, 0);
10389 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10390 tmp1 = TREE_OPERAND (tmp1, 0);
10391 if (CONVERT_EXPR_P (tmp1)
10392 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10393 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10394 <= TYPE_PRECISION (atype)))
10395 tmp1 = TREE_OPERAND (tmp1, 0);
10396 /* The only case we can still associate with two variables
10397 is if they are the same, modulo negation and bit-pattern
10398 preserving conversions. */
10399 if (!operand_equal_p (tmp0, tmp1, 0))
10400 ok = false;
10401 }
10402 }
10403
10404 /* Only do something if we found more than two objects. Otherwise,
10405 nothing has changed and we risk infinite recursion. */
10406 if (ok
10407 && (2 < ((var0 != 0) + (var1 != 0)
10408 + (con0 != 0) + (con1 != 0)
10409 + (lit0 != 0) + (lit1 != 0)
10410 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10411 {
10412 bool any_overflows = false;
10413 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10414 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10415 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10416 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10417 var0 = associate_trees (loc, var0, var1, code, atype);
10418 con0 = associate_trees (loc, con0, con1, code, atype);
10419 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10420 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10421 code, atype);
10422
10423 /* Preserve the MINUS_EXPR if the negative part of the literal is
10424 greater than the positive part. Otherwise, the multiplicative
10425      folding code (i.e. extract_muldiv) may be fooled when
10426      unsigned constants are subtracted, as in the following
10427 example: ((X*2 + 4) - 8U)/2. */
10428 if (minus_lit0 && lit0)
10429 {
10430 if (TREE_CODE (lit0) == INTEGER_CST
10431 && TREE_CODE (minus_lit0) == INTEGER_CST
10432 && tree_int_cst_lt (lit0, minus_lit0))
10433 {
10434 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10435 MINUS_EXPR, atype);
10436 lit0 = 0;
10437 }
10438 else
10439 {
10440 lit0 = associate_trees (loc, lit0, minus_lit0,
10441 MINUS_EXPR, atype);
10442 minus_lit0 = 0;
10443 }
10444 }
10445
10446 /* Don't introduce overflows through reassociation. */
10447 if (!any_overflows
10448 && ((lit0 && TREE_OVERFLOW_P (lit0))
10449 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10450 return NULL_TREE;
10451
10452 if (minus_lit0)
10453 {
10454 if (con0 == 0)
10455 return
10456 fold_convert_loc (loc, type,
10457 associate_trees (loc, var0, minus_lit0,
10458 MINUS_EXPR, atype));
10459 else
10460 {
10461 con0 = associate_trees (loc, con0, minus_lit0,
10462 MINUS_EXPR, atype);
10463 return
10464 fold_convert_loc (loc, type,
10465 associate_trees (loc, var0, con0,
10466 PLUS_EXPR, atype));
10467 }
10468 }
10469
10470 con0 = associate_trees (loc, con0, lit0, code, atype);
10471 return
10472 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10473 code, atype));
10474 }
10475 }
10476
10477 return NULL_TREE;
10478
10479 case MINUS_EXPR:
10480 /* Pointer simplifications for subtraction, simple reassociations. */
10481 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10482 {
10483 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10484 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10485 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10486 {
10487 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10488 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10489 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10490 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10491 return fold_build2_loc (loc, PLUS_EXPR, type,
10492 fold_build2_loc (loc, MINUS_EXPR, type,
10493 arg00, arg10),
10494 fold_build2_loc (loc, MINUS_EXPR, type,
10495 arg01, arg11));
10496 }
10497 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10498 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10499 {
10500 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10501 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10502 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10503 fold_convert_loc (loc, type, arg1));
10504 if (tmp)
10505 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10506 }
10507 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10508 simplifies. */
10509 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10510 {
10511 tree arg10 = fold_convert_loc (loc, type,
10512 TREE_OPERAND (arg1, 0));
10513 tree arg11 = fold_convert_loc (loc, type,
10514 TREE_OPERAND (arg1, 1));
10515 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10516 fold_convert_loc (loc, type, arg0),
10517 arg10);
10518 if (tmp)
10519 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10520 }
10521 }
10522 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10523 if (TREE_CODE (arg0) == NEGATE_EXPR
10524 && negate_expr_p (arg1)
10525 && reorder_operands_p (arg0, arg1))
10526 return fold_build2_loc (loc, MINUS_EXPR, type,
10527 fold_convert_loc (loc, type,
10528 negate_expr (arg1)),
10529 fold_convert_loc (loc, type,
10530 TREE_OPERAND (arg0, 0)));
10531
10532 /* X - (X / Y) * Y is X % Y. */
10533 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10534 && TREE_CODE (arg1) == MULT_EXPR
10535 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10536 && operand_equal_p (arg0,
10537 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10538 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10539 TREE_OPERAND (arg1, 1), 0))
10540 return
10541 fold_convert_loc (loc, type,
10542 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10543 arg0, TREE_OPERAND (arg1, 1)));
10544
10545 if (! FLOAT_TYPE_P (type))
10546 {
10547 /* Fold A - (A & B) into ~B & A. */
10548 if (!TREE_SIDE_EFFECTS (arg0)
10549 && TREE_CODE (arg1) == BIT_AND_EXPR)
10550 {
10551 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10552 {
10553 tree arg10 = fold_convert_loc (loc, type,
10554 TREE_OPERAND (arg1, 0));
10555 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10556 fold_build1_loc (loc, BIT_NOT_EXPR,
10557 type, arg10),
10558 fold_convert_loc (loc, type, arg0));
10559 }
10560 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10561 {
10562 tree arg11 = fold_convert_loc (loc,
10563 type, TREE_OPERAND (arg1, 1));
10564 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10565 fold_build1_loc (loc, BIT_NOT_EXPR,
10566 type, arg11),
10567 fold_convert_loc (loc, type, arg0));
10568 }
10569 }
10570
10571 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10572 any power of 2 minus 1. */
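	  /* Illustration (example values assumed): with B == 7 and A == 13,
	     (A & ~7) - (A & 7) == 8 - 5 == 3 and (A ^ 7) - 7 == 10 - 7 == 3,
	     so the transformation saves one AND.  */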
10573 if (TREE_CODE (arg0) == BIT_AND_EXPR
10574 && TREE_CODE (arg1) == BIT_AND_EXPR
10575 && operand_equal_p (TREE_OPERAND (arg0, 0),
10576 TREE_OPERAND (arg1, 0), 0))
10577 {
10578 tree mask0 = TREE_OPERAND (arg0, 1);
10579 tree mask1 = TREE_OPERAND (arg1, 1);
10580 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10581
10582 if (operand_equal_p (tem, mask1, 0))
10583 {
10584 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10585 TREE_OPERAND (arg0, 0), mask1);
10586 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10587 }
10588 }
10589 }
10590
10591 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10592 __complex__ ( x, -y ). This is not the same for SNaNs or if
10593 signed zeros are involved. */
10594 if (!HONOR_SNANS (element_mode (arg0))
10595 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10596 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10597 {
10598 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10599 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10600 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10601 bool arg0rz = false, arg0iz = false;
10602 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10603 || (arg0i && (arg0iz = real_zerop (arg0i))))
10604 {
10605 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10606 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10607 if (arg0rz && arg1i && real_zerop (arg1i))
10608 {
10609 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10610 arg1r ? arg1r
10611 : build1 (REALPART_EXPR, rtype, arg1));
10612 tree ip = arg0i ? arg0i
10613 : build1 (IMAGPART_EXPR, rtype, arg0);
10614 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10615 }
10616 else if (arg0iz && arg1r && real_zerop (arg1r))
10617 {
10618 tree rp = arg0r ? arg0r
10619 : build1 (REALPART_EXPR, rtype, arg0);
10620 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10621 arg1i ? arg1i
10622 : build1 (IMAGPART_EXPR, rtype, arg1));
10623 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10624 }
10625 }
10626 }
10627
10628 /* A - B -> A + (-B) if B is easily negatable. */
10629 if (negate_expr_p (arg1)
10630 && !TYPE_OVERFLOW_SANITIZED (type)
10631 && ((FLOAT_TYPE_P (type)
10632 /* Avoid this transformation if B is a positive REAL_CST. */
10633 && (TREE_CODE (arg1) != REAL_CST
10634 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10635 || INTEGRAL_TYPE_P (type)))
10636 return fold_build2_loc (loc, PLUS_EXPR, type,
10637 fold_convert_loc (loc, type, arg0),
10638 fold_convert_loc (loc, type,
10639 negate_expr (arg1)));
10640
10641 /* Try folding difference of addresses. */
10642 {
10643 HOST_WIDE_INT diff;
10644
10645 if ((TREE_CODE (arg0) == ADDR_EXPR
10646 || TREE_CODE (arg1) == ADDR_EXPR)
10647 && ptr_difference_const (arg0, arg1, &diff))
10648 return build_int_cst_type (type, diff);
10649 }
10650
10651 /* Fold &a[i] - &a[j] to i-j. */
10652 if (TREE_CODE (arg0) == ADDR_EXPR
10653 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10654 && TREE_CODE (arg1) == ADDR_EXPR
10655 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10656 {
10657 tree tem = fold_addr_of_array_ref_difference (loc, type,
10658 TREE_OPERAND (arg0, 0),
10659 TREE_OPERAND (arg1, 0));
10660 if (tem)
10661 return tem;
10662 }
10663
10664 if (FLOAT_TYPE_P (type)
10665 && flag_unsafe_math_optimizations
10666 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10667 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10668 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10669 return tem;
10670
10671 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
10672 one. Make sure the type is not saturating and has the signedness of
10673 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10674 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10675 if ((TREE_CODE (arg0) == MULT_EXPR
10676 || TREE_CODE (arg1) == MULT_EXPR)
10677 && !TYPE_SATURATING (type)
10678 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10679 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10680 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10681 {
10682 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10683 if (tem)
10684 return tem;
10685 }
10686
10687 goto associate;
10688
10689 case MULT_EXPR:
10690 /* (-A) * (-B) -> A * B */
10691 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10692 return fold_build2_loc (loc, MULT_EXPR, type,
10693 fold_convert_loc (loc, type,
10694 TREE_OPERAND (arg0, 0)),
10695 fold_convert_loc (loc, type,
10696 negate_expr (arg1)));
10697 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10698 return fold_build2_loc (loc, MULT_EXPR, type,
10699 fold_convert_loc (loc, type,
10700 negate_expr (arg0)),
10701 fold_convert_loc (loc, type,
10702 TREE_OPERAND (arg1, 0)));
10703
10704 if (! FLOAT_TYPE_P (type))
10705 {
10706 /* Transform x * -C into -x * C if x is easily negatable. */
10707 if (TREE_CODE (arg1) == INTEGER_CST
10708 && tree_int_cst_sgn (arg1) == -1
10709 && negate_expr_p (arg0)
10710 && (tem = negate_expr (arg1)) != arg1
10711 && !TREE_OVERFLOW (tem))
10712 return fold_build2_loc (loc, MULT_EXPR, type,
10713 fold_convert_loc (loc, type,
10714 negate_expr (arg0)),
10715 tem);
10716
10717 /* (a * (1 << b)) is (a << b) */
10718 if (TREE_CODE (arg1) == LSHIFT_EXPR
10719 && integer_onep (TREE_OPERAND (arg1, 0)))
10720 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10721 TREE_OPERAND (arg1, 1));
10722 if (TREE_CODE (arg0) == LSHIFT_EXPR
10723 && integer_onep (TREE_OPERAND (arg0, 0)))
10724 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10725 TREE_OPERAND (arg0, 1));
10726
10727 /* (A + A) * C -> A * 2 * C */
10728 if (TREE_CODE (arg0) == PLUS_EXPR
10729 && TREE_CODE (arg1) == INTEGER_CST
10730 && operand_equal_p (TREE_OPERAND (arg0, 0),
10731 TREE_OPERAND (arg0, 1), 0))
10732 return fold_build2_loc (loc, MULT_EXPR, type,
10733 omit_one_operand_loc (loc, type,
10734 TREE_OPERAND (arg0, 0),
10735 TREE_OPERAND (arg0, 1)),
10736 fold_build2_loc (loc, MULT_EXPR, type,
10737                                  build_int_cst (type, 2), arg1));
10738
10739 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10740 sign-changing only. */
10741 if (TREE_CODE (arg1) == INTEGER_CST
10742 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10743 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10744 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10745
10746 strict_overflow_p = false;
10747 if (TREE_CODE (arg1) == INTEGER_CST
10748 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10749 &strict_overflow_p)))
10750 {
10751 if (strict_overflow_p)
10752 fold_overflow_warning (("assuming signed overflow does not "
10753 "occur when simplifying "
10754 "multiplication"),
10755 WARN_STRICT_OVERFLOW_MISC);
10756 return fold_convert_loc (loc, type, tem);
10757 }
10758
10759 /* Optimize z * conj(z) for integer complex numbers. */
10760 if (TREE_CODE (arg0) == CONJ_EXPR
10761 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10762 return fold_mult_zconjz (loc, type, arg1);
10763 if (TREE_CODE (arg1) == CONJ_EXPR
10764 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10765 return fold_mult_zconjz (loc, type, arg0);
10766 }
10767 else
10768 {
10769 /* Convert (C1/X)*C2 into (C1*C2)/X. This transformation may change
10770 the result for floating point types due to rounding so it is applied
10771         only if -fassociative-math was specified. */
10772 if (flag_associative_math
10773 && TREE_CODE (arg0) == RDIV_EXPR
10774 && TREE_CODE (arg1) == REAL_CST
10775 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10776 {
10777 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10778 arg1);
10779 if (tem)
10780 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10781 TREE_OPERAND (arg0, 1));
10782 }
10783
10784 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10785 if (operand_equal_p (arg0, arg1, 0))
10786 {
10787 tree tem = fold_strip_sign_ops (arg0);
10788 if (tem != NULL_TREE)
10789 {
10790 tem = fold_convert_loc (loc, type, tem);
10791 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10792 }
10793 }
10794
10795 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10796 This is not the same for NaNs or if signed zeros are
10797 involved. */
10798 if (!HONOR_NANS (arg0)
10799 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10800 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10801 && TREE_CODE (arg1) == COMPLEX_CST
10802 && real_zerop (TREE_REALPART (arg1)))
10803 {
10804 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10805 if (real_onep (TREE_IMAGPART (arg1)))
10806 return
10807 fold_build2_loc (loc, COMPLEX_EXPR, type,
10808 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10809 rtype, arg0)),
10810 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10811 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10812 return
10813 fold_build2_loc (loc, COMPLEX_EXPR, type,
10814 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10815 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10816 rtype, arg0)));
10817 }
10818
10819 /* Optimize z * conj(z) for floating point complex numbers.
10820 Guarded by flag_unsafe_math_optimizations as non-finite
10821 imaginary components don't produce scalar results. */
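	  /* Sketch of the reasoning (not from the original sources):
	     z * conj(z) == __real z * __real z + __imag z * __imag z, a
	     purely real value; but if __imag z is infinite, the exact
	     complex product has a NaN imaginary part (inf - inf), hence
	     the flag_unsafe_math_optimizations guard.  */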
10822 if (flag_unsafe_math_optimizations
10823 && TREE_CODE (arg0) == CONJ_EXPR
10824 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10825 return fold_mult_zconjz (loc, type, arg1);
10826 if (flag_unsafe_math_optimizations
10827 && TREE_CODE (arg1) == CONJ_EXPR
10828 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10829 return fold_mult_zconjz (loc, type, arg0);
10830
10831 if (flag_unsafe_math_optimizations)
10832 {
10833 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10834 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10835
10836 /* Optimizations of root(...)*root(...). */
10837 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10838 {
10839 tree rootfn, arg;
10840 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10841 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10842
10843 /* Optimize sqrt(x)*sqrt(x) as x. */
10844 if (BUILTIN_SQRT_P (fcode0)
10845 && operand_equal_p (arg00, arg10, 0)
10846 && ! HONOR_SNANS (element_mode (type)))
10847 return arg00;
10848
10849 /* Optimize root(x)*root(y) as root(x*y). */
10850 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10851 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10852 return build_call_expr_loc (loc, rootfn, 1, arg);
10853 }
10854
10855 /* Optimize expN(x)*expN(y) as expN(x+y). */
10856 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10857 {
10858 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10859 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10860 CALL_EXPR_ARG (arg0, 0),
10861 CALL_EXPR_ARG (arg1, 0));
10862 return build_call_expr_loc (loc, expfn, 1, arg);
10863 }
10864
10865 /* Optimizations of pow(...)*pow(...). */
10866 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10867 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10868 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10869 {
10870 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10871 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10872 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10873 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10874
10875 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10876 if (operand_equal_p (arg01, arg11, 0))
10877 {
10878 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10879 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10880 arg00, arg10);
10881 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10882 }
10883
10884 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10885 if (operand_equal_p (arg00, arg10, 0))
10886 {
10887 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10888 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10889 arg01, arg11);
10890 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10891 }
10892 }
10893
10894 /* Optimize tan(x)*cos(x) as sin(x). */
10895 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10896 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10897 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10898 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10899 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10900 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10901 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10902 CALL_EXPR_ARG (arg1, 0), 0))
10903 {
10904 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10905
10906 if (sinfn != NULL_TREE)
10907 return build_call_expr_loc (loc, sinfn, 1,
10908 CALL_EXPR_ARG (arg0, 0));
10909 }
10910
10911 /* Optimize x*pow(x,c) as pow(x,c+1). */
10912 if (fcode1 == BUILT_IN_POW
10913 || fcode1 == BUILT_IN_POWF
10914 || fcode1 == BUILT_IN_POWL)
10915 {
10916 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10917 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10918 if (TREE_CODE (arg11) == REAL_CST
10919 && !TREE_OVERFLOW (arg11)
10920 && operand_equal_p (arg0, arg10, 0))
10921 {
10922 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10923 REAL_VALUE_TYPE c;
10924 tree arg;
10925
10926 c = TREE_REAL_CST (arg11);
10927 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10928 arg = build_real (type, c);
10929 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10930 }
10931 }
10932
10933 /* Optimize pow(x,c)*x as pow(x,c+1). */
10934 if (fcode0 == BUILT_IN_POW
10935 || fcode0 == BUILT_IN_POWF
10936 || fcode0 == BUILT_IN_POWL)
10937 {
10938 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10939 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10940 if (TREE_CODE (arg01) == REAL_CST
10941 && !TREE_OVERFLOW (arg01)
10942 && operand_equal_p (arg1, arg00, 0))
10943 {
10944 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10945 REAL_VALUE_TYPE c;
10946 tree arg;
10947
10948 c = TREE_REAL_CST (arg01);
10949 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10950 arg = build_real (type, c);
10951 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10952 }
10953 }
10954
10955 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10956 if (!in_gimple_form
10957 && optimize
10958 && operand_equal_p (arg0, arg1, 0))
10959 {
10960 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10961
10962 if (powfn)
10963 {
10964 tree arg = build_real (type, dconst2);
10965 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10966 }
10967 }
10968 }
10969 }
10970 goto associate;
10971
10972 case BIT_IOR_EXPR:
10973 bit_ior:
10974 /* ~X | X is -1. */
10975 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10976 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10977 {
10978 t1 = build_zero_cst (type);
10979 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10980 return omit_one_operand_loc (loc, type, t1, arg1);
10981 }
10982
10983 /* X | ~X is -1. */
10984 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10985 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10986 {
10987 t1 = build_zero_cst (type);
10988 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10989 return omit_one_operand_loc (loc, type, t1, arg0);
10990 }
10991
10992 /* Canonicalize (X & C1) | C2. */
10993 if (TREE_CODE (arg0) == BIT_AND_EXPR
10994 && TREE_CODE (arg1) == INTEGER_CST
10995 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10996 {
10997 int width = TYPE_PRECISION (type), w;
10998 wide_int c1 = TREE_OPERAND (arg0, 1);
10999 wide_int c2 = arg1;
11000
11001 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
11002 if ((c1 & c2) == c1)
11003 return omit_one_operand_loc (loc, type, arg1,
11004 TREE_OPERAND (arg0, 0));
11005
11006 wide_int msk = wi::mask (width, false,
11007 TYPE_PRECISION (TREE_TYPE (arg1)));
11008
11009 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
11010 if (msk.and_not (c1 | c2) == 0)
11011 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11012 TREE_OPERAND (arg0, 0), arg1);
11013
11014 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
11015 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11016 mode which allows further optimizations. */
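	  /* E.g. (assumed constants): (X & 0x7F) | 0x0F becomes
	     (X & 0x70) | 0x0F, whereas (X & 0xFF) | 0x0F is left alone
	     because 0xFF is already a byte-mode mask, i.e. C3 ends up
	     equal to C1 below.  */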
11017 c1 &= msk;
11018 c2 &= msk;
11019 wide_int c3 = c1.and_not (c2);
11020 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11021 {
11022 wide_int mask = wi::mask (w, false,
11023 TYPE_PRECISION (type));
11024 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11025 {
11026 c3 = mask;
11027 break;
11028 }
11029 }
11030
11031 if (c3 != c1)
11032 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11033 fold_build2_loc (loc, BIT_AND_EXPR, type,
11034 TREE_OPERAND (arg0, 0),
11035 wide_int_to_tree (type,
11036 c3)),
11037 arg1);
11038 }
11039
11040 /* (X & ~Y) | (~X & Y) is X ^ Y */
11041 if (TREE_CODE (arg0) == BIT_AND_EXPR
11042 && TREE_CODE (arg1) == BIT_AND_EXPR)
11043 {
11044 tree a0, a1, l0, l1, n0, n1;
11045
11046 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11047 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11048
11049 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11050 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11051
11052 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11053 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11054
11055 if ((operand_equal_p (n0, a0, 0)
11056 && operand_equal_p (n1, a1, 0))
11057 || (operand_equal_p (n0, a1, 0)
11058 && operand_equal_p (n1, a0, 0)))
11059 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11060 }
11061
11062 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11063 if (t1 != NULL_TREE)
11064 return t1;
11065
11066 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11067
11068 This results in more efficient code for machines without a NAND
11069 instruction. Combine will canonicalize to the first form
11070 which will allow use of NAND instructions provided by the
11071 backend if they exist. */
11072 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11073 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11074 {
11075 return
11076 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11077 build2 (BIT_AND_EXPR, type,
11078 fold_convert_loc (loc, type,
11079 TREE_OPERAND (arg0, 0)),
11080 fold_convert_loc (loc, type,
11081 TREE_OPERAND (arg1, 0))));
11082 }
11083
11084 /* See if this can be simplified into a rotate first. If that
11085 is unsuccessful continue in the association code. */
11086 goto bit_rotate;
11087
11088 case BIT_XOR_EXPR:
11089 /* ~X ^ X is -1. */
11090 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11091 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11092 {
11093 t1 = build_zero_cst (type);
11094 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11095 return omit_one_operand_loc (loc, type, t1, arg1);
11096 }
11097
11098 /* X ^ ~X is -1. */
11099 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11100 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11101 {
11102 t1 = build_zero_cst (type);
11103 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11104 return omit_one_operand_loc (loc, type, t1, arg0);
11105 }
11106
11107 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11108 with a constant, and the two constants have no bits in common,
11109 we should treat this as a BIT_IOR_EXPR since this may produce more
11110 simplifications. */
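      /* E.g. (assumed constants): (X & 0xF0) ^ (Y & 0x0F) has disjoint
	 mask bits, so it is treated as (X & 0xF0) | (Y & 0x0F).  */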
11111 if (TREE_CODE (arg0) == BIT_AND_EXPR
11112 && TREE_CODE (arg1) == BIT_AND_EXPR
11113 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11114 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11115 && wi::bit_and (TREE_OPERAND (arg0, 1),
11116 TREE_OPERAND (arg1, 1)) == 0)
11117 {
11118 code = BIT_IOR_EXPR;
11119 goto bit_ior;
11120 }
11121
11122      /* (X | Y) ^ X -> Y & ~X. */
11123 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11124 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11125 {
11126 tree t2 = TREE_OPERAND (arg0, 1);
11127 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11128 arg1);
11129 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11130 fold_convert_loc (loc, type, t2),
11131 fold_convert_loc (loc, type, t1));
11132 return t1;
11133 }
11134
11135      /* (Y | X) ^ X -> Y & ~X. */
11136 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11137 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11138 {
11139 tree t2 = TREE_OPERAND (arg0, 0);
11140 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11141 arg1);
11142 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11143 fold_convert_loc (loc, type, t2),
11144 fold_convert_loc (loc, type, t1));
11145 return t1;
11146 }
11147
11148      /* X ^ (X | Y) -> Y & ~X. */
11149 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11150 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11151 {
11152 tree t2 = TREE_OPERAND (arg1, 1);
11153 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11154 arg0);
11155 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11156 fold_convert_loc (loc, type, t2),
11157 fold_convert_loc (loc, type, t1));
11158 return t1;
11159 }
11160
11161      /* X ^ (Y | X) -> Y & ~X. */
11162 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11163 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11164 {
11165 tree t2 = TREE_OPERAND (arg1, 0);
11166 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11167 arg0);
11168 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11169 fold_convert_loc (loc, type, t2),
11170 fold_convert_loc (loc, type, t1));
11171 return t1;
11172 }
11173
11174 /* Convert ~X ^ ~Y to X ^ Y. */
11175 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11176 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11177 return fold_build2_loc (loc, code, type,
11178 fold_convert_loc (loc, type,
11179 TREE_OPERAND (arg0, 0)),
11180 fold_convert_loc (loc, type,
11181 TREE_OPERAND (arg1, 0)));
11182
11183 /* Convert ~X ^ C to X ^ ~C. */
11184 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11185 && TREE_CODE (arg1) == INTEGER_CST)
11186 return fold_build2_loc (loc, code, type,
11187 fold_convert_loc (loc, type,
11188 TREE_OPERAND (arg0, 0)),
11189 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11190
11191 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11192 if (TREE_CODE (arg0) == BIT_AND_EXPR
11193 && INTEGRAL_TYPE_P (type)
11194 && integer_onep (TREE_OPERAND (arg0, 1))
11195 && integer_onep (arg1))
11196 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11197 build_zero_cst (TREE_TYPE (arg0)));
11198
11199 /* Fold (X & Y) ^ Y as ~X & Y. */
11200 if (TREE_CODE (arg0) == BIT_AND_EXPR
11201 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11202 {
11203 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11204 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11205 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11206 fold_convert_loc (loc, type, arg1));
11207 }
11208 /* Fold (X & Y) ^ X as ~Y & X. */
11209 if (TREE_CODE (arg0) == BIT_AND_EXPR
11210 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11211 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11212 {
11213 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11214 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11215 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11216 fold_convert_loc (loc, type, arg1));
11217 }
11218 /* Fold X ^ (X & Y) as X & ~Y. */
11219 if (TREE_CODE (arg1) == BIT_AND_EXPR
11220 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11221 {
11222 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11223 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11224 fold_convert_loc (loc, type, arg0),
11225 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11226 }
11227 /* Fold X ^ (Y & X) as ~Y & X. */
11228 if (TREE_CODE (arg1) == BIT_AND_EXPR
11229 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11230 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11231 {
11232 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11233 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11234 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11235 fold_convert_loc (loc, type, arg0));
11236 }
11237
11238 /* See if this can be simplified into a rotate first. If that
11239 is unsuccessful continue in the association code. */
11240 goto bit_rotate;
11241
11242 case BIT_AND_EXPR:
11243 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11244 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11245 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11246 || (TREE_CODE (arg0) == EQ_EXPR
11247 && integer_zerop (TREE_OPERAND (arg0, 1))))
11248 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11249 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11250
11251 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11252 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11253 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11254 || (TREE_CODE (arg1) == EQ_EXPR
11255 && integer_zerop (TREE_OPERAND (arg1, 1))))
11256 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11257 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11258
11259 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11260 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11261 && INTEGRAL_TYPE_P (type)
11262 && integer_onep (TREE_OPERAND (arg0, 1))
11263 && integer_onep (arg1))
11264 {
11265 tree tem2;
11266 tem = TREE_OPERAND (arg0, 0);
11267 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11268 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11269 tem, tem2);
11270 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11271 build_zero_cst (TREE_TYPE (tem)));
11272 }
11273 /* Fold ~X & 1 as (X & 1) == 0. */
11274 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11275 && INTEGRAL_TYPE_P (type)
11276 && integer_onep (arg1))
11277 {
11278 tree tem2;
11279 tem = TREE_OPERAND (arg0, 0);
11280 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11281 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11282 tem, tem2);
11283 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11284 build_zero_cst (TREE_TYPE (tem)));
11285 }
11286 /* Fold !X & 1 as X == 0. */
11287 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11288 && integer_onep (arg1))
11289 {
11290 tem = TREE_OPERAND (arg0, 0);
11291 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11292 build_zero_cst (TREE_TYPE (tem)));
11293 }
11294
11295 /* Fold (X ^ Y) & Y as ~X & Y. */
11296 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11297 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11298 {
11299 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11300 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11301 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11302 fold_convert_loc (loc, type, arg1));
11303 }
11304 /* Fold (X ^ Y) & X as ~Y & X. */
11305 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11306 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11307 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11308 {
11309 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11310 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11311 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11312 fold_convert_loc (loc, type, arg1));
11313 }
11314 /* Fold X & (X ^ Y) as X & ~Y. */
11315 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11316 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11317 {
11318 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11319 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11320 fold_convert_loc (loc, type, arg0),
11321 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11322 }
11323 /* Fold X & (Y ^ X) as ~Y & X. */
11324 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11325 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11326 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11327 {
11328 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11329 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11330 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11331 fold_convert_loc (loc, type, arg0));
11332 }
11333
11334 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11335 multiple of 1 << CST. */
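      /* E.g. (assumed constants): (X * 8) & -4 folds to X * 8, since
	 every multiple of 8 already has its two low bits clear.  */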
11336 if (TREE_CODE (arg1) == INTEGER_CST)
11337 {
11338 wide_int cst1 = arg1;
11339 wide_int ncst1 = -cst1;
11340 if ((cst1 & ncst1) == ncst1
11341 && multiple_of_p (type, arg0,
11342 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11343 return fold_convert_loc (loc, type, arg0);
11344 }
11345
11346 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11347 bits from CST2. */
11348 if (TREE_CODE (arg1) == INTEGER_CST
11349 && TREE_CODE (arg0) == MULT_EXPR
11350 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11351 {
11352 wide_int warg1 = arg1;
11353 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11354
11355 if (masked == 0)
11356 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11357 arg0, arg1);
11358 else if (masked != warg1)
11359 {
11360 /* Avoid the transform if arg1 is a mask of some
11361 mode which allows further optimizations. */
11362 int pop = wi::popcount (warg1);
11363 if (!(pop >= BITS_PER_UNIT
11364 && exact_log2 (pop) != -1
11365 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11366 return fold_build2_loc (loc, code, type, op0,
11367 wide_int_to_tree (type, masked));
11368 }
11369 }
11370
11371 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11372 ((A & N) + B) & M -> (A + B) & M
11373 Similarly if (N & M) == 0,
11374 ((A | N) + B) & M -> (A + B) & M
11375 and for - instead of + (or unary - instead of +)
11376 and/or ^ instead of |.
11377 If B is constant and (B & M) == 0, fold into A & M. */
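      /* E.g. (assumed constants, M == 3): ((A & 7) + B) & 3 becomes
	 (A + B) & 3 because 7 & 3 == 3, and ((A | 4) + B) & 3 becomes
	 (A + B) & 3 because 4 & 3 == 0; bits at or above bit 2 can only
	 influence the sum through carries, which propagate upwards.  */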
11378 if (TREE_CODE (arg1) == INTEGER_CST)
11379 {
11380 wide_int cst1 = arg1;
11381 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11382 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11383 && (TREE_CODE (arg0) == PLUS_EXPR
11384 || TREE_CODE (arg0) == MINUS_EXPR
11385 || TREE_CODE (arg0) == NEGATE_EXPR)
11386 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11387 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11388 {
11389 tree pmop[2];
11390 int which = 0;
11391 wide_int cst0;
11392
11393 /* Now we know that arg0 is (C + D) or (C - D) or
11394              -C and arg1 (M) equals (1LL << cst) - 1.
11395 Store C into PMOP[0] and D into PMOP[1]. */
11396 pmop[0] = TREE_OPERAND (arg0, 0);
11397 pmop[1] = NULL;
11398 if (TREE_CODE (arg0) != NEGATE_EXPR)
11399 {
11400 pmop[1] = TREE_OPERAND (arg0, 1);
11401 which = 1;
11402 }
11403
11404 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11405 which = -1;
11406
11407 for (; which >= 0; which--)
11408 switch (TREE_CODE (pmop[which]))
11409 {
11410 case BIT_AND_EXPR:
11411 case BIT_IOR_EXPR:
11412 case BIT_XOR_EXPR:
11413 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11414 != INTEGER_CST)
11415 break;
11416 cst0 = TREE_OPERAND (pmop[which], 1);
11417 cst0 &= cst1;
11418 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11419 {
11420 if (cst0 != cst1)
11421 break;
11422 }
11423 else if (cst0 != 0)
11424 break;
11425 /* If C or D is of the form (A & N) where
11426 (N & M) == M, or of the form (A | N) or
11427 (A ^ N) where (N & M) == 0, replace it with A. */
11428 pmop[which] = TREE_OPERAND (pmop[which], 0);
11429 break;
11430 case INTEGER_CST:
11431              /* If C or D is a constant N where (N & M) == 0, it can be
11432                 omitted (treated as 0). */
11433 if ((TREE_CODE (arg0) == PLUS_EXPR
11434 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11435 && (cst1 & pmop[which]) == 0)
11436 pmop[which] = NULL;
11437 break;
11438 default:
11439 break;
11440 }
11441
11442 /* Only build anything new if we optimized one or both arguments
11443 above. */
11444 if (pmop[0] != TREE_OPERAND (arg0, 0)
11445 || (TREE_CODE (arg0) != NEGATE_EXPR
11446 && pmop[1] != TREE_OPERAND (arg0, 1)))
11447 {
11448 tree utype = TREE_TYPE (arg0);
11449 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11450 {
11451 /* Perform the operations in a type that has defined
11452 overflow behavior. */
11453 utype = unsigned_type_for (TREE_TYPE (arg0));
11454 if (pmop[0] != NULL)
11455 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11456 if (pmop[1] != NULL)
11457 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11458 }
11459
11460 if (TREE_CODE (arg0) == NEGATE_EXPR)
11461 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11462 else if (TREE_CODE (arg0) == PLUS_EXPR)
11463 {
11464 if (pmop[0] != NULL && pmop[1] != NULL)
11465 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11466 pmop[0], pmop[1]);
11467 else if (pmop[0] != NULL)
11468 tem = pmop[0];
11469 else if (pmop[1] != NULL)
11470 tem = pmop[1];
11471 else
11472 return build_int_cst (type, 0);
11473 }
11474 else if (pmop[0] == NULL)
11475 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11476 else
11477 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11478 pmop[0], pmop[1]);
11479 /* TEM is now the new binary +, - or unary - replacement. */
11480 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11481 fold_convert_loc (loc, utype, arg1));
11482 return fold_convert_loc (loc, type, tem);
11483 }
11484 }
11485 }
11486
11487 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11488 if (t1 != NULL_TREE)
11489 return t1;
11490 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11491 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11492 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11493 {
11494 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11495
11496 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11497 if (mask == -1)
11498 return
11499 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11500 }
11501
11502 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11503
11504 This results in more efficient code for machines without a NOR
11505 instruction. Combine will canonicalize to the first form
11506 which will allow use of NOR instructions provided by the
11507 backend if they exist. */
11508 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11509 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11510 {
11511 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11512 build2 (BIT_IOR_EXPR, type,
11513 fold_convert_loc (loc, type,
11514 TREE_OPERAND (arg0, 0)),
11515 fold_convert_loc (loc, type,
11516 TREE_OPERAND (arg1, 0))));
11517 }
11518
11519 /* If arg0 is derived from the address of an object or function, we may
11520 be able to fold this expression using the object or function's
11521 alignment. */
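      /* E.g. (hypothetical case): if ARG0 is the address of an object
	 with 8-byte alignment, the modulus is 8 and the residue 0, so
	 ARG0 & 7 folds to 0, while ARG0 & 8 is left alone because 8 is
	 not below the modulus.  */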
11522 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11523 {
11524 unsigned HOST_WIDE_INT modulus, residue;
11525 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11526
11527 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11528 integer_onep (arg1));
11529
11530 /* This works because modulus is a power of 2. If this weren't the
11531 case, we'd have to replace it by its greatest power-of-2
11532 divisor: modulus & -modulus. */
11533 if (low < modulus)
11534 return build_int_cst (type, residue & low);
11535 }
11536
11537 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11538 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11539 if the new mask might be further optimized. */
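      /* E.g. (assumed 32-bit unsigned X): in (X << 8) & 0xFFFFFF00 the
	 8 low bits are known zero, so the mask widens to 0xFFFFFFFF and
	 later folding drops the AND entirely; in (X >> 24) & 0xFF the 24
	 high bits are known zero and the mask widens the same way.  */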
11540 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11541 || TREE_CODE (arg0) == RSHIFT_EXPR)
11542 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11543 && TREE_CODE (arg1) == INTEGER_CST
11544 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11545 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11546 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11547 < TYPE_PRECISION (TREE_TYPE (arg0))))
11548 {
11549 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11550 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11551 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11552 tree shift_type = TREE_TYPE (arg0);
11553
11554 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11555 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11556 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11557 && TYPE_PRECISION (TREE_TYPE (arg0))
11558 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11559 {
11560 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11561 tree arg00 = TREE_OPERAND (arg0, 0);
11562 /* See if more bits can be proven as zero because of
11563 zero extension. */
11564 if (TREE_CODE (arg00) == NOP_EXPR
11565 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11566 {
11567 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11568 if (TYPE_PRECISION (inner_type)
11569 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11570 && TYPE_PRECISION (inner_type) < prec)
11571 {
11572 prec = TYPE_PRECISION (inner_type);
11573 /* See if we can shorten the right shift. */
11574 if (shiftc < prec)
11575 shift_type = inner_type;
11576 /* Otherwise X >> C1 is all zeros, so we'll optimize
11577 it into (X, 0) later on by making sure zerobits
11578 is all ones. */
11579 }
11580 }
11581 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11582 if (shiftc < prec)
11583 {
11584 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11585 zerobits <<= prec - shiftc;
11586 }
11587         /* For an arithmetic shift, if the sign bit could be set, zerobits
11588            can actually contain sign bits, so no transformation is
11589            possible, unless MASK masks them all away.  In that
11590            case the shift needs to be converted into a logical shift. */
11591 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11592 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11593 {
11594 if ((mask & zerobits) == 0)
11595 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11596 else
11597 zerobits = 0;
11598 }
11599 }
11600
11601 /* ((X << 16) & 0xff00) is (X, 0). */
11602 if ((mask & zerobits) == mask)
11603 return omit_one_operand_loc (loc, type,
11604 build_int_cst (type, 0), arg0);
11605
11606 newmask = mask | zerobits;
11607 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11608 {
11609 /* Only do the transformation if NEWMASK is some integer
11610 mode's mask. */
11611 for (prec = BITS_PER_UNIT;
11612 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11613 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11614 break;
11615 if (prec < HOST_BITS_PER_WIDE_INT
11616 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11617 {
11618 tree newmaskt;
11619
11620 if (shift_type != TREE_TYPE (arg0))
11621 {
11622 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11623 fold_convert_loc (loc, shift_type,
11624 TREE_OPERAND (arg0, 0)),
11625 TREE_OPERAND (arg0, 1));
11626 tem = fold_convert_loc (loc, type, tem);
11627 }
11628 else
11629 tem = op0;
11630 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11631 if (!tree_int_cst_equal (newmaskt, arg1))
11632 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11633 }
11634 }
11635 }
11636
11637 goto associate;
11638
11639 case RDIV_EXPR:
11640 /* Don't touch a floating-point divide by zero unless the mode
11641 of the constant can represent infinity. */
11642 if (TREE_CODE (arg1) == REAL_CST
11643 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11644 && real_zerop (arg1))
11645 return NULL_TREE;
11646
11647 /* (-A) / (-B) -> A / B */
11648 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11649 return fold_build2_loc (loc, RDIV_EXPR, type,
11650 TREE_OPERAND (arg0, 0),
11651 negate_expr (arg1));
11652 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11653 return fold_build2_loc (loc, RDIV_EXPR, type,
11654 negate_expr (arg0),
11655 TREE_OPERAND (arg1, 0));
11656
11657 /* Convert A/B/C to A/(B*C). */
11658 if (flag_reciprocal_math
11659 && TREE_CODE (arg0) == RDIV_EXPR)
11660 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11661 fold_build2_loc (loc, MULT_EXPR, type,
11662 TREE_OPERAND (arg0, 1), arg1));
11663
11664 /* Convert A/(B/C) to (A/B)*C. */
11665 if (flag_reciprocal_math
11666 && TREE_CODE (arg1) == RDIV_EXPR)
11667 return fold_build2_loc (loc, MULT_EXPR, type,
11668 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11669 TREE_OPERAND (arg1, 0)),
11670 TREE_OPERAND (arg1, 1));
11671
11672 /* Convert C1/(X*C2) into (C1/C2)/X. */
11673 if (flag_reciprocal_math
11674 && TREE_CODE (arg1) == MULT_EXPR
11675 && TREE_CODE (arg0) == REAL_CST
11676 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11677 {
11678 tree tem = const_binop (RDIV_EXPR, arg0,
11679 TREE_OPERAND (arg1, 1));
11680 if (tem)
11681 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11682 TREE_OPERAND (arg1, 0));
11683 }
11684
11685 if (flag_unsafe_math_optimizations)
11686 {
11687 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11688 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11689
11690 /* Optimize sin(x)/cos(x) as tan(x). */
11691 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11692 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11693 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11694 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11695 CALL_EXPR_ARG (arg1, 0), 0))
11696 {
11697 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11698
11699 if (tanfn != NULL_TREE)
11700 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11701 }
11702
11703 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11704 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11705 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11706 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11707 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11708 CALL_EXPR_ARG (arg1, 0), 0))
11709 {
11710 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11711
11712 if (tanfn != NULL_TREE)
11713 {
11714 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11715 CALL_EXPR_ARG (arg0, 0));
11716 return fold_build2_loc (loc, RDIV_EXPR, type,
11717 build_real (type, dconst1), tmp);
11718 }
11719 }
11720
11721 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11722 NaNs or Infinities. */
11723 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11724 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11725 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11726 {
11727 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11728 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11729
11730 if (! HONOR_NANS (arg00)
11731 && ! HONOR_INFINITIES (element_mode (arg00))
11732 && operand_equal_p (arg00, arg01, 0))
11733 {
11734 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11735
11736 if (cosfn != NULL_TREE)
11737 return build_call_expr_loc (loc, cosfn, 1, arg00);
11738 }
11739 }
11740
11741 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11742 NaNs or Infinities. */
11743 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11744 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11745 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11746 {
11747 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11748 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11749
11750 if (! HONOR_NANS (arg00)
11751 && ! HONOR_INFINITIES (element_mode (arg00))
11752 && operand_equal_p (arg00, arg01, 0))
11753 {
11754 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11755
11756 if (cosfn != NULL_TREE)
11757 {
11758 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11759 return fold_build2_loc (loc, RDIV_EXPR, type,
11760 build_real (type, dconst1),
11761 tmp);
11762 }
11763 }
11764 }
11765
11766 /* Optimize pow(x,c)/x as pow(x,c-1). */
11767 if (fcode0 == BUILT_IN_POW
11768 || fcode0 == BUILT_IN_POWF
11769 || fcode0 == BUILT_IN_POWL)
11770 {
11771 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11772 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11773 if (TREE_CODE (arg01) == REAL_CST
11774 && !TREE_OVERFLOW (arg01)
11775 && operand_equal_p (arg1, arg00, 0))
11776 {
11777 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11778 REAL_VALUE_TYPE c;
11779 tree arg;
11780
11781 c = TREE_REAL_CST (arg01);
11782 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11783 arg = build_real (type, c);
11784 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11785 }
11786 }
11787
11788 /* Optimize a/root(b/c) into a*root(c/b). */
11789 if (BUILTIN_ROOT_P (fcode1))
11790 {
11791 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11792
11793 if (TREE_CODE (rootarg) == RDIV_EXPR)
11794 {
11795 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11796 tree b = TREE_OPERAND (rootarg, 0);
11797 tree c = TREE_OPERAND (rootarg, 1);
11798
11799 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11800
11801 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11802 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11803 }
11804 }
11805
11806 /* Optimize x/expN(y) into x*expN(-y). */
11807 if (BUILTIN_EXPONENT_P (fcode1))
11808 {
11809 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11810 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11811 arg1 = build_call_expr_loc (loc,
11812 expfn, 1,
11813 fold_convert_loc (loc, type, arg));
11814 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11815 }
11816
11817 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11818 if (fcode1 == BUILT_IN_POW
11819 || fcode1 == BUILT_IN_POWF
11820 || fcode1 == BUILT_IN_POWL)
11821 {
11822 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11823 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11824 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11825 tree neg11 = fold_convert_loc (loc, type,
11826 negate_expr (arg11));
11827 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11828 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11829 }
11830 }
11831 return NULL_TREE;
11832
11833 case TRUNC_DIV_EXPR:
11834 /* Optimize (X & (-A)) / A where A is a power of 2,
11835        to X >> log2(A). */
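      /* E.g. (assumed constants): (X & -8) / 8 becomes X >> 3; the
	 masking makes the dividend a multiple of 8, so the truncating
	 division and the arithmetic shift agree even for negative X.  */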
11836 if (TREE_CODE (arg0) == BIT_AND_EXPR
11837 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11838 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11839 {
11840 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11841 arg1, TREE_OPERAND (arg0, 1));
11842 if (sum && integer_zerop (sum)) {
11843 tree pow2 = build_int_cst (integer_type_node,
11844 wi::exact_log2 (arg1));
11845 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11846 TREE_OPERAND (arg0, 0), pow2);
11847 }
11848 }
11849
11850 /* Fall through */
11851
11852 case FLOOR_DIV_EXPR:
11853 /* Simplify A / (B << N) where A and B are positive and B is
11854 a power of 2, to A >> (N + log2(B)). */
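	 /* E.g. (assumed values): with B == 2 and N == 3,
	    A / (2 << 3) becomes A >> (3 + 1), since 2 << 3 == 1 << 4.  */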
11855 strict_overflow_p = false;
11856 if (TREE_CODE (arg1) == LSHIFT_EXPR
11857 && (TYPE_UNSIGNED (type)
11858 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11859 {
11860 tree sval = TREE_OPERAND (arg1, 0);
11861 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11862 {
11863 tree sh_cnt = TREE_OPERAND (arg1, 1);
11864 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11865 wi::exact_log2 (sval));
11866
11867 if (strict_overflow_p)
11868 fold_overflow_warning (("assuming signed overflow does not "
11869 "occur when simplifying A / (B << N)"),
11870 WARN_STRICT_OVERFLOW_MISC);
11871
11872 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11873 sh_cnt, pow2);
11874 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11875 fold_convert_loc (loc, type, arg0), sh_cnt);
11876 }
11877 }
11878
11879 /* Fall through */
11880
11881 case ROUND_DIV_EXPR:
11882 case CEIL_DIV_EXPR:
11883 case EXACT_DIV_EXPR:
11884 if (integer_zerop (arg1))
11885 return NULL_TREE;
11886
11887 /* Convert -A / -B to A / B when the type is signed and overflow is
11888 undefined. */
11889 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11890 && TREE_CODE (arg0) == NEGATE_EXPR
11891 && negate_expr_p (arg1))
11892 {
11893 if (INTEGRAL_TYPE_P (type))
11894 fold_overflow_warning (("assuming signed overflow does not occur "
11895 "when distributing negation across "
11896 "division"),
11897 WARN_STRICT_OVERFLOW_MISC);
11898 return fold_build2_loc (loc, code, type,
11899 fold_convert_loc (loc, type,
11900 TREE_OPERAND (arg0, 0)),
11901 fold_convert_loc (loc, type,
11902 negate_expr (arg1)));
11903 }
11904 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11905 && TREE_CODE (arg1) == NEGATE_EXPR
11906 && negate_expr_p (arg0))
11907 {
11908 if (INTEGRAL_TYPE_P (type))
11909 fold_overflow_warning (("assuming signed overflow does not occur "
11910 "when distributing negation across "
11911 "division"),
11912 WARN_STRICT_OVERFLOW_MISC);
11913 return fold_build2_loc (loc, code, type,
11914 fold_convert_loc (loc, type,
11915 negate_expr (arg0)),
11916 fold_convert_loc (loc, type,
11917 TREE_OPERAND (arg1, 0)));
11918 }
11919
11920 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11921 operation, EXACT_DIV_EXPR.
11922
11923 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11924 At one time others generated faster code; it's not clear if they do
11925 after the last round of changes to the DIV code in expmed.c. */
11926 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11927 && multiple_of_p (type, arg0, arg1))
11928 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11929
11930 strict_overflow_p = false;
11931 if (TREE_CODE (arg1) == INTEGER_CST
11932 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11933 &strict_overflow_p)))
11934 {
11935 if (strict_overflow_p)
11936 fold_overflow_warning (("assuming signed overflow does not occur "
11937 "when simplifying division"),
11938 WARN_STRICT_OVERFLOW_MISC);
11939 return fold_convert_loc (loc, type, tem);
11940 }
11941
11942 return NULL_TREE;
11943
11944 case CEIL_MOD_EXPR:
11945 case FLOOR_MOD_EXPR:
11946 case ROUND_MOD_EXPR:
11947 case TRUNC_MOD_EXPR:
11948 /* X % -Y is the same as X % Y. */
11949 if (code == TRUNC_MOD_EXPR
11950 && !TYPE_UNSIGNED (type)
11951 && TREE_CODE (arg1) == NEGATE_EXPR
11952 && !TYPE_OVERFLOW_TRAPS (type))
11953 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, arg0),
11954 fold_convert_loc (loc, type,
11955 TREE_OPERAND (arg1, 0)));
11956
11957 strict_overflow_p = false;
11958 if (TREE_CODE (arg1) == INTEGER_CST
11959 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11960 &strict_overflow_p)))
11961 {
11962 if (strict_overflow_p)
11963 fold_overflow_warning (("assuming signed overflow does not occur "
11964 "when simplifying modulus"),
11965 WARN_STRICT_OVERFLOW_MISC);
11966 return fold_convert_loc (loc, type, tem);
11967 }
11968
11969 /* Optimize TRUNC_MOD_EXPR by a power of two into a BIT_AND_EXPR,
11970 i.e. "X % C" into "X & (C - 1)", if X and C are positive. */
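      /* E.g. for unsigned x, x % 8 folds to x & 7; the LSHIFT_EXPR
	 case below extends this to x % (8 << n) -> x & ((8 << n) - 1).  */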
11971 if ((code == TRUNC_MOD_EXPR || code == FLOOR_MOD_EXPR)
11972 && (TYPE_UNSIGNED (type)
11973 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11974 {
11975 tree c = arg1;
11976 /* Also optimize A % (C << N) where C is a power of 2,
11977 to A & ((C << N) - 1). */
11978 if (TREE_CODE (arg1) == LSHIFT_EXPR)
11979 c = TREE_OPERAND (arg1, 0);
11980
11981 if (integer_pow2p (c) && tree_int_cst_sgn (c) > 0)
11982 {
11983 tree mask
11984 = fold_build2_loc (loc, MINUS_EXPR, TREE_TYPE (arg1), arg1,
11985 build_int_cst (TREE_TYPE (arg1), 1));
11986 if (strict_overflow_p)
11987 fold_overflow_warning (("assuming signed overflow does not "
11988 "occur when simplifying "
11989 "X % (power of two)"),
11990 WARN_STRICT_OVERFLOW_MISC);
11991 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11992 fold_convert_loc (loc, type, arg0),
11993 fold_convert_loc (loc, type, mask));
11994 }
11995 }
11996
11997 return NULL_TREE;
11998
11999 case LROTATE_EXPR:
12000 case RROTATE_EXPR:
12001 case RSHIFT_EXPR:
12002 case LSHIFT_EXPR:
12003 /* Since a negative shift count is not well-defined,
12004 don't try to compute it in the compiler. */
12005 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
12006 return NULL_TREE;
12007
12008 prec = element_precision (type);
12009
12010 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
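      /* E.g. (x << 3) << 5 folds to x << 8.  If the combined count
	 reaches the precision, rotates are reduced modulo the precision,
	 while shifts collapse to zero (or to a shift by prec - 1 for
	 signed right shifts), as the cases below handle.  */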
12011 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
12012 && tree_to_uhwi (arg1) < prec
12013 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12014 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12015 {
12016 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
12017 + tree_to_uhwi (arg1));
12018
12019 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
12020 being well defined. */
12021 if (low >= prec)
12022 {
12023 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
12024 low = low % prec;
12025 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
12026 return omit_one_operand_loc (loc, type, build_zero_cst (type),
12027 TREE_OPERAND (arg0, 0));
12028 else
12029 low = prec - 1;
12030 }
12031
12032 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12033 build_int_cst (TREE_TYPE (arg1), low));
12034 }
12035
12036 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
12037 into x & ((unsigned)-1 >> c) for unsigned types. */
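      /* E.g. for a 32-bit unsigned x, (x >> 4) << 4 folds to
	 x & 0xfffffff0, and (x << 4) >> 4 folds to x & 0x0fffffff.  */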
12038 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
12039 || (TYPE_UNSIGNED (type)
12040 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
12041 && tree_fits_uhwi_p (arg1)
12042 && tree_to_uhwi (arg1) < prec
12043 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
12044 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
12045 {
12046 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
12047 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
12048 tree lshift;
12049 tree arg00;
12050
12051 if (low0 == low1)
12052 {
12053 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12054
12055 lshift = build_minus_one_cst (type);
12056 lshift = const_binop (code, lshift, arg1);
12057
12058 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12059 }
12060 }
12061
12062 /* If we have a rotate of a bit operation with the rotate count and
12063 the second operand of the bit operation both constant,
12064 permute the two operations. */
12065 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12066 && (TREE_CODE (arg0) == BIT_AND_EXPR
12067 || TREE_CODE (arg0) == BIT_IOR_EXPR
12068 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12069 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12070 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12071 fold_build2_loc (loc, code, type,
12072 TREE_OPERAND (arg0, 0), arg1),
12073 fold_build2_loc (loc, code, type,
12074 TREE_OPERAND (arg0, 1), arg1));
12075
12076 /* Two consecutive rotates adding up to some integer
12077 multiple of the precision of the type can be ignored. */
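      /* E.g. on a 32-bit type, rotating right by 5 and then by 27
	 rotates by 32 in total, which is the identity.  */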
12078 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12079 && TREE_CODE (arg0) == RROTATE_EXPR
12080 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12081 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12082 prec) == 0)
12083 return TREE_OPERAND (arg0, 0);
12084
12085 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12086 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12087 if the latter can be further optimized. */
12088 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12089 && TREE_CODE (arg0) == BIT_AND_EXPR
12090 && TREE_CODE (arg1) == INTEGER_CST
12091 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12092 {
12093 tree mask = fold_build2_loc (loc, code, type,
12094 fold_convert_loc (loc, type,
12095 TREE_OPERAND (arg0, 1)),
12096 arg1);
12097 tree shift = fold_build2_loc (loc, code, type,
12098 fold_convert_loc (loc, type,
12099 TREE_OPERAND (arg0, 0)),
12100 arg1);
12101 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12102 if (tem)
12103 return tem;
12104 }
12105
12106 return NULL_TREE;
12107
12108 case MIN_EXPR:
12109 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12110 if (tem)
12111 return tem;
12112 goto associate;
12113
12114 case MAX_EXPR:
12115 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12116 if (tem)
12117 return tem;
12118 goto associate;
12119
12120 case TRUTH_ANDIF_EXPR:
12121 /* Note that the operands of this must be ints
12122 and their values must be 0 or 1.
12123 ("true" is a fixed value perhaps depending on the language.) */
12124 /* If first arg is constant zero, return it. */
12125 if (integer_zerop (arg0))
12126 return fold_convert_loc (loc, type, arg0);
12127 case TRUTH_AND_EXPR:
12128 /* If either arg is constant true, drop it. */
12129 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12130 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12131 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12132 /* Preserve sequence points. */
12133 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12134 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12135 /* If second arg is constant zero, result is zero, but first arg
12136 must be evaluated. */
12137 if (integer_zerop (arg1))
12138 return omit_one_operand_loc (loc, type, arg1, arg0);
12139 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12140 case will be handled here. */
12141 if (integer_zerop (arg0))
12142 return omit_one_operand_loc (loc, type, arg0, arg1);
12143
12144 /* !X && X is always false. */
12145 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12146 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12147 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12148 /* X && !X is always false. */
12149 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12150 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12151 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12152
12153 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12154 means A >= Y && A != MAX, but in this case we know that
12155 A < X <= MAX. */
12156
12157 if (!TREE_SIDE_EFFECTS (arg0)
12158 && !TREE_SIDE_EFFECTS (arg1))
12159 {
12160 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12161 if (tem && !operand_equal_p (tem, arg0, 0))
12162 return fold_build2_loc (loc, code, type, tem, arg1);
12163
12164 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12165 if (tem && !operand_equal_p (tem, arg1, 0))
12166 return fold_build2_loc (loc, code, type, arg0, tem);
12167 }
12168
12169 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12170 != NULL_TREE)
12171 return tem;
12172
12173 return NULL_TREE;
12174
12175 case TRUTH_ORIF_EXPR:
12176 /* Note that the operands of this must be ints
12177 and their values must be 0 or 1.
12178 ("true" is a fixed value perhaps depending on the language.) */
12179 /* If first arg is constant true, return it. */
12180 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12181 return fold_convert_loc (loc, type, arg0);
12182 case TRUTH_OR_EXPR:
12183 /* If either arg is constant zero, drop it. */
12184 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12185 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12186 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12187 /* Preserve sequence points. */
12188 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12189 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12190 /* If second arg is constant true, result is true, but we must
12191 evaluate first arg. */
12192 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12193 return omit_one_operand_loc (loc, type, arg1, arg0);
12194 /* Likewise for first arg, but note this only occurs here for
12195 TRUTH_OR_EXPR. */
12196 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12197 return omit_one_operand_loc (loc, type, arg0, arg1);
12198
12199 /* !X || X is always true. */
12200 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12201 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12202 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12203 /* X || !X is always true. */
12204 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12205 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12206 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12207
12208 /* (X && !Y) || (!X && Y) is X ^ Y */
12209 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12210 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12211 {
12212 tree a0, a1, l0, l1, n0, n1;
12213
12214 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12215 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12216
12217 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12218 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12219
12220 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12221 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12222
12223 if ((operand_equal_p (n0, a0, 0)
12224 && operand_equal_p (n1, a1, 0))
12225 || (operand_equal_p (n0, a1, 0)
12226 && operand_equal_p (n1, a0, 0)))
12227 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12228 }
12229
12230 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12231 != NULL_TREE)
12232 return tem;
12233
12234 return NULL_TREE;
12235
12236 case TRUTH_XOR_EXPR:
12237 /* If the second arg is constant zero, drop it. */
12238 if (integer_zerop (arg1))
12239 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12240 /* If the second arg is constant true, this is a logical inversion. */
12241 if (integer_onep (arg1))
12242 {
12243 tem = invert_truthvalue_loc (loc, arg0);
12244 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12245 }
12246 /* Identical arguments cancel to zero. */
12247 if (operand_equal_p (arg0, arg1, 0))
12248 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12249
12250 /* !X ^ X is always true. */
12251 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12252 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12253 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12254
12255 /* X ^ !X is always true. */
12256 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12257 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12258 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12259
12260 return NULL_TREE;
12261
12262 case EQ_EXPR:
12263 case NE_EXPR:
12264 STRIP_NOPS (arg0);
12265 STRIP_NOPS (arg1);
12266
12267 tem = fold_comparison (loc, code, type, op0, op1);
12268 if (tem != NULL_TREE)
12269 return tem;
12270
12271 /* bool_var != 0 becomes bool_var. */
12272 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12273 && code == NE_EXPR)
12274 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12275
12276 /* bool_var == 1 becomes bool_var. */
12277 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12278 && code == EQ_EXPR)
12279 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12280
12281 /* bool_var != 1 becomes !bool_var. */
12282 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12283 && code == NE_EXPR)
12284 return fold_convert_loc (loc, type,
12285 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12286 TREE_TYPE (arg0), arg0));
12287
12288 /* bool_var == 0 becomes !bool_var. */
12289 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12290 && code == EQ_EXPR)
12291 return fold_convert_loc (loc, type,
12292 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12293 TREE_TYPE (arg0), arg0));
12294
12295 /* !exp != 0 becomes !exp */
12296 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12297 && code == NE_EXPR)
12298 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12299
12300 /* If this is an equality comparison of the address of two non-weak,
12301 unaliased symbols neither of which are extern (since we do not
12302 have access to attributes for externs), then we know the result. */
12303 if (TREE_CODE (arg0) == ADDR_EXPR
12304 && DECL_P (TREE_OPERAND (arg0, 0))
12305 && TREE_CODE (arg1) == ADDR_EXPR
12306 && DECL_P (TREE_OPERAND (arg1, 0)))
12307 {
12308 int equal;
12309
12310 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12311 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12312 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12313 ->equal_address_to (symtab_node::get_create
12314 (TREE_OPERAND (arg1, 0)));
12315 else
12316 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12317 if (equal != 2)
12318 return constant_boolean_node (equal
12319 ? code == EQ_EXPR : code != EQ_EXPR,
12320 type);
12321 }
12322
12323 /* Similarly for a NEGATE_EXPR. */
12324 if (TREE_CODE (arg0) == NEGATE_EXPR
12325 && TREE_CODE (arg1) == INTEGER_CST
12326 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12327 arg1)))
12328 && TREE_CODE (tem) == INTEGER_CST
12329 && !TREE_OVERFLOW (tem))
12330 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12331
12332 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12333 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12334 && TREE_CODE (arg1) == INTEGER_CST
12335 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12336 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12337 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12338 fold_convert_loc (loc,
12339 TREE_TYPE (arg0),
12340 arg1),
12341 TREE_OPERAND (arg0, 1)));
12342
12343 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
12344 if ((TREE_CODE (arg0) == PLUS_EXPR
12345 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12346 || TREE_CODE (arg0) == MINUS_EXPR)
12347 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12348 0)),
12349 arg1, 0)
12350 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12351 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12352 {
12353 tree val = TREE_OPERAND (arg0, 1);
12354 return omit_two_operands_loc (loc, type,
12355 fold_build2_loc (loc, code, type,
12356 val,
12357 build_int_cst (TREE_TYPE (val),
12358 0)),
12359 TREE_OPERAND (arg0, 0), arg1);
12360 }
12361
12362 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
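      /* For odd C, C - X == X would require 2*X == C, which no integer
	 X satisfies (2*X is even, also modulo 2^prec), so EQ folds to
	 false and NE to true.  */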
12363 if (TREE_CODE (arg0) == MINUS_EXPR
12364 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12365 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12366 1)),
12367 arg1, 0)
12368 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12369 {
12370 return omit_two_operands_loc (loc, type,
12371 code == NE_EXPR
12372 ? boolean_true_node : boolean_false_node,
12373 TREE_OPERAND (arg0, 1), arg1);
12374 }
12375
12376 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12377 if (TREE_CODE (arg0) == ABS_EXPR
12378 && (integer_zerop (arg1) || real_zerop (arg1)))
12379 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12380
12381 /* If this is an EQ or NE comparison with zero and ARG0 is
12382 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12383 two operations, but the latter can be done in one less insn
12384 on machines that have only two-operand insns or on which a
12385 constant cannot be the first operand. */
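      /* E.g. ((1 << n) & flags) != 0 folds to ((flags >> n) & 1) != 0.  */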
12386 if (TREE_CODE (arg0) == BIT_AND_EXPR
12387 && integer_zerop (arg1))
12388 {
12389 tree arg00 = TREE_OPERAND (arg0, 0);
12390 tree arg01 = TREE_OPERAND (arg0, 1);
12391 if (TREE_CODE (arg00) == LSHIFT_EXPR
12392 && integer_onep (TREE_OPERAND (arg00, 0)))
12393 {
12394 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12395 arg01, TREE_OPERAND (arg00, 1));
12396 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12397 build_int_cst (TREE_TYPE (arg0), 1));
12398 return fold_build2_loc (loc, code, type,
12399 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12400 arg1);
12401 }
12402 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12403 && integer_onep (TREE_OPERAND (arg01, 0)))
12404 {
12405 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12406 arg00, TREE_OPERAND (arg01, 1));
12407 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12408 build_int_cst (TREE_TYPE (arg0), 1));
12409 return fold_build2_loc (loc, code, type,
12410 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12411 arg1);
12412 }
12413 }
12414
12415 /* If this is an NE or EQ comparison of zero against the result of a
12416 signed MOD operation whose second operand is a power of 2, make
12417 the MOD operation unsigned since it is simpler and equivalent. */
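      /* E.g. for signed x, x % 4 == 0 becomes (unsigned) x % 4 == 0,
	 which recursive folding can then reduce to a mask test; equality
	 with zero does not depend on the sign of the remainder.  */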
12418 if (integer_zerop (arg1)
12419 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12420 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12421 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12422 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12423 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12424 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12425 {
12426 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12427 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12428 fold_convert_loc (loc, newtype,
12429 TREE_OPERAND (arg0, 0)),
12430 fold_convert_loc (loc, newtype,
12431 TREE_OPERAND (arg0, 1)));
12432
12433 return fold_build2_loc (loc, code, type, newmod,
12434 fold_convert_loc (loc, newtype, arg1));
12435 }
12436
12437 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12438 C1 is a valid shift constant, and C2 is a power of two, i.e.
12439 a single bit. */
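      /* E.g. ((x >> 3) & 4) != 0 tests bit 5 of x and folds to
	 (x & 32) != 0.  When the shifted mask would overflow the
	 precision, the tested bit is the sign bit and the comparison
	 degenerates to x < 0 or x >= 0.  */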
12440 if (TREE_CODE (arg0) == BIT_AND_EXPR
12441 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12442 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12443 == INTEGER_CST
12444 && integer_pow2p (TREE_OPERAND (arg0, 1))
12445 && integer_zerop (arg1))
12446 {
12447 tree itype = TREE_TYPE (arg0);
12448 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12449 prec = TYPE_PRECISION (itype);
12450
12451 /* Check for a valid shift count. */
12452 if (wi::ltu_p (arg001, prec))
12453 {
12454 tree arg01 = TREE_OPERAND (arg0, 1);
12455 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12456 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12457 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12458 can be rewritten as (X & (C2 << C1)) != 0. */
12459 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12460 {
12461 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12462 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12463 return fold_build2_loc (loc, code, type, tem,
12464 fold_convert_loc (loc, itype, arg1));
12465 }
12466 /* Otherwise, for signed (arithmetic) shifts,
12467 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12468 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12469 else if (!TYPE_UNSIGNED (itype))
12470 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12471 arg000, build_int_cst (itype, 0));
12472 /* Otherwise, for unsigned (logical) shifts,
12473 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12474 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12475 else
12476 return omit_one_operand_loc (loc, type,
12477 code == EQ_EXPR ? integer_one_node
12478 : integer_zero_node,
12479 arg000);
12480 }
12481 }
12482
12483 /* If we have (A & C) == C where C is a power of 2, convert this into
12484 (A & C) != 0. Similarly for NE_EXPR. */
12485 if (TREE_CODE (arg0) == BIT_AND_EXPR
12486 && integer_pow2p (TREE_OPERAND (arg0, 1))
12487 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12488 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12489 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12490 integer_zero_node));
12491
12492 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12493 bit, then fold the expression into A < 0 or A >= 0. */
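      /* E.g. for a 32-bit int a, (a & INT_MIN) != 0 folds to a < 0
	 and (a & INT_MIN) == 0 to a >= 0.  */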
12494 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12495 if (tem)
12496 return tem;
12497
12498 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12499 Similarly for NE_EXPR. */
12500 if (TREE_CODE (arg0) == BIT_AND_EXPR
12501 && TREE_CODE (arg1) == INTEGER_CST
12502 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12503 {
12504 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12505 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12506 TREE_OPERAND (arg0, 1));
12507 tree dandnotc
12508 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12509 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12510 notc);
12511 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12512 if (integer_nonzerop (dandnotc))
12513 return omit_one_operand_loc (loc, type, rslt, arg0);
12514 }
12515
12516 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12517 Similarly for NE_EXPR. */
12518 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12519 && TREE_CODE (arg1) == INTEGER_CST
12520 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12521 {
12522 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12523 tree candnotd
12524 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12525 TREE_OPERAND (arg0, 1),
12526 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12527 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12528 if (integer_nonzerop (candnotd))
12529 return omit_one_operand_loc (loc, type, rslt, arg0);
12530 }
12531
12532 /* If this is a comparison of a field, we may be able to simplify it. */
12533 if ((TREE_CODE (arg0) == COMPONENT_REF
12534 || TREE_CODE (arg0) == BIT_FIELD_REF)
12535 /* Handle the constant case even without -O
12536 to make sure the warnings are given. */
12537 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12538 {
12539 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12540 if (t1)
12541 return t1;
12542 }
12543
12544 /* Optimize comparisons of strlen vs zero to a compare of the
12545 first character of the string vs zero. To wit,
12546 strlen(ptr) == 0 => *ptr == 0
12547 strlen(ptr) != 0 => *ptr != 0
12548 Other cases should reduce to one of these two (or a constant)
12549 due to the return value of strlen being unsigned. */
12550 if (TREE_CODE (arg0) == CALL_EXPR
12551 && integer_zerop (arg1))
12552 {
12553 tree fndecl = get_callee_fndecl (arg0);
12554
12555 if (fndecl
12556 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12557 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12558 && call_expr_nargs (arg0) == 1
12559 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12560 {
12561 tree iref = build_fold_indirect_ref_loc (loc,
12562 CALL_EXPR_ARG (arg0, 0));
12563 return fold_build2_loc (loc, code, type, iref,
12564 build_int_cst (TREE_TYPE (iref), 0));
12565 }
12566 }
12567
12568 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12569 of X. Similarly fold (X >> C) == 0 into X >= 0. */
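      /* E.g. for a 32-bit int x, (x >> 31) != 0 folds to x < 0; for an
	 unsigned operand the comparison is first moved to the
	 corresponding signed type.  */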
12570 if (TREE_CODE (arg0) == RSHIFT_EXPR
12571 && integer_zerop (arg1)
12572 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12573 {
12574 tree arg00 = TREE_OPERAND (arg0, 0);
12575 tree arg01 = TREE_OPERAND (arg0, 1);
12576 tree itype = TREE_TYPE (arg00);
12577 if (wi::eq_p (arg01, element_precision (itype) - 1))
12578 {
12579 if (TYPE_UNSIGNED (itype))
12580 {
12581 itype = signed_type_for (itype);
12582 arg00 = fold_convert_loc (loc, itype, arg00);
12583 }
12584 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12585 type, arg00, build_zero_cst (itype));
12586 }
12587 }
12588
12589 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12590 if (integer_zerop (arg1)
12591 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12592 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12593 TREE_OPERAND (arg0, 1));
12594
12595 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12596 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12597 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12598 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12599 build_zero_cst (TREE_TYPE (arg0)));
12600 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12601 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12602 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12603 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12604 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12605 build_zero_cst (TREE_TYPE (arg0)));
12606
12607 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
12608 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12609 && TREE_CODE (arg1) == INTEGER_CST
12610 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12611 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12612 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12613 TREE_OPERAND (arg0, 1), arg1));
12614
12615 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12616 (X & C) == 0 when C is a single bit. */
12617 if (TREE_CODE (arg0) == BIT_AND_EXPR
12618 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12619 && integer_zerop (arg1)
12620 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12621 {
12622 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12623 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12624 TREE_OPERAND (arg0, 1));
12625 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12626 type, tem,
12627 fold_convert_loc (loc, TREE_TYPE (arg0),
12628 arg1));
12629 }
12630
12631 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12632 constant C is a power of two, i.e. a single bit. */
12633 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12634 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12635 && integer_zerop (arg1)
12636 && integer_pow2p (TREE_OPERAND (arg0, 1))
12637 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12638 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12639 {
12640 tree arg00 = TREE_OPERAND (arg0, 0);
12641 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12642 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12643 }
12644
12645 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12646 when C is a power of two, i.e. a single bit. */
12647 if (TREE_CODE (arg0) == BIT_AND_EXPR
12648 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12649 && integer_zerop (arg1)
12650 && integer_pow2p (TREE_OPERAND (arg0, 1))
12651 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12652 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12653 {
12654 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12655 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12656 arg000, TREE_OPERAND (arg0, 1));
12657 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12658 tem, build_int_cst (TREE_TYPE (tem), 0));
12659 }
12660
12661 if (integer_zerop (arg1)
12662 && tree_expr_nonzero_p (arg0))
12663 {
12664 tree res = constant_boolean_node (code == NE_EXPR, type);
12665 return omit_one_operand_loc (loc, type, res, arg0);
12666 }
12667
12668 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12669 if (TREE_CODE (arg0) == NEGATE_EXPR
12670 && TREE_CODE (arg1) == NEGATE_EXPR)
12671 return fold_build2_loc (loc, code, type,
12672 TREE_OPERAND (arg0, 0),
12673 fold_convert_loc (loc, TREE_TYPE (arg0),
12674 TREE_OPERAND (arg1, 0)));
12675
12676 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
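      /* E.g. (x & m) == (y & m) folds to ((x ^ y) & m) == 0; the four
	 operand_equal_p cases below cover each way the shared mask can
	 pair up between the two BIT_AND_EXPRs.  */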
12677 if (TREE_CODE (arg0) == BIT_AND_EXPR
12678 && TREE_CODE (arg1) == BIT_AND_EXPR)
12679 {
12680 tree arg00 = TREE_OPERAND (arg0, 0);
12681 tree arg01 = TREE_OPERAND (arg0, 1);
12682 tree arg10 = TREE_OPERAND (arg1, 0);
12683 tree arg11 = TREE_OPERAND (arg1, 1);
12684 tree itype = TREE_TYPE (arg0);
12685
12686 if (operand_equal_p (arg01, arg11, 0))
12687 return fold_build2_loc (loc, code, type,
12688 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12689 fold_build2_loc (loc,
12690 BIT_XOR_EXPR, itype,
12691 arg00, arg10),
12692 arg01),
12693 build_zero_cst (itype));
12694
12695 if (operand_equal_p (arg01, arg10, 0))
12696 return fold_build2_loc (loc, code, type,
12697 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12698 fold_build2_loc (loc,
12699 BIT_XOR_EXPR, itype,
12700 arg00, arg11),
12701 arg01),
12702 build_zero_cst (itype));
12703
12704 if (operand_equal_p (arg00, arg11, 0))
12705 return fold_build2_loc (loc, code, type,
12706 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12707 fold_build2_loc (loc,
12708 BIT_XOR_EXPR, itype,
12709 arg01, arg10),
12710 arg00),
12711 build_zero_cst (itype));
12712
12713 if (operand_equal_p (arg00, arg10, 0))
12714 return fold_build2_loc (loc, code, type,
12715 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12716 fold_build2_loc (loc,
12717 BIT_XOR_EXPR, itype,
12718 arg01, arg11),
12719 arg00),
12720 build_zero_cst (itype));
12721 }
12722
12723 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12724 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12725 {
12726 tree arg00 = TREE_OPERAND (arg0, 0);
12727 tree arg01 = TREE_OPERAND (arg0, 1);
12728 tree arg10 = TREE_OPERAND (arg1, 0);
12729 tree arg11 = TREE_OPERAND (arg1, 1);
12730 tree itype = TREE_TYPE (arg0);
12731
12732 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12733 operand_equal_p guarantees no side-effects so we don't need
12734 to use omit_one_operand on Z. */
12735 if (operand_equal_p (arg01, arg11, 0))
12736 return fold_build2_loc (loc, code, type, arg00,
12737 fold_convert_loc (loc, TREE_TYPE (arg00),
12738 arg10));
12739 if (operand_equal_p (arg01, arg10, 0))
12740 return fold_build2_loc (loc, code, type, arg00,
12741 fold_convert_loc (loc, TREE_TYPE (arg00),
12742 arg11));
12743 if (operand_equal_p (arg00, arg11, 0))
12744 return fold_build2_loc (loc, code, type, arg01,
12745 fold_convert_loc (loc, TREE_TYPE (arg01),
12746 arg10));
12747 if (operand_equal_p (arg00, arg10, 0))
12748 return fold_build2_loc (loc, code, type, arg01,
12749 fold_convert_loc (loc, TREE_TYPE (arg01),
12750 arg11));
12751
12752 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
12753 if (TREE_CODE (arg01) == INTEGER_CST
12754 && TREE_CODE (arg11) == INTEGER_CST)
12755 {
12756 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12757 fold_convert_loc (loc, itype, arg11));
12758 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12759 return fold_build2_loc (loc, code, type, tem,
12760 fold_convert_loc (loc, itype, arg10));
12761 }
12762 }
12763
12764 /* Attempt to simplify equality/inequality comparisons of complex
12765 values. Only lower the comparison if the result is known or
12766 can be simplified to a single scalar comparison. */
12767 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12768 || TREE_CODE (arg0) == COMPLEX_CST)
12769 && (TREE_CODE (arg1) == COMPLEX_EXPR
12770 || TREE_CODE (arg1) == COMPLEX_CST))
12771 {
12772 tree real0, imag0, real1, imag1;
12773 tree rcond, icond;
12774
12775 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12776 {
12777 real0 = TREE_OPERAND (arg0, 0);
12778 imag0 = TREE_OPERAND (arg0, 1);
12779 }
12780 else
12781 {
12782 real0 = TREE_REALPART (arg0);
12783 imag0 = TREE_IMAGPART (arg0);
12784 }
12785
12786 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12787 {
12788 real1 = TREE_OPERAND (arg1, 0);
12789 imag1 = TREE_OPERAND (arg1, 1);
12790 }
12791 else
12792 {
12793 real1 = TREE_REALPART (arg1);
12794 imag1 = TREE_IMAGPART (arg1);
12795 }
12796
12797 rcond = fold_binary_loc (loc, code, type, real0, real1);
12798 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12799 {
12800 if (integer_zerop (rcond))
12801 {
12802 if (code == EQ_EXPR)
12803 return omit_two_operands_loc (loc, type, boolean_false_node,
12804 imag0, imag1);
12805 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12806 }
12807 else
12808 {
12809 if (code == NE_EXPR)
12810 return omit_two_operands_loc (loc, type, boolean_true_node,
12811 imag0, imag1);
12812 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12813 }
12814 }
12815
12816 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12817 if (icond && TREE_CODE (icond) == INTEGER_CST)
12818 {
12819 if (integer_zerop (icond))
12820 {
12821 if (code == EQ_EXPR)
12822 return omit_two_operands_loc (loc, type, boolean_false_node,
12823 real0, real1);
12824 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12825 }
12826 else
12827 {
12828 if (code == NE_EXPR)
12829 return omit_two_operands_loc (loc, type, boolean_true_node,
12830 real0, real1);
12831 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12832 }
12833 }
12834 }
12835
12836 return NULL_TREE;
12837
12838 case LT_EXPR:
12839 case GT_EXPR:
12840 case LE_EXPR:
12841 case GE_EXPR:
12842 tem = fold_comparison (loc, code, type, op0, op1);
12843 if (tem != NULL_TREE)
12844 return tem;
12845
12846 /* Transform comparisons of the form X +- C CMP X. */
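      /* E.g. with undefined signed overflow, x - 1 < x folds to true
	 and x + 1 < x folds to false; for the integer variants each
	 case below first emits a strict-overflow warning.  */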
12847 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12848 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12849 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12850 && !HONOR_SNANS (arg0))
12851 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12852 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12853 {
12854 tree arg01 = TREE_OPERAND (arg0, 1);
12855 enum tree_code code0 = TREE_CODE (arg0);
12856 int is_positive;
12857
12858 if (TREE_CODE (arg01) == REAL_CST)
12859 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12860 else
12861 is_positive = tree_int_cst_sgn (arg01);
12862
12863 /* (X - c) > X becomes false. */
12864 if (code == GT_EXPR
12865 && ((code0 == MINUS_EXPR && is_positive >= 0)
12866 || (code0 == PLUS_EXPR && is_positive <= 0)))
12867 {
12868 if (TREE_CODE (arg01) == INTEGER_CST
12869 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12870 fold_overflow_warning (("assuming signed overflow does not "
12871 "occur when assuming that (X - c) > X "
12872 "is always false"),
12873 WARN_STRICT_OVERFLOW_ALL);
12874 return constant_boolean_node (0, type);
12875 }
12876
12877 /* Likewise (X + c) < X becomes false. */
12878 if (code == LT_EXPR
12879 && ((code0 == PLUS_EXPR && is_positive >= 0)
12880 || (code0 == MINUS_EXPR && is_positive <= 0)))
12881 {
12882 if (TREE_CODE (arg01) == INTEGER_CST
12883 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12884 fold_overflow_warning (("assuming signed overflow does not "
12885 "occur when assuming that "
12886 "(X + c) < X is always false"),
12887 WARN_STRICT_OVERFLOW_ALL);
12888 return constant_boolean_node (0, type);
12889 }
12890
12891 /* Convert (X - c) <= X to true. */
12892 if (!HONOR_NANS (arg1)
12893 && code == LE_EXPR
12894 && ((code0 == MINUS_EXPR && is_positive >= 0)
12895 || (code0 == PLUS_EXPR && is_positive <= 0)))
12896 {
12897 if (TREE_CODE (arg01) == INTEGER_CST
12898 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12899 fold_overflow_warning (("assuming signed overflow does not "
12900 "occur when assuming that "
12901 "(X - c) <= X is always true"),
12902 WARN_STRICT_OVERFLOW_ALL);
12903 return constant_boolean_node (1, type);
12904 }
12905
12906 /* Convert (X + c) >= X to true. */
12907 if (!HONOR_NANS (arg1)
12908 && code == GE_EXPR
12909 && ((code0 == PLUS_EXPR && is_positive >= 0)
12910 || (code0 == MINUS_EXPR && is_positive <= 0)))
12911 {
12912 if (TREE_CODE (arg01) == INTEGER_CST
12913 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12914 fold_overflow_warning (("assuming signed overflow does not "
12915 "occur when assuming that "
12916 "(X + c) >= X is always true"),
12917 WARN_STRICT_OVERFLOW_ALL);
12918 return constant_boolean_node (1, type);
12919 }
12920
12921 if (TREE_CODE (arg01) == INTEGER_CST)
12922 {
12923 /* Convert X + c > X and X - c < X to true for integers. */
12924 if (code == GT_EXPR
12925 && ((code0 == PLUS_EXPR && is_positive > 0)
12926 || (code0 == MINUS_EXPR && is_positive < 0)))
12927 {
12928 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12929 fold_overflow_warning (("assuming signed overflow does "
12930 "not occur when assuming that "
12931 "(X + c) > X is always true"),
12932 WARN_STRICT_OVERFLOW_ALL);
12933 return constant_boolean_node (1, type);
12934 }
12935
12936 if (code == LT_EXPR
12937 && ((code0 == MINUS_EXPR && is_positive > 0)
12938 || (code0 == PLUS_EXPR && is_positive < 0)))
12939 {
12940 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12941 fold_overflow_warning (("assuming signed overflow does "
12942 "not occur when assuming that "
12943 "(X - c) < X is always true"),
12944 WARN_STRICT_OVERFLOW_ALL);
12945 return constant_boolean_node (1, type);
12946 }
12947
12948 /* Convert X + c <= X and X - c >= X to false for integers. */
12949 if (code == LE_EXPR
12950 && ((code0 == PLUS_EXPR && is_positive > 0)
12951 || (code0 == MINUS_EXPR && is_positive < 0)))
12952 {
12953 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12954 fold_overflow_warning (("assuming signed overflow does "
12955 "not occur when assuming that "
12956 "(X + c) <= X is always false"),
12957 WARN_STRICT_OVERFLOW_ALL);
12958 return constant_boolean_node (0, type);
12959 }
12960
12961 if (code == GE_EXPR
12962 && ((code0 == MINUS_EXPR && is_positive > 0)
12963 || (code0 == PLUS_EXPR && is_positive < 0)))
12964 {
12965 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12966 fold_overflow_warning (("assuming signed overflow does "
12967 "not occur when assuming that "
12968 "(X - c) >= X is always false"),
12969 WARN_STRICT_OVERFLOW_ALL);
12970 return constant_boolean_node (0, type);
12971 }
12972 }
12973 }
12974
12975 /* Comparisons with the highest or lowest possible integer of
12976 the specified precision will have known values. */
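      /* E.g. for unsigned char x, x > 255 folds to false, x <= 255 to
	 true, and x > 254 becomes x == 255.  */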
12977 {
12978 tree arg1_type = TREE_TYPE (arg1);
12979 unsigned int prec = TYPE_PRECISION (arg1_type);
12980
12981 if (TREE_CODE (arg1) == INTEGER_CST
12982 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12983 {
12984 wide_int max = wi::max_value (arg1_type);
12985 wide_int signed_max = wi::max_value (prec, SIGNED);
12986 wide_int min = wi::min_value (arg1_type);
12987
12988 if (wi::eq_p (arg1, max))
12989 switch (code)
12990 {
12991 case GT_EXPR:
12992 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12993
12994 case GE_EXPR:
12995 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12996
12997 case LE_EXPR:
12998 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12999
13000 case LT_EXPR:
13001 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13002
13003 /* The GE_EXPR and LT_EXPR cases above are not normally
13004 reached because of previous transformations. */
13005
13006 default:
13007 break;
13008 }
13009 else if (wi::eq_p (arg1, max - 1))
13010 switch (code)
13011 {
13012 case GT_EXPR:
13013 arg1 = const_binop (PLUS_EXPR, arg1,
13014 build_int_cst (TREE_TYPE (arg1), 1));
13015 return fold_build2_loc (loc, EQ_EXPR, type,
13016 fold_convert_loc (loc,
13017 TREE_TYPE (arg1), arg0),
13018 arg1);
13019 case LE_EXPR:
13020 arg1 = const_binop (PLUS_EXPR, arg1,
13021 build_int_cst (TREE_TYPE (arg1), 1));
13022 return fold_build2_loc (loc, NE_EXPR, type,
13023 fold_convert_loc (loc, TREE_TYPE (arg1),
13024 arg0),
13025 arg1);
13026 default:
13027 break;
13028 }
13029 else if (wi::eq_p (arg1, min))
13030 switch (code)
13031 {
13032 case LT_EXPR:
13033 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
13034
13035 case LE_EXPR:
13036 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
13037
13038 case GE_EXPR:
13039 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
13040
13041 case GT_EXPR:
13042 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
13043
13044 default:
13045 break;
13046 }
13047 else if (wi::eq_p (arg1, min + 1))
13048 switch (code)
13049 {
13050 case GE_EXPR:
13051 arg1 = const_binop (MINUS_EXPR, arg1,
13052 build_int_cst (TREE_TYPE (arg1), 1));
13053 return fold_build2_loc (loc, NE_EXPR, type,
13054 fold_convert_loc (loc,
13055 TREE_TYPE (arg1), arg0),
13056 arg1);
13057 case LT_EXPR:
13058 arg1 = const_binop (MINUS_EXPR, arg1,
13059 build_int_cst (TREE_TYPE (arg1), 1));
13060 return fold_build2_loc (loc, EQ_EXPR, type,
13061 fold_convert_loc (loc, TREE_TYPE (arg1),
13062 arg0),
13063 arg1);
13064 default:
13065 break;
13066 }
13067
13068 else if (wi::eq_p (arg1, signed_max)
13069 && TYPE_UNSIGNED (arg1_type)
13070 /* We will flip the signedness of the comparison operator
13071 associated with the mode of arg1, so the sign bit is
13072 specified by this mode. Check that arg1 is the signed
13073 max associated with this sign bit. */
13074 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13075 /* signed_type does not work on pointer types. */
13076 && INTEGRAL_TYPE_P (arg1_type))
13077 {
13078 /* The following case also applies to X < signed_max+1
13079 and X >= signed_max+1 because of previous transformations. */
13080 if (code == LE_EXPR || code == GT_EXPR)
13081 {
13082 tree st = signed_type_for (arg1_type);
13083 return fold_build2_loc (loc,
13084 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13085 type, fold_convert_loc (loc, st, arg0),
13086 build_int_cst (st, 0));
13087 }
13088 }
13089 }
13090 }
13091
13092 /* If we are comparing an ABS_EXPR with a constant, we can
13093 convert all the cases into explicit comparisons, but they may
13094 well not be faster than doing the ABS and one comparison.
13095 But ABS (X) <= C is a range comparison, which becomes a subtraction
13096 and a comparison, and is probably faster. */
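      /* E.g. abs(x) <= 5 folds to x >= -5 && x <= 5.  */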
13097 if (code == LE_EXPR
13098 && TREE_CODE (arg1) == INTEGER_CST
13099 && TREE_CODE (arg0) == ABS_EXPR
13100 && ! TREE_SIDE_EFFECTS (arg0)
13101 && (0 != (tem = negate_expr (arg1)))
13102 && TREE_CODE (tem) == INTEGER_CST
13103 && !TREE_OVERFLOW (tem))
13104 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13105 build2 (GE_EXPR, type,
13106 TREE_OPERAND (arg0, 0), tem),
13107 build2 (LE_EXPR, type,
13108 TREE_OPERAND (arg0, 0), arg1));
13109
13110 /* Convert ABS_EXPR<x> >= 0 to true. */
13111 strict_overflow_p = false;
13112 if (code == GE_EXPR
13113 && (integer_zerop (arg1)
13114 || (! HONOR_NANS (arg0)
13115 && real_zerop (arg1)))
13116 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13117 {
13118 if (strict_overflow_p)
13119 fold_overflow_warning (("assuming signed overflow does not occur "
13120 "when simplifying comparison of "
13121 "absolute value and zero"),
13122 WARN_STRICT_OVERFLOW_CONDITIONAL);
13123 return omit_one_operand_loc (loc, type,
13124 constant_boolean_node (true, type),
13125 arg0);
13126 }
13127
13128 /* Convert ABS_EXPR<x> < 0 to false. */
13129 strict_overflow_p = false;
13130 if (code == LT_EXPR
13131 && (integer_zerop (arg1) || real_zerop (arg1))
13132 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13133 {
13134 if (strict_overflow_p)
13135 fold_overflow_warning (("assuming signed overflow does not occur "
13136 "when simplifying comparison of "
13137 "absolute value and zero"),
13138 WARN_STRICT_OVERFLOW_CONDITIONAL);
13139 return omit_one_operand_loc (loc, type,
13140 constant_boolean_node (false, type),
13141 arg0);
13142 }
13143
13144 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13145 and similarly for >= into !=. */
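      /* For unsigned x, x < (1 << y) holds exactly when no bit at or
	 above position y is set, i.e. when (x >> y) == 0.  */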
13146 if ((code == LT_EXPR || code == GE_EXPR)
13147 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13148 && TREE_CODE (arg1) == LSHIFT_EXPR
13149 && integer_onep (TREE_OPERAND (arg1, 0)))
13150 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13151 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13152 TREE_OPERAND (arg1, 1)),
13153 build_zero_cst (TREE_TYPE (arg0)));
13154
13155 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13156 otherwise Y might be >= # of bits in X's type and thus e.g.
13157 (unsigned char) (1 << Y) for Y == 15 might be 0.
13158 If the cast is widening, then 1 << Y should have unsigned type,
13159 otherwise if Y is the number of bits in the signed shift type minus 1,
13160 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
13161 Y == 31 might be 0xffffffff80000000. */
13162 if ((code == LT_EXPR || code == GE_EXPR)
13163 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13164 && CONVERT_EXPR_P (arg1)
13165 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13166 && (element_precision (TREE_TYPE (arg1))
13167 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13168 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13169 || (element_precision (TREE_TYPE (arg1))
13170 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13171 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13172 {
13173 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13174 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13175 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13176 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13177 build_zero_cst (TREE_TYPE (arg0)));
13178 }
13179
13180 return NULL_TREE;
13181
13182 case UNORDERED_EXPR:
13183 case ORDERED_EXPR:
13184 case UNLT_EXPR:
13185 case UNLE_EXPR:
13186 case UNGT_EXPR:
13187 case UNGE_EXPR:
13188 case UNEQ_EXPR:
13189 case LTGT_EXPR:
13190 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13191 {
13192 t1 = fold_relational_const (code, type, arg0, arg1);
13193 if (t1 != NULL_TREE)
13194 return t1;
13195 }
13196
13197 /* If the first operand is NaN, the result is constant. */
13198 if (TREE_CODE (arg0) == REAL_CST
13199 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13200 && (code != LTGT_EXPR || ! flag_trapping_math))
13201 {
13202 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13203 ? integer_zero_node
13204 : integer_one_node;
13205 return omit_one_operand_loc (loc, type, t1, arg1);
13206 }
13207
13208 /* If the second operand is NaN, the result is constant. */
13209 if (TREE_CODE (arg1) == REAL_CST
13210 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13211 && (code != LTGT_EXPR || ! flag_trapping_math))
13212 {
13213 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13214 ? integer_zero_node
13215 : integer_one_node;
13216 return omit_one_operand_loc (loc, type, t1, arg0);
13217 }
13218
13219 /* Simplify unordered comparison of something with itself. */
13220 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13221 && operand_equal_p (arg0, arg1, 0))
13222 return constant_boolean_node (1, type);
13223
13224 if (code == LTGT_EXPR
13225 && !flag_trapping_math
13226 && operand_equal_p (arg0, arg1, 0))
13227 return constant_boolean_node (0, type);
13228
13229 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13230 {
13231 tree targ0 = strip_float_extensions (arg0);
13232 tree targ1 = strip_float_extensions (arg1);
13233 tree newtype = TREE_TYPE (targ0);
13234
13235 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13236 newtype = TREE_TYPE (targ1);
13237
13238 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13239 return fold_build2_loc (loc, code, type,
13240 fold_convert_loc (loc, newtype, targ0),
13241 fold_convert_loc (loc, newtype, targ1));
13242 }
13243
13244 return NULL_TREE;
13245
13246 case COMPOUND_EXPR:
13247 /* When pedantic, a compound expression can be neither an lvalue
13248 nor an integer constant expression. */
13249 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13250 return NULL_TREE;
13251 /* Don't let (0, 0) be a null pointer constant. */
13252 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13253 : fold_convert_loc (loc, type, arg1);
13254 return pedantic_non_lvalue_loc (loc, tem);
13255
13256 case ASSERT_EXPR:
13257 /* An ASSERT_EXPR should never be passed to fold_binary. */
13258 gcc_unreachable ();
13259
13260 default:
13261 return NULL_TREE;
13262 } /* switch (code) */
13263 }
13264
13265 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13266 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13267 of GOTO_EXPR. */
13268
13269 static tree
13270 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13271 {
13272 switch (TREE_CODE (*tp))
13273 {
13274 case LABEL_EXPR:
13275 return *tp;
13276
13277 case GOTO_EXPR:
13278 *walk_subtrees = 0;
13279
13280 /* ... fall through ... */
13281
13282 default:
13283 return NULL_TREE;
13284 }
13285 }
13286
13287 /* Return whether the sub-tree ST contains a label which is accessible from
13288 outside the sub-tree. */
13289
13290 static bool
13291 contains_label_p (tree st)
13292 {
13293 return
13294 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13295 }
13296
13297 /* Fold a ternary expression of code CODE and type TYPE with operands
13298 OP0, OP1, and OP2. Return the folded expression if folding is
13299 successful. Otherwise, return NULL_TREE. */
13300
13301 tree
13302 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13303 tree op0, tree op1, tree op2)
13304 {
13305 tree tem;
13306 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13307 enum tree_code_class kind = TREE_CODE_CLASS (code);
13308
13309 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13310 && TREE_CODE_LENGTH (code) == 3);
13311
13312 /* If this is a commutative operation, and OP0 is a constant, move it
13313 to OP1 to reduce the number of tests below. */
13314 if (commutative_ternary_tree_code (code)
13315 && tree_swap_operands_p (op0, op1, true))
13316 return fold_build3_loc (loc, code, type, op1, op0, op2);
13317
13318 tem = generic_simplify (loc, code, type, op0, op1, op2);
13319 if (tem)
13320 return tem;
13321
13322 /* Strip any conversions that don't change the mode. This is safe
13323 for every expression, except for a comparison expression because
13324 its signedness is derived from its operands. So, in the latter
13325 case, only strip conversions that don't change the signedness.
13326
13327 Note that this is done as an internal manipulation within the
13328 constant folder, in order to find the simplest representation of
13329 the arguments so that their form can be studied. In any case,
13330 the appropriate type conversions should be put back in the tree
13331 that will get out of the constant folder. */
13332 if (op0)
13333 {
13334 arg0 = op0;
13335 STRIP_NOPS (arg0);
13336 }
13337
13338 if (op1)
13339 {
13340 arg1 = op1;
13341 STRIP_NOPS (arg1);
13342 }
13343
13344 if (op2)
13345 {
13346 arg2 = op2;
13347 STRIP_NOPS (arg2);
13348 }
13349
13350 switch (code)
13351 {
13352 case COMPONENT_REF:
13353 if (TREE_CODE (arg0) == CONSTRUCTOR
13354 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13355 {
13356 unsigned HOST_WIDE_INT idx;
13357 tree field, value;
13358 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13359 if (field == arg1)
13360 return value;
13361 }
13362 return NULL_TREE;
13363
13364 case COND_EXPR:
13365 case VEC_COND_EXPR:
13366 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13367 so all simple results must be passed through pedantic_non_lvalue. */
13368 if (TREE_CODE (arg0) == INTEGER_CST)
13369 {
13370 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13371 tem = integer_zerop (arg0) ? op2 : op1;
13372 /* Only optimize constant conditions when the selected branch
13373 has the same type as the COND_EXPR. This avoids optimizing
13374 away "c ? x : throw", where the throw has a void type.
13375 Avoid throwing away the operand that contains a label. */
13376 if ((!TREE_SIDE_EFFECTS (unused_op)
13377 || !contains_label_p (unused_op))
13378 && (! VOID_TYPE_P (TREE_TYPE (tem))
13379 || VOID_TYPE_P (type)))
13380 return pedantic_non_lvalue_loc (loc, tem);
13381 return NULL_TREE;
13382 }
13383 else if (TREE_CODE (arg0) == VECTOR_CST)
13384 {
13385 if ((TREE_CODE (arg1) == VECTOR_CST
13386 || TREE_CODE (arg1) == CONSTRUCTOR)
13387 && (TREE_CODE (arg2) == VECTOR_CST
13388 || TREE_CODE (arg2) == CONSTRUCTOR))
13389 {
13390 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13391 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13392 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13393 for (i = 0; i < nelts; i++)
13394 {
13395 tree val = VECTOR_CST_ELT (arg0, i);
13396 if (integer_all_onesp (val))
13397 sel[i] = i;
13398 else if (integer_zerop (val))
13399 sel[i] = nelts + i;
13400 else /* Currently unreachable. */
13401 return NULL_TREE;
13402 }
13403 tree t = fold_vec_perm (type, arg1, arg2, sel);
13404 if (t != NULL_TREE)
13405 return t;
13406 }
13407 }
13408
13409 /* If we have A op B ? A : C, we may be able to convert this to a
13410 simpler expression, depending on the operation and the values
13411 of B and C. Signed zeros prevent all of these transformations,
13412 for reasons given above each one.
13413
13414 Also try swapping the arguments and inverting the conditional. */
13415 if (COMPARISON_CLASS_P (arg0)
13416 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13417 arg1, TREE_OPERAND (arg0, 1))
13418 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13419 {
13420 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13421 if (tem)
13422 return tem;
13423 }
13424
13425 if (COMPARISON_CLASS_P (arg0)
13426 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13427 op2,
13428 TREE_OPERAND (arg0, 1))
13429 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13430 {
13431 location_t loc0 = expr_location_or (arg0, loc);
13432 tem = fold_invert_truthvalue (loc0, arg0);
13433 if (tem && COMPARISON_CLASS_P (tem))
13434 {
13435 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13436 if (tem)
13437 return tem;
13438 }
13439 }
13440
13441 /* If the second operand is simpler than the third, swap them
13442 since that produces better jump optimization results. */
13443 if (truth_value_p (TREE_CODE (arg0))
13444 && tree_swap_operands_p (op1, op2, false))
13445 {
13446 location_t loc0 = expr_location_or (arg0, loc);
13447 /* See if this can be inverted. If it can't, possibly because
13448 it was a floating-point inequality comparison, don't do
13449 anything. */
13450 tem = fold_invert_truthvalue (loc0, arg0);
13451 if (tem)
13452 return fold_build3_loc (loc, code, type, tem, op2, op1);
13453 }
13454
13455 /* Convert A ? 1 : 0 to simply A. */
13456 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13457 : (integer_onep (op1)
13458 && !VECTOR_TYPE_P (type)))
13459 && integer_zerop (op2)
13460 /* If we try to convert OP0 to our type, the
13461 call to fold will try to move the conversion inside
13462 a COND, which will recurse. In that case, the COND_EXPR
13463 is probably the best choice, so leave it alone. */
13464 && type == TREE_TYPE (arg0))
13465 return pedantic_non_lvalue_loc (loc, arg0);
13466
13467 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13468 over COND_EXPR in cases such as floating point comparisons. */
13469 if (integer_zerop (op1)
13470 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13471 : (integer_onep (op2)
13472 && !VECTOR_TYPE_P (type)))
13473 && truth_value_p (TREE_CODE (arg0)))
13474 return pedantic_non_lvalue_loc (loc,
13475 fold_convert_loc (loc, type,
13476 invert_truthvalue_loc (loc,
13477 arg0)));
13478
13479 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
13480 if (TREE_CODE (arg0) == LT_EXPR
13481 && integer_zerop (TREE_OPERAND (arg0, 1))
13482 && integer_zerop (op2)
13483 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13484 {
13485 /* sign_bit_p looks through both zero and sign extensions,
13486 but for this optimization only sign extensions are
13487 usable. */
13488 tree tem2 = TREE_OPERAND (arg0, 0);
13489 while (tem != tem2)
13490 {
13491 if (TREE_CODE (tem2) != NOP_EXPR
13492 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13493 {
13494 tem = NULL_TREE;
13495 break;
13496 }
13497 tem2 = TREE_OPERAND (tem2, 0);
13498 }
13499 /* sign_bit_p only checks ARG1 bits within A's precision.
13500 If <sign bit of A> has a wider type than A, the bits outside
13501 of A's precision in <sign bit of A> need to be checked.
13502 If they are all 0, this optimization must be done in A's
13503 unsigned type; if they are all 1, in A's signed type;
13504 otherwise it can't be done. */
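/* As a sketch: if A is a signed char widened to a 32-bit int and
 ARG1 is 0x80, bits 8..31 of ARG1 are all zero, so the AND is
 done in unsigned char; for ARG1 == 0xffffff80 they are all ones
 and the signed char type is used instead. */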
13505 if (tem
13506 && TYPE_PRECISION (TREE_TYPE (tem))
13507 < TYPE_PRECISION (TREE_TYPE (arg1))
13508 && TYPE_PRECISION (TREE_TYPE (tem))
13509 < TYPE_PRECISION (type))
13510 {
13511 int inner_width, outer_width;
13512 tree tem_type;
13513
13514 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13515 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13516 if (outer_width > TYPE_PRECISION (type))
13517 outer_width = TYPE_PRECISION (type);
13518
13519 wide_int mask = wi::shifted_mask
13520 (inner_width, outer_width - inner_width, false,
13521 TYPE_PRECISION (TREE_TYPE (arg1)));
13522
13523 wide_int common = mask & arg1;
13524 if (common == mask)
13525 {
13526 tem_type = signed_type_for (TREE_TYPE (tem));
13527 tem = fold_convert_loc (loc, tem_type, tem);
13528 }
13529 else if (common == 0)
13530 {
13531 tem_type = unsigned_type_for (TREE_TYPE (tem));
13532 tem = fold_convert_loc (loc, tem_type, tem);
13533 }
13534 else
13535 tem = NULL;
13536 }
13537
13538 if (tem)
13539 return
13540 fold_convert_loc (loc, type,
13541 fold_build2_loc (loc, BIT_AND_EXPR,
13542 TREE_TYPE (tem), tem,
13543 fold_convert_loc (loc,
13544 TREE_TYPE (tem),
13545 arg1)));
13546 }
13547
13548 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13549 already handled above. */
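/* For example, "((a >> 3) & 1) ? 8 : 0" becomes "a & 8", since
 tree_log2 (8) matches the shift count of 3 (a sketch). */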
13550 if (TREE_CODE (arg0) == BIT_AND_EXPR
13551 && integer_onep (TREE_OPERAND (arg0, 1))
13552 && integer_zerop (op2)
13553 && integer_pow2p (arg1))
13554 {
13555 tree tem = TREE_OPERAND (arg0, 0);
13556 STRIP_NOPS (tem);
13557 if (TREE_CODE (tem) == RSHIFT_EXPR
13558 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13559 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
13560 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
13561 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13562 TREE_OPERAND (tem, 0), arg1);
13563 }
13564
13565 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13566 is probably obsolete because the first operand should be a
13567 truth value (that's why we have the two cases above), but let's
13568 leave it in until we can confirm this for all front-ends. */
13569 if (integer_zerop (op2)
13570 && TREE_CODE (arg0) == NE_EXPR
13571 && integer_zerop (TREE_OPERAND (arg0, 1))
13572 && integer_pow2p (arg1)
13573 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13574 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13575 arg1, OEP_ONLY_CONST))
13576 return pedantic_non_lvalue_loc (loc,
13577 fold_convert_loc (loc, type,
13578 TREE_OPERAND (arg0, 0)));
13579
13580 /* Disable the transformations below for vectors, since
13581 fold_binary_op_with_conditional_arg may undo them immediately,
13582 yielding an infinite loop. */
13583 if (code == VEC_COND_EXPR)
13584 return NULL_TREE;
13585
13586 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13587 if (integer_zerop (op2)
13588 && truth_value_p (TREE_CODE (arg0))
13589 && truth_value_p (TREE_CODE (arg1))
13590 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13591 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13592 : TRUTH_ANDIF_EXPR,
13593 type, fold_convert_loc (loc, type, arg0), arg1);
13594
13595 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13596 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13597 && truth_value_p (TREE_CODE (arg0))
13598 && truth_value_p (TREE_CODE (arg1))
13599 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13600 {
13601 location_t loc0 = expr_location_or (arg0, loc);
13602 /* Only perform transformation if ARG0 is easily inverted. */
13603 tem = fold_invert_truthvalue (loc0, arg0);
13604 if (tem)
13605 return fold_build2_loc (loc, code == VEC_COND_EXPR
13606 ? BIT_IOR_EXPR
13607 : TRUTH_ORIF_EXPR,
13608 type, fold_convert_loc (loc, type, tem),
13609 arg1);
13610 }
13611
13612 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13613 if (integer_zerop (arg1)
13614 && truth_value_p (TREE_CODE (arg0))
13615 && truth_value_p (TREE_CODE (op2))
13616 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13617 {
13618 location_t loc0 = expr_location_or (arg0, loc);
13619 /* Only perform transformation if ARG0 is easily inverted. */
13620 tem = fold_invert_truthvalue (loc0, arg0);
13621 if (tem)
13622 return fold_build2_loc (loc, code == VEC_COND_EXPR
13623 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13624 type, fold_convert_loc (loc, type, tem),
13625 op2);
13626 }
13627
13628 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13629 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13630 && truth_value_p (TREE_CODE (arg0))
13631 && truth_value_p (TREE_CODE (op2))
13632 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13633 return fold_build2_loc (loc, code == VEC_COND_EXPR
13634 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13635 type, fold_convert_loc (loc, type, arg0), op2);
13636
13637 return NULL_TREE;
13638
13639 case CALL_EXPR:
13640 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13641 of fold_ternary on them. */
13642 gcc_unreachable ();
13643
13644 case BIT_FIELD_REF:
13645 if ((TREE_CODE (arg0) == VECTOR_CST
13646 || (TREE_CODE (arg0) == CONSTRUCTOR
13647 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13648 && (type == TREE_TYPE (TREE_TYPE (arg0))
13649 || (TREE_CODE (type) == VECTOR_TYPE
13650 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13651 {
13652 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13653 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13654 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13655 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13656
13657 if (n != 0
13658 && (idx % width) == 0
13659 && (n % width) == 0
13660 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13661 {
13662 idx = idx / width;
13663 n = n / width;
13664
13665 if (TREE_CODE (arg0) == VECTOR_CST)
13666 {
13667 if (n == 1)
13668 return VECTOR_CST_ELT (arg0, idx);
13669
13670 tree *vals = XALLOCAVEC (tree, n);
13671 for (unsigned i = 0; i < n; ++i)
13672 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13673 return build_vector (type, vals);
13674 }
13675
13676 /* Constructor elements can be subvectors. */
13677 unsigned HOST_WIDE_INT k = 1;
13678 if (CONSTRUCTOR_NELTS (arg0) != 0)
13679 {
13680 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13681 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13682 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13683 }
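/* As a sketch: for a V8SI built from two V4SI constructor
 elements, K is 4, so a reference to elements 4..7 is an exact
 subset and yields the second constructor element directly. */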
13684
13685 /* We keep an exact subset of the constructor elements. */
13686 if ((idx % k) == 0 && (n % k) == 0)
13687 {
13688 if (CONSTRUCTOR_NELTS (arg0) == 0)
13689 return build_constructor (type, NULL);
13690 idx /= k;
13691 n /= k;
13692 if (n == 1)
13693 {
13694 if (idx < CONSTRUCTOR_NELTS (arg0))
13695 return CONSTRUCTOR_ELT (arg0, idx)->value;
13696 return build_zero_cst (type);
13697 }
13698
13699 vec<constructor_elt, va_gc> *vals;
13700 vec_alloc (vals, n);
13701 for (unsigned i = 0;
13702 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13703 ++i)
13704 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13705 CONSTRUCTOR_ELT
13706 (arg0, idx + i)->value);
13707 return build_constructor (type, vals);
13708 }
13709 /* The bitfield references a single constructor element. */
13710 else if (idx + n <= (idx / k + 1) * k)
13711 {
13712 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13713 return build_zero_cst (type);
13714 else if (n == k)
13715 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13716 else
13717 return fold_build3_loc (loc, code, type,
13718 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13719 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13720 }
13721 }
13722 }
13723
13724 /* A bit-field-ref that references the full argument can be stripped. */
13725 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13726 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13727 && integer_zerop (op2))
13728 return fold_convert_loc (loc, type, arg0);
13729
13730 /* On constants we can use native encode/interpret to constant
13731 fold (nearly) all BIT_FIELD_REFs. */
13732 if (CONSTANT_CLASS_P (arg0)
13733 && can_native_interpret_type_p (type)
13734 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13735 /* This limitation should not be necessary; we just need to
13736 round this up to the mode size. */
13737 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13738 /* Need bit-shifting of the buffer to relax the following. */
13739 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13740 {
13741 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13742 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13743 unsigned HOST_WIDE_INT clen;
13744 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13745 /* ??? We cannot tell native_encode_expr to start at
13746 an arbitrary byte only, so limit ourselves to a reasonable
13747 amount of work. */
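/* As a sketch: for a 64-bit INTEGER_CST with OP1 == 32 and
 OP2 == 32, the constant is encoded into an 8-byte buffer below
 and the four bytes at offset 4 are reinterpreted in TYPE,
 provided the target encoding succeeds. */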
13748 if (clen <= 4096)
13749 {
13750 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13751 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13752 if (len > 0
13753 && len * BITS_PER_UNIT >= bitpos + bitsize)
13754 {
13755 tree v = native_interpret_expr (type,
13756 b + bitpos / BITS_PER_UNIT,
13757 bitsize / BITS_PER_UNIT);
13758 if (v)
13759 return v;
13760 }
13761 }
13762 }
13763
13764 return NULL_TREE;
13765
13766 case FMA_EXPR:
13767 /* For integers we can decompose the FMA if possible. */
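/* E.g. FMA_EXPR <2, 3, z> becomes 6 + z, and FMA_EXPR <x, y, 0>
 degenerates to x * y (illustrative sketches). */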
13768 if (TREE_CODE (arg0) == INTEGER_CST
13769 && TREE_CODE (arg1) == INTEGER_CST)
13770 return fold_build2_loc (loc, PLUS_EXPR, type,
13771 const_binop (MULT_EXPR, arg0, arg1), arg2);
13772 if (integer_zerop (arg2))
13773 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13774
13775 return fold_fma (loc, type, arg0, arg1, arg2);
13776
13777 case VEC_PERM_EXPR:
13778 if (TREE_CODE (arg2) == VECTOR_CST)
13779 {
13780 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13781 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13782 unsigned char *sel2 = sel + nelts;
13783 bool need_mask_canon = false;
13784 bool need_mask_canon2 = false;
13785 bool all_in_vec0 = true;
13786 bool all_in_vec1 = true;
13787 bool maybe_identity = true;
13788 bool single_arg = (op0 == op1);
13789 bool changed = false;
13790
13791 mask2 = 2 * nelts - 1;
13792 mask = single_arg ? (nelts - 1) : mask2;
13793 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13794 for (i = 0; i < nelts; i++)
13795 {
13796 tree val = VECTOR_CST_ELT (arg2, i);
13797 if (TREE_CODE (val) != INTEGER_CST)
13798 return NULL_TREE;
13799
13800 /* Make sure that the perm value is in an acceptable
13801 range. */
13802 wide_int t = val;
13803 need_mask_canon |= wi::gtu_p (t, mask);
13804 need_mask_canon2 |= wi::gtu_p (t, mask2);
13805 sel[i] = t.to_uhwi () & mask;
13806 sel2[i] = t.to_uhwi () & mask2;
13807
13808 if (sel[i] < nelts)
13809 all_in_vec1 = false;
13810 else
13811 all_in_vec0 = false;
13812
13813 if ((sel[i] & (nelts-1)) != i)
13814 maybe_identity = false;
13815 }
13816
13817 if (maybe_identity)
13818 {
13819 if (all_in_vec0)
13820 return op0;
13821 if (all_in_vec1)
13822 return op1;
13823 }
13824
13825 if (all_in_vec0)
13826 op1 = op0;
13827 else if (all_in_vec1)
13828 {
13829 op0 = op1;
13830 for (i = 0; i < nelts; i++)
13831 sel[i] -= nelts;
13832 need_mask_canon = true;
13833 }
13834
13835 if ((TREE_CODE (op0) == VECTOR_CST
13836 || TREE_CODE (op0) == CONSTRUCTOR)
13837 && (TREE_CODE (op1) == VECTOR_CST
13838 || TREE_CODE (op1) == CONSTRUCTOR))
13839 {
13840 tree t = fold_vec_perm (type, op0, op1, sel);
13841 if (t != NULL_TREE)
13842 return t;
13843 }
13844
13845 if (op0 == op1 && !single_arg)
13846 changed = true;
13847
13848 /* Some targets are deficient and fail to expand a single
13849 argument permutation while still allowing an equivalent
13850 2-argument version. */
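/* As a sketch: with nelts == 4 and a single-argument mask of
 { 4, 5, 6, 7 }, SEL is reduced to { 0, 1, 2, 3 } while SEL2
 keeps { 4, 5, 6, 7 }; if the target only expands the latter
 form, SEL2 is used instead. */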
13851 if (need_mask_canon && arg2 == op2
13852 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13853 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13854 {
13855 need_mask_canon = need_mask_canon2;
13856 sel = sel2;
13857 }
13858
13859 if (need_mask_canon && arg2 == op2)
13860 {
13861 tree *tsel = XALLOCAVEC (tree, nelts);
13862 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13863 for (i = 0; i < nelts; i++)
13864 tsel[i] = build_int_cst (eltype, sel[i]);
13865 op2 = build_vector (TREE_TYPE (arg2), tsel);
13866 changed = true;
13867 }
13868
13869 if (changed)
13870 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13871 }
13872 return NULL_TREE;
13873
13874 default:
13875 return NULL_TREE;
13876 } /* switch (code) */
13877 }
13878
13879 /* Perform constant folding and related simplification of EXPR.
13880 The related simplifications include x*1 => x, x*0 => 0, etc.,
13881 and application of the associative law.
13882 NOP_EXPR conversions may be removed freely (as long as we
13883 are careful not to change the type of the overall expression).
13884 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13885 but we can constant-fold them if they have constant operands. */
13886
13887 #ifdef ENABLE_FOLD_CHECKING
13888 # define fold(x) fold_1 (x)
13889 static tree fold_1 (tree);
13890 static
13891 #endif
13892 tree
13893 fold (tree expr)
13894 {
13895 const tree t = expr;
13896 enum tree_code code = TREE_CODE (t);
13897 enum tree_code_class kind = TREE_CODE_CLASS (code);
13898 tree tem;
13899 location_t loc = EXPR_LOCATION (expr);
13900
13901 /* Return right away if a constant. */
13902 if (kind == tcc_constant)
13903 return t;
13904
13905 /* CALL_EXPR-like objects with variable numbers of operands are
13906 treated specially. */
13907 if (kind == tcc_vl_exp)
13908 {
13909 if (code == CALL_EXPR)
13910 {
13911 tem = fold_call_expr (loc, expr, false);
13912 return tem ? tem : expr;
13913 }
13914 return expr;
13915 }
13916
13917 if (IS_EXPR_CODE_CLASS (kind))
13918 {
13919 tree type = TREE_TYPE (t);
13920 tree op0, op1, op2;
13921
13922 switch (TREE_CODE_LENGTH (code))
13923 {
13924 case 1:
13925 op0 = TREE_OPERAND (t, 0);
13926 tem = fold_unary_loc (loc, code, type, op0);
13927 return tem ? tem : expr;
13928 case 2:
13929 op0 = TREE_OPERAND (t, 0);
13930 op1 = TREE_OPERAND (t, 1);
13931 tem = fold_binary_loc (loc, code, type, op0, op1);
13932 return tem ? tem : expr;
13933 case 3:
13934 op0 = TREE_OPERAND (t, 0);
13935 op1 = TREE_OPERAND (t, 1);
13936 op2 = TREE_OPERAND (t, 2);
13937 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13938 return tem ? tem : expr;
13939 default:
13940 break;
13941 }
13942 }
13943
13944 switch (code)
13945 {
13946 case ARRAY_REF:
13947 {
13948 tree op0 = TREE_OPERAND (t, 0);
13949 tree op1 = TREE_OPERAND (t, 1);
13950
13951 if (TREE_CODE (op1) == INTEGER_CST
13952 && TREE_CODE (op0) == CONSTRUCTOR
13953 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13954 {
13955 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13956 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13957 unsigned HOST_WIDE_INT begin = 0;
13958
13959 /* Find a matching index by means of a binary search. */
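/* This assumes the constructor elements are sorted by index
 (RANGE_EXPR entries by their bounds), which the front ends are
 expected to guarantee for array constructors here. */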
13960 while (begin != end)
13961 {
13962 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13963 tree index = (*elts)[middle].index;
13964
13965 if (TREE_CODE (index) == INTEGER_CST
13966 && tree_int_cst_lt (index, op1))
13967 begin = middle + 1;
13968 else if (TREE_CODE (index) == INTEGER_CST
13969 && tree_int_cst_lt (op1, index))
13970 end = middle;
13971 else if (TREE_CODE (index) == RANGE_EXPR
13972 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13973 begin = middle + 1;
13974 else if (TREE_CODE (index) == RANGE_EXPR
13975 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13976 end = middle;
13977 else
13978 return (*elts)[middle].value;
13979 }
13980 }
13981
13982 return t;
13983 }
13984
13985 /* Return a VECTOR_CST if possible. */
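/* E.g. a V4SI CONSTRUCTOR { 1, 2 } with constant elements becomes
 the VECTOR_CST { 1, 2, 0, 0 }; missing trailing elements are
 zero-filled below (a sketch). */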
13986 case CONSTRUCTOR:
13987 {
13988 tree type = TREE_TYPE (t);
13989 if (TREE_CODE (type) != VECTOR_TYPE)
13990 return t;
13991
13992 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13993 unsigned HOST_WIDE_INT idx, pos = 0;
13994 tree value;
13995
13996 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13997 {
13998 if (!CONSTANT_CLASS_P (value))
13999 return t;
14000 if (TREE_CODE (value) == VECTOR_CST)
14001 {
14002 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
14003 vec[pos++] = VECTOR_CST_ELT (value, i);
14004 }
14005 else
14006 vec[pos++] = value;
14007 }
14008 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
14009 vec[pos] = build_zero_cst (TREE_TYPE (type));
14010
14011 return build_vector (type, vec);
14012 }
14013
14014 case CONST_DECL:
14015 return fold (DECL_INITIAL (t));
14016
14017 default:
14018 return t;
14019 } /* switch (code) */
14020 }
14021
14022 #ifdef ENABLE_FOLD_CHECKING
14023 #undef fold
14024
14025 static void fold_checksum_tree (const_tree, struct md5_ctx *,
14026 hash_table<pointer_hash<const tree_node> > *);
14027 static void fold_check_failed (const_tree, const_tree);
14028 void print_fold_checksum (const_tree);
14029
14030 /* When --enable-checking=fold is in effect, compute a digest of EXPR
14031 before and after the actual fold call to verify that fold did not
14032 accidentally change the original expr. */
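/* The digest deliberately skips fields fold is allowed to touch
 (DECL_ASSEMBLER_NAME, type caches and variants; see
 fold_checksum_tree), so only unexpected mutations trip the check. */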
14033
14034 tree
14035 fold (tree expr)
14036 {
14037 tree ret;
14038 struct md5_ctx ctx;
14039 unsigned char checksum_before[16], checksum_after[16];
14040 hash_table<pointer_hash<const tree_node> > ht (32);
14041
14042 md5_init_ctx (&ctx);
14043 fold_checksum_tree (expr, &ctx, &ht);
14044 md5_finish_ctx (&ctx, checksum_before);
14045 ht.empty ();
14046
14047 ret = fold_1 (expr);
14048
14049 md5_init_ctx (&ctx);
14050 fold_checksum_tree (expr, &ctx, &ht);
14051 md5_finish_ctx (&ctx, checksum_after);
14052
14053 if (memcmp (checksum_before, checksum_after, 16))
14054 fold_check_failed (expr, ret);
14055
14056 return ret;
14057 }
14058
14059 void
14060 print_fold_checksum (const_tree expr)
14061 {
14062 struct md5_ctx ctx;
14063 unsigned char checksum[16], cnt;
14064 hash_table<pointer_hash<const tree_node> > ht (32);
14065
14066 md5_init_ctx (&ctx);
14067 fold_checksum_tree (expr, &ctx, &ht);
14068 md5_finish_ctx (&ctx, checksum);
14069 for (cnt = 0; cnt < 16; ++cnt)
14070 fprintf (stderr, "%02x", checksum[cnt]);
14071 putc ('\n', stderr);
14072 }
14073
14074 static void
14075 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14076 {
14077 internal_error ("fold check: original tree changed by fold");
14078 }
14079
14080 static void
14081 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14082 hash_table<pointer_hash <const tree_node> > *ht)
14083 {
14084 const tree_node **slot;
14085 enum tree_code code;
14086 union tree_node buf;
14087 int i, len;
14088
14089 recursive_label:
14090 if (expr == NULL)
14091 return;
14092 slot = ht->find_slot (expr, INSERT);
14093 if (*slot != NULL)
14094 return;
14095 *slot = expr;
14096 code = TREE_CODE (expr);
14097 if (TREE_CODE_CLASS (code) == tcc_declaration
14098 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14099 {
14100 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14101 memcpy ((char *) &buf, expr, tree_size (expr));
14102 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14103 buf.decl_with_vis.symtab_node = NULL;
14104 expr = (tree) &buf;
14105 }
14106 else if (TREE_CODE_CLASS (code) == tcc_type
14107 && (TYPE_POINTER_TO (expr)
14108 || TYPE_REFERENCE_TO (expr)
14109 || TYPE_CACHED_VALUES_P (expr)
14110 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14111 || TYPE_NEXT_VARIANT (expr)))
14112 {
14113 /* Allow these fields to be modified. */
14114 tree tmp;
14115 memcpy ((char *) &buf, expr, tree_size (expr));
14116 expr = tmp = (tree) &buf;
14117 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14118 TYPE_POINTER_TO (tmp) = NULL;
14119 TYPE_REFERENCE_TO (tmp) = NULL;
14120 TYPE_NEXT_VARIANT (tmp) = NULL;
14121 if (TYPE_CACHED_VALUES_P (tmp))
14122 {
14123 TYPE_CACHED_VALUES_P (tmp) = 0;
14124 TYPE_CACHED_VALUES (tmp) = NULL;
14125 }
14126 }
14127 md5_process_bytes (expr, tree_size (expr), ctx);
14128 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14129 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14130 if (TREE_CODE_CLASS (code) != tcc_type
14131 && TREE_CODE_CLASS (code) != tcc_declaration
14132 && code != TREE_LIST
14133 && code != SSA_NAME
14134 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14135 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14136 switch (TREE_CODE_CLASS (code))
14137 {
14138 case tcc_constant:
14139 switch (code)
14140 {
14141 case STRING_CST:
14142 md5_process_bytes (TREE_STRING_POINTER (expr),
14143 TREE_STRING_LENGTH (expr), ctx);
14144 break;
14145 case COMPLEX_CST:
14146 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14147 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14148 break;
14149 case VECTOR_CST:
14150 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14151 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14152 break;
14153 default:
14154 break;
14155 }
14156 break;
14157 case tcc_exceptional:
14158 switch (code)
14159 {
14160 case TREE_LIST:
14161 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14162 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14163 expr = TREE_CHAIN (expr);
14164 goto recursive_label;
14165 break;
14166 case TREE_VEC:
14167 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14168 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14169 break;
14170 default:
14171 break;
14172 }
14173 break;
14174 case tcc_expression:
14175 case tcc_reference:
14176 case tcc_comparison:
14177 case tcc_unary:
14178 case tcc_binary:
14179 case tcc_statement:
14180 case tcc_vl_exp:
14181 len = TREE_OPERAND_LENGTH (expr);
14182 for (i = 0; i < len; ++i)
14183 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14184 break;
14185 case tcc_declaration:
14186 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14187 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14188 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14189 {
14190 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14191 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14192 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14193 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14194 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14195 }
14196
14197 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14198 {
14199 if (TREE_CODE (expr) == FUNCTION_DECL)
14200 {
14201 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14202 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14203 }
14204 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14205 }
14206 break;
14207 case tcc_type:
14208 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14209 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14210 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14211 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14212 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14213 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14214 if (INTEGRAL_TYPE_P (expr)
14215 || SCALAR_FLOAT_TYPE_P (expr))
14216 {
14217 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14218 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14219 }
14220 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14221 if (TREE_CODE (expr) == RECORD_TYPE
14222 || TREE_CODE (expr) == UNION_TYPE
14223 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14224 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14225 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14226 break;
14227 default:
14228 break;
14229 }
14230 }
14231
14232 /* Helper function for outputting the checksum of a tree T. When
14233 debugging with gdb, you can "define mynext" to be "next" followed
14234 by "call debug_fold_checksum (op0)", then just trace down till the
14235 outputs differ. */
14236
14237 DEBUG_FUNCTION void
14238 debug_fold_checksum (const_tree t)
14239 {
14240 int i;
14241 unsigned char checksum[16];
14242 struct md5_ctx ctx;
14243 hash_table<pointer_hash<const tree_node> > ht (32);
14244
14245 md5_init_ctx (&ctx);
14246 fold_checksum_tree (t, &ctx, &ht);
14247 md5_finish_ctx (&ctx, checksum);
14248 ht.empty ();
14249
14250 for (i = 0; i < 16; i++)
14251 fprintf (stderr, "%d ", checksum[i]);
14252
14253 fprintf (stderr, "\n");
14254 }
14255
14256 #endif
14257
14258 /* Fold a unary tree expression with code CODE of type TYPE with an
14259 operand OP0. LOC is the location of the resulting expression.
14260 Return a folded expression if successful. Otherwise, return a tree
14261 expression with code CODE of type TYPE with an operand OP0. */
14262
14263 tree
14264 fold_build1_stat_loc (location_t loc,
14265 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14266 {
14267 tree tem;
14268 #ifdef ENABLE_FOLD_CHECKING
14269 unsigned char checksum_before[16], checksum_after[16];
14270 struct md5_ctx ctx;
14271 hash_table<pointer_hash<const tree_node> > ht (32);
14272
14273 md5_init_ctx (&ctx);
14274 fold_checksum_tree (op0, &ctx, &ht);
14275 md5_finish_ctx (&ctx, checksum_before);
14276 ht.empty ();
14277 #endif
14278
14279 tem = fold_unary_loc (loc, code, type, op0);
14280 if (!tem)
14281 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14282
14283 #ifdef ENABLE_FOLD_CHECKING
14284 md5_init_ctx (&ctx);
14285 fold_checksum_tree (op0, &ctx, &ht);
14286 md5_finish_ctx (&ctx, checksum_after);
14287
14288 if (memcmp (checksum_before, checksum_after, 16))
14289 fold_check_failed (op0, tem);
14290 #endif
14291 return tem;
14292 }
14293
14294 /* Fold a binary tree expression with code CODE of type TYPE with
14295 operands OP0 and OP1. LOC is the location of the resulting
14296 expression. Return a folded expression if successful. Otherwise,
14297 return a tree expression with code CODE of type TYPE with operands
14298 OP0 and OP1. */
14299
14300 tree
14301 fold_build2_stat_loc (location_t loc,
14302 enum tree_code code, tree type, tree op0, tree op1
14303 MEM_STAT_DECL)
14304 {
14305 tree tem;
14306 #ifdef ENABLE_FOLD_CHECKING
14307 unsigned char checksum_before_op0[16],
14308 checksum_before_op1[16],
14309 checksum_after_op0[16],
14310 checksum_after_op1[16];
14311 struct md5_ctx ctx;
14312 hash_table<pointer_hash<const tree_node> > ht (32);
14313
14314 md5_init_ctx (&ctx);
14315 fold_checksum_tree (op0, &ctx, &ht);
14316 md5_finish_ctx (&ctx, checksum_before_op0);
14317 ht.empty ();
14318
14319 md5_init_ctx (&ctx);
14320 fold_checksum_tree (op1, &ctx, &ht);
14321 md5_finish_ctx (&ctx, checksum_before_op1);
14322 ht.empty ();
14323 #endif
14324
14325 tem = fold_binary_loc (loc, code, type, op0, op1);
14326 if (!tem)
14327 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14328
14329 #ifdef ENABLE_FOLD_CHECKING
14330 md5_init_ctx (&ctx);
14331 fold_checksum_tree (op0, &ctx, &ht);
14332 md5_finish_ctx (&ctx, checksum_after_op0);
14333 ht.empty ();
14334
14335 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14336 fold_check_failed (op0, tem);
14337
14338 md5_init_ctx (&ctx);
14339 fold_checksum_tree (op1, &ctx, &ht);
14340 md5_finish_ctx (&ctx, checksum_after_op1);
14341
14342 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14343 fold_check_failed (op1, tem);
14344 #endif
14345 return tem;
14346 }
14347
14348 /* Fold a ternary tree expression with code CODE of type TYPE with
14349 operands OP0, OP1, and OP2. Return a folded expression if
14350 successful. Otherwise, return a tree expression with code CODE of
14351 type TYPE with operands OP0, OP1, and OP2. */
14352
14353 tree
14354 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14355 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14356 {
14357 tree tem;
14358 #ifdef ENABLE_FOLD_CHECKING
14359 unsigned char checksum_before_op0[16],
14360 checksum_before_op1[16],
14361 checksum_before_op2[16],
14362 checksum_after_op0[16],
14363 checksum_after_op1[16],
14364 checksum_after_op2[16];
14365 struct md5_ctx ctx;
14366 hash_table<pointer_hash<const tree_node> > ht (32);
14367
14368 md5_init_ctx (&ctx);
14369 fold_checksum_tree (op0, &ctx, &ht);
14370 md5_finish_ctx (&ctx, checksum_before_op0);
14371 ht.empty ();
14372
14373 md5_init_ctx (&ctx);
14374 fold_checksum_tree (op1, &ctx, &ht);
14375 md5_finish_ctx (&ctx, checksum_before_op1);
14376 ht.empty ();
14377
14378 md5_init_ctx (&ctx);
14379 fold_checksum_tree (op2, &ctx, &ht);
14380 md5_finish_ctx (&ctx, checksum_before_op2);
14381 ht.empty ();
14382 #endif
14383
14384 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14385 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14386 if (!tem)
14387 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14388
14389 #ifdef ENABLE_FOLD_CHECKING
14390 md5_init_ctx (&ctx);
14391 fold_checksum_tree (op0, &ctx, &ht);
14392 md5_finish_ctx (&ctx, checksum_after_op0);
14393 ht.empty ();
14394
14395 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14396 fold_check_failed (op0, tem);
14397
14398 md5_init_ctx (&ctx);
14399 fold_checksum_tree (op1, &ctx, &ht);
14400 md5_finish_ctx (&ctx, checksum_after_op1);
14401 ht.empty ();
14402
14403 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14404 fold_check_failed (op1, tem);
14405
14406 md5_init_ctx (&ctx);
14407 fold_checksum_tree (op2, &ctx, &ht);
14408 md5_finish_ctx (&ctx, checksum_after_op2);
14409
14410 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14411 fold_check_failed (op2, tem);
14412 #endif
14413 return tem;
14414 }
14415
14416 /* Fold a CALL_EXPR expression of type TYPE with function operand FN,
14417 NARGS arguments in ARGARRAY, and a null static chain.
14418 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14419 of type TYPE from the given operands as constructed by build_call_array. */
14420
14421 tree
14422 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14423 int nargs, tree *argarray)
14424 {
14425 tree tem;
14426 #ifdef ENABLE_FOLD_CHECKING
14427 unsigned char checksum_before_fn[16],
14428 checksum_before_arglist[16],
14429 checksum_after_fn[16],
14430 checksum_after_arglist[16];
14431 struct md5_ctx ctx;
14432 hash_table<pointer_hash<const tree_node> > ht (32);
14433 int i;
14434
14435 md5_init_ctx (&ctx);
14436 fold_checksum_tree (fn, &ctx, &ht);
14437 md5_finish_ctx (&ctx, checksum_before_fn);
14438 ht.empty ();
14439
14440 md5_init_ctx (&ctx);
14441 for (i = 0; i < nargs; i++)
14442 fold_checksum_tree (argarray[i], &ctx, &ht);
14443 md5_finish_ctx (&ctx, checksum_before_arglist);
14444 ht.empty ();
14445 #endif
14446
14447 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14448 if (!tem)
14449 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14450
14451 #ifdef ENABLE_FOLD_CHECKING
14452 md5_init_ctx (&ctx);
14453 fold_checksum_tree (fn, &ctx, &ht);
14454 md5_finish_ctx (&ctx, checksum_after_fn);
14455 ht.empty ();
14456
14457 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14458 fold_check_failed (fn, tem);
14459
14460 md5_init_ctx (&ctx);
14461 for (i = 0; i < nargs; i++)
14462 fold_checksum_tree (argarray[i], &ctx, &ht);
14463 md5_finish_ctx (&ctx, checksum_after_arglist);
14464
14465 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14466 fold_check_failed (NULL_TREE, tem);
14467 #endif
14468 return tem;
14469 }
14470
14471 /* Perform constant folding and related simplification of initializer
14472 expression EXPR. These behave identically to "fold_buildN" but ignore
14473 potential run-time traps and exceptions that fold must preserve. */
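/* For example, with the flags cleared below, an initializer such as
 "static double d = 1.0 / 3.0;" is folded even under -frounding-math,
 since a static initializer cannot observe the run-time rounding
 mode anyway (a sketch). */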
14474
14475 #define START_FOLD_INIT \
14476 int saved_signaling_nans = flag_signaling_nans;\
14477 int saved_trapping_math = flag_trapping_math;\
14478 int saved_rounding_math = flag_rounding_math;\
14479 int saved_trapv = flag_trapv;\
14480 int saved_folding_initializer = folding_initializer;\
14481 flag_signaling_nans = 0;\
14482 flag_trapping_math = 0;\
14483 flag_rounding_math = 0;\
14484 flag_trapv = 0;\
14485 folding_initializer = 1;
14486
14487 #define END_FOLD_INIT \
14488 flag_signaling_nans = saved_signaling_nans;\
14489 flag_trapping_math = saved_trapping_math;\
14490 flag_rounding_math = saved_rounding_math;\
14491 flag_trapv = saved_trapv;\
14492 folding_initializer = saved_folding_initializer;
14493
14494 tree
14495 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14496 tree type, tree op)
14497 {
14498 tree result;
14499 START_FOLD_INIT;
14500
14501 result = fold_build1_loc (loc, code, type, op);
14502
14503 END_FOLD_INIT;
14504 return result;
14505 }
14506
14507 tree
14508 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14509 tree type, tree op0, tree op1)
14510 {
14511 tree result;
14512 START_FOLD_INIT;
14513
14514 result = fold_build2_loc (loc, code, type, op0, op1);
14515
14516 END_FOLD_INIT;
14517 return result;
14518 }
14519
14520 tree
14521 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14522 int nargs, tree *argarray)
14523 {
14524 tree result;
14525 START_FOLD_INIT;
14526
14527 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14528
14529 END_FOLD_INIT;
14530 return result;
14531 }
14532
14533 #undef START_FOLD_INIT
14534 #undef END_FOLD_INIT
14535
14536 /* Determine if the first argument is a multiple of the second argument.
14537 Return 0 if it is not, or if we cannot easily determine that it is.
14538
14539 An example of the sort of thing we care about (at this point; this routine
14540 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14541 fold cases do now) is discovering that
14542
14543 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14544
14545 is a multiple of
14546
14547 SAVE_EXPR (J * 8)
14548
14549 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14550
14551 This code also handles discovering that
14552
14553 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14554
14555 is a multiple of 8 so we don't have to worry about dealing with a
14556 possible remainder.
14557
14558 Note that we *look* inside a SAVE_EXPR only to determine how it was
14559 calculated; it is not safe for fold to do much of anything else with the
14560 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14561 at run time. For example, the latter example above *cannot* be implemented
14562 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14563 evaluation time of the original SAVE_EXPR is not necessarily the same at
14564 the time the new expression is evaluated. The only optimization of this
14565 sort that would be valid is changing
14566
14567 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14568
14569 divided by 8 to
14570
14571 SAVE_EXPR (I) * SAVE_EXPR (J)
14572
14573 (where the same SAVE_EXPR (J) is used in the original and the
14574 transformed version). */
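/* As a sketch: multiple_of_p (sizetype, J * 8, 4) returns 1 via the
 MULT_EXPR case below, because the constant factor 8 is itself a
 multiple of 4. */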
14575
14576 int
14577 multiple_of_p (tree type, const_tree top, const_tree bottom)
14578 {
14579 if (operand_equal_p (top, bottom, 0))
14580 return 1;
14581
14582 if (TREE_CODE (type) != INTEGER_TYPE)
14583 return 0;
14584
14585 switch (TREE_CODE (top))
14586 {
14587 case BIT_AND_EXPR:
14588 /* Bitwise and provides a power of two multiple. If the mask is
14589 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14590 if (!integer_pow2p (bottom))
14591 return 0;
14592 /* FALLTHRU */
14593
14594 case MULT_EXPR:
14595 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14596 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14597
14598 case PLUS_EXPR:
14599 case MINUS_EXPR:
14600 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14601 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14602
14603 case LSHIFT_EXPR:
14604 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14605 {
14606 tree op1, t1;
14607
14608 op1 = TREE_OPERAND (top, 1);
14609 /* const_binop may not detect overflow correctly,
14610 so check for it explicitly here. */
14611 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14612 && 0 != (t1 = fold_convert (type,
14613 const_binop (LSHIFT_EXPR,
14614 size_one_node,
14615 op1)))
14616 && !TREE_OVERFLOW (t1))
14617 return multiple_of_p (type, t1, bottom);
14618 }
14619 return 0;
14620
14621 case NOP_EXPR:
14622 /* Can't handle conversions from non-integral or wider integral type. */
14623 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14624 || (TYPE_PRECISION (type)
14625 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14626 return 0;
14627
14628 /* ... fall through ... */
14629
14630 case SAVE_EXPR:
14631 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14632
14633 case COND_EXPR:
14634 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14635 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14636
14637 case INTEGER_CST:
14638 if (TREE_CODE (bottom) != INTEGER_CST
14639 || integer_zerop (bottom)
14640 || (TYPE_UNSIGNED (type)
14641 && (tree_int_cst_sgn (top) < 0
14642 || tree_int_cst_sgn (bottom) < 0)))
14643 return 0;
14644 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14645 SIGNED);
14646
14647 default:
14648 return 0;
14649 }
14650 }
14651
14652 /* Return true if CODE or TYPE is known to be non-negative. */
14653
14654 static bool
14655 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14656 {
14657 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14658 && truth_value_p (code))
14659 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14660 have a signed:1 type (where the values are -1 and 0). */
14661 return true;
14662 return false;
14663 }
14664
14665 /* Return true if (CODE OP0) is known to be non-negative. If the return
14666 value is based on the assumption that signed overflow is undefined,
14667 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14668 *STRICT_OVERFLOW_P. */
14669
14670 bool
14671 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14672 bool *strict_overflow_p)
14673 {
14674 if (TYPE_UNSIGNED (type))
14675 return true;
14676
14677 switch (code)
14678 {
14679 case ABS_EXPR:
14680 /* We can't return 1 if flag_wrapv is set because
14681 ABS_EXPR<INT_MIN> = INT_MIN. */
14682 if (!INTEGRAL_TYPE_P (type))
14683 return true;
14684 if (TYPE_OVERFLOW_UNDEFINED (type))
14685 {
14686 *strict_overflow_p = true;
14687 return true;
14688 }
14689 break;
14690
14691 case NON_LVALUE_EXPR:
14692 case FLOAT_EXPR:
14693 case FIX_TRUNC_EXPR:
14694 return tree_expr_nonnegative_warnv_p (op0,
14695 strict_overflow_p);
14696
14697 CASE_CONVERT:
14698 {
14699 tree inner_type = TREE_TYPE (op0);
14700 tree outer_type = type;
14701
14702 if (TREE_CODE (outer_type) == REAL_TYPE)
14703 {
14704 if (TREE_CODE (inner_type) == REAL_TYPE)
14705 return tree_expr_nonnegative_warnv_p (op0,
14706 strict_overflow_p);
14707 if (INTEGRAL_TYPE_P (inner_type))
14708 {
14709 if (TYPE_UNSIGNED (inner_type))
14710 return true;
14711 return tree_expr_nonnegative_warnv_p (op0,
14712 strict_overflow_p);
14713 }
14714 }
14715 else if (INTEGRAL_TYPE_P (outer_type))
14716 {
14717 if (TREE_CODE (inner_type) == REAL_TYPE)
14718 return tree_expr_nonnegative_warnv_p (op0,
14719 strict_overflow_p);
14720 if (INTEGRAL_TYPE_P (inner_type))
14721 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14722 && TYPE_UNSIGNED (inner_type);
14723 }
14724 }
14725 break;
14726
14727 default:
14728 return tree_simple_nonnegative_warnv_p (code, type);
14729 }
14730
14731 /* We don't know the sign of `t', so be conservative and return false. */
14732 return false;
14733 }
14734
14735 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14736 value is based on the assumption that signed overflow is undefined,
14737 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14738 *STRICT_OVERFLOW_P. */
14739
14740 bool
14741 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14742 tree op1, bool *strict_overflow_p)
14743 {
14744 if (TYPE_UNSIGNED (type))
14745 return true;
14746
14747 switch (code)
14748 {
14749 case POINTER_PLUS_EXPR:
14750 case PLUS_EXPR:
14751 if (FLOAT_TYPE_P (type))
14752 return (tree_expr_nonnegative_warnv_p (op0,
14753 strict_overflow_p)
14754 && tree_expr_nonnegative_warnv_p (op1,
14755 strict_overflow_p));
14756
14757 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14758 both unsigned and at least 2 bits shorter than the result. */
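/* E.g. (int) (unsigned short) x + (int) (unsigned short) y with
 32-bit int is known non-negative: MAX (16, 16) + 1 == 17 < 32
 (a sketch assuming a 16-bit short). */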
14759 if (TREE_CODE (type) == INTEGER_TYPE
14760 && TREE_CODE (op0) == NOP_EXPR
14761 && TREE_CODE (op1) == NOP_EXPR)
14762 {
14763 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14764 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14765 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14766 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14767 {
14768 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14769 TYPE_PRECISION (inner2)) + 1;
14770 return prec < TYPE_PRECISION (type);
14771 }
14772 }
14773 break;
14774
14775 case MULT_EXPR:
14776 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14777 {
14778 /* x * x is always non-negative for floating point x
14779 or when overflow is undefined. */
14780 if (operand_equal_p (op0, op1, 0)
14781 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14782 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14783 {
14784 if (ANY_INTEGRAL_TYPE_P (type)
14785 && TYPE_OVERFLOW_UNDEFINED (type))
14786 *strict_overflow_p = true;
14787 return true;
14788 }
14789 }
14790
14791 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14792 both unsigned and their combined precision is less than the result's. */
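/* E.g. (int) (unsigned char) x * (int) (unsigned char) y fits in
 8 + 8 == 16 bits, so with 32-bit int the product is known
 non-negative (a sketch assuming an 8-bit char). */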
14793 if (TREE_CODE (type) == INTEGER_TYPE
14794 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14795 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14796 {
14797 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14798 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14799 : TREE_TYPE (op0);
14800 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14801 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14802 : TREE_TYPE (op1);
14803
14804 bool unsigned0 = TYPE_UNSIGNED (inner0);
14805 bool unsigned1 = TYPE_UNSIGNED (inner1);
14806
14807 if (TREE_CODE (op0) == INTEGER_CST)
14808 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14809
14810 if (TREE_CODE (op1) == INTEGER_CST)
14811 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14812
14813 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14814 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14815 {
14816 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14817 ? tree_int_cst_min_precision (op0, UNSIGNED)
14818 : TYPE_PRECISION (inner0);
14819
14820 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14821 ? tree_int_cst_min_precision (op1, UNSIGNED)
14822 : TYPE_PRECISION (inner1);
14823
14824 return precision0 + precision1 < TYPE_PRECISION (type);
14825 }
14826 }
14827 return false;
14828
14829 case BIT_AND_EXPR:
14830 case MAX_EXPR:
14831 return (tree_expr_nonnegative_warnv_p (op0,
14832 strict_overflow_p)
14833 || tree_expr_nonnegative_warnv_p (op1,
14834 strict_overflow_p));
14835
14836 case BIT_IOR_EXPR:
14837 case BIT_XOR_EXPR:
14838 case MIN_EXPR:
14839 case RDIV_EXPR:
14840 case TRUNC_DIV_EXPR:
14841 case CEIL_DIV_EXPR:
14842 case FLOOR_DIV_EXPR:
14843 case ROUND_DIV_EXPR:
14844 return (tree_expr_nonnegative_warnv_p (op0,
14845 strict_overflow_p)
14846 && tree_expr_nonnegative_warnv_p (op1,
14847 strict_overflow_p));
14848
14849 case TRUNC_MOD_EXPR:
14850 case CEIL_MOD_EXPR:
14851 case FLOOR_MOD_EXPR:
14852 case ROUND_MOD_EXPR:
14853 return tree_expr_nonnegative_warnv_p (op0,
14854 strict_overflow_p);
14855 default:
14856 return tree_simple_nonnegative_warnv_p (code, type);
14857 }
14858
14859 /* We don't know the sign of `t', so be conservative and return false. */
14860 return false;
14861 }
14862
14863 /* Return true if T is known to be non-negative. If the return
14864 value is based on the assumption that signed overflow is undefined,
14865 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14866 *STRICT_OVERFLOW_P. */
14867
14868 bool
14869 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14870 {
14871 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14872 return true;
14873
14874 switch (TREE_CODE (t))
14875 {
14876 case INTEGER_CST:
14877 return tree_int_cst_sgn (t) >= 0;
14878
14879 case REAL_CST:
14880 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14881
14882 case FIXED_CST:
14883 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14884
14885 case COND_EXPR:
14886 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14887 strict_overflow_p)
14888 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14889 strict_overflow_p));
14890 default:
14891 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14892 TREE_TYPE (t));
14893 }
14894 /* We don't know the sign of `t', so be conservative and return false. */
14895 return false;
14896 }
14897
14898 /* Return true if T is known to be non-negative. If the return
14899 value is based on the assumption that signed overflow is undefined,
14900 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14901 *STRICT_OVERFLOW_P. */
14902
14903 bool
14904 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14905 tree arg0, tree arg1, bool *strict_overflow_p)
14906 {
14907 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14908 switch (DECL_FUNCTION_CODE (fndecl))
14909 {
14910 CASE_FLT_FN (BUILT_IN_ACOS):
14911 CASE_FLT_FN (BUILT_IN_ACOSH):
14912 CASE_FLT_FN (BUILT_IN_CABS):
14913 CASE_FLT_FN (BUILT_IN_COSH):
14914 CASE_FLT_FN (BUILT_IN_ERFC):
14915 CASE_FLT_FN (BUILT_IN_EXP):
14916 CASE_FLT_FN (BUILT_IN_EXP10):
14917 CASE_FLT_FN (BUILT_IN_EXP2):
14918 CASE_FLT_FN (BUILT_IN_FABS):
14919 CASE_FLT_FN (BUILT_IN_FDIM):
14920 CASE_FLT_FN (BUILT_IN_HYPOT):
14921 CASE_FLT_FN (BUILT_IN_POW10):
14922 CASE_INT_FN (BUILT_IN_FFS):
14923 CASE_INT_FN (BUILT_IN_PARITY):
14924 CASE_INT_FN (BUILT_IN_POPCOUNT):
14925 CASE_INT_FN (BUILT_IN_CLZ):
14926 CASE_INT_FN (BUILT_IN_CLRSB):
14927 case BUILT_IN_BSWAP32:
14928 case BUILT_IN_BSWAP64:
14929 /* Always true. */
14930 return true;
14931
14932 CASE_FLT_FN (BUILT_IN_SQRT):
14933 /* sqrt(-0.0) is -0.0. */
14934 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14935 return true;
14936 return tree_expr_nonnegative_warnv_p (arg0,
14937 strict_overflow_p);
14938
14939 CASE_FLT_FN (BUILT_IN_ASINH):
14940 CASE_FLT_FN (BUILT_IN_ATAN):
14941 CASE_FLT_FN (BUILT_IN_ATANH):
14942 CASE_FLT_FN (BUILT_IN_CBRT):
14943 CASE_FLT_FN (BUILT_IN_CEIL):
14944 CASE_FLT_FN (BUILT_IN_ERF):
14945 CASE_FLT_FN (BUILT_IN_EXPM1):
14946 CASE_FLT_FN (BUILT_IN_FLOOR):
14947 CASE_FLT_FN (BUILT_IN_FMOD):
14948 CASE_FLT_FN (BUILT_IN_FREXP):
14949 CASE_FLT_FN (BUILT_IN_ICEIL):
14950 CASE_FLT_FN (BUILT_IN_IFLOOR):
14951 CASE_FLT_FN (BUILT_IN_IRINT):
14952 CASE_FLT_FN (BUILT_IN_IROUND):
14953 CASE_FLT_FN (BUILT_IN_LCEIL):
14954 CASE_FLT_FN (BUILT_IN_LDEXP):
14955 CASE_FLT_FN (BUILT_IN_LFLOOR):
14956 CASE_FLT_FN (BUILT_IN_LLCEIL):
14957 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14958 CASE_FLT_FN (BUILT_IN_LLRINT):
14959 CASE_FLT_FN (BUILT_IN_LLROUND):
14960 CASE_FLT_FN (BUILT_IN_LRINT):
14961 CASE_FLT_FN (BUILT_IN_LROUND):
14962 CASE_FLT_FN (BUILT_IN_MODF):
14963 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14964 CASE_FLT_FN (BUILT_IN_RINT):
14965 CASE_FLT_FN (BUILT_IN_ROUND):
14966 CASE_FLT_FN (BUILT_IN_SCALB):
14967 CASE_FLT_FN (BUILT_IN_SCALBLN):
14968 CASE_FLT_FN (BUILT_IN_SCALBN):
14969 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14970 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14971 CASE_FLT_FN (BUILT_IN_SINH):
14972 CASE_FLT_FN (BUILT_IN_TANH):
14973 CASE_FLT_FN (BUILT_IN_TRUNC):
14974 /* True if the 1st argument is nonnegative. */
14975 return tree_expr_nonnegative_warnv_p (arg0,
14976 strict_overflow_p);
14977
14978 CASE_FLT_FN (BUILT_IN_FMAX):
14979 /* True if the 1st OR 2nd arguments are nonnegative. */
14980 return (tree_expr_nonnegative_warnv_p (arg0,
14981 strict_overflow_p)
14982 || (tree_expr_nonnegative_warnv_p (arg1,
14983 strict_overflow_p)));
14984
14985 CASE_FLT_FN (BUILT_IN_FMIN):
14986 /* True if the 1st AND 2nd arguments are nonnegative. */
14987 return (tree_expr_nonnegative_warnv_p (arg0,
14988 strict_overflow_p)
14989 && (tree_expr_nonnegative_warnv_p (arg1,
14990 strict_overflow_p)));
14991
14992 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14993 /* True if the 2nd argument is nonnegative. */
14994 return tree_expr_nonnegative_warnv_p (arg1,
14995 strict_overflow_p);
14996
14997 CASE_FLT_FN (BUILT_IN_POWI):
14998 /* True if the 1st argument is nonnegative or the second
14999 argument is an even integer. */
15000 if (TREE_CODE (arg1) == INTEGER_CST
15001 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
15002 return true;
15003 return tree_expr_nonnegative_warnv_p (arg0,
15004 strict_overflow_p);
15005
15006 CASE_FLT_FN (BUILT_IN_POW):
15007 /* True if the 1st argument is nonnegative or the second
15008 argument is an even integer valued real. */
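/* E.g. pow (x, 2.0) is known non-negative for any x because the
 exponent is an even integer valued real, while pow (x, 2.5) is
 only non-negative when x is (a sketch). */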
15009 if (TREE_CODE (arg1) == REAL_CST)
15010 {
15011 REAL_VALUE_TYPE c;
15012 HOST_WIDE_INT n;
15013
15014 c = TREE_REAL_CST (arg1);
15015 n = real_to_integer (&c);
15016 if ((n & 1) == 0)
15017 {
15018 REAL_VALUE_TYPE cint;
15019 real_from_integer (&cint, VOIDmode, n, SIGNED);
15020 if (real_identical (&c, &cint))
15021 return true;
15022 }
15023 }
15024 return tree_expr_nonnegative_warnv_p (arg0,
15025 strict_overflow_p);
15026
15027 default:
15028 break;
15029 }
15030 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
15031 type);
15032 }
15033
15034 /* Return true if T is known to be non-negative. If the return
15035 value is based on the assumption that signed overflow is undefined,
15036 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15037 *STRICT_OVERFLOW_P. */
15038
15039 static bool
15040 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15041 {
15042 enum tree_code code = TREE_CODE (t);
15043 if (TYPE_UNSIGNED (TREE_TYPE (t)))
15044 return true;
15045
15046 switch (code)
15047 {
15048 case TARGET_EXPR:
15049 {
15050 tree temp = TARGET_EXPR_SLOT (t);
15051 t = TARGET_EXPR_INITIAL (t);
15052
15053 /* If the initializer is non-void, then it's a normal expression
15054 that will be assigned to the slot. */
15055 if (!VOID_TYPE_P (t))
15056 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15057
15058 /* Otherwise, the initializer sets the slot in some way. One common
15059 way is an assignment statement at the end of the initializer. */
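/* As a sketch: for TARGET_EXPR <D.1, { ...; D.1 = e; }>, the loop
 below walks to the trailing MODIFY_EXPR and tests "e" instead. */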
15060 while (1)
15061 {
15062 if (TREE_CODE (t) == BIND_EXPR)
15063 t = expr_last (BIND_EXPR_BODY (t));
15064 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15065 || TREE_CODE (t) == TRY_CATCH_EXPR)
15066 t = expr_last (TREE_OPERAND (t, 0));
15067 else if (TREE_CODE (t) == STATEMENT_LIST)
15068 t = expr_last (t);
15069 else
15070 break;
15071 }
15072 if (TREE_CODE (t) == MODIFY_EXPR
15073 && TREE_OPERAND (t, 0) == temp)
15074 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15075 strict_overflow_p);
15076
15077 return false;
15078 }
15079
15080 case CALL_EXPR:
15081 {
15082 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15083 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15084
15085 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15086 get_callee_fndecl (t),
15087 arg0,
15088 arg1,
15089 strict_overflow_p);
15090 }
15091 case COMPOUND_EXPR:
15092 case MODIFY_EXPR:
15093 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15094 strict_overflow_p);
15095 case BIND_EXPR:
15096 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15097 strict_overflow_p);
15098 case SAVE_EXPR:
15099 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15100 strict_overflow_p);
15101
15102 default:
15103 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15104 TREE_TYPE (t));
15105 }
15106
15107 /* We don't know the sign of `t', so be conservative and return false. */
15108 return false;
15109 }
15110
15111 /* Return true if T is known to be non-negative. If the return
15112 value is based on the assumption that signed overflow is undefined,
15113 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15114 *STRICT_OVERFLOW_P. */
15115
15116 bool
15117 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15118 {
15119 enum tree_code code;
15120 if (t == error_mark_node)
15121 return false;
15122
15123 code = TREE_CODE (t);
15124 switch (TREE_CODE_CLASS (code))
15125 {
15126 case tcc_binary:
15127 case tcc_comparison:
15128 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15129 TREE_TYPE (t),
15130 TREE_OPERAND (t, 0),
15131 TREE_OPERAND (t, 1),
15132 strict_overflow_p);
15133
15134 case tcc_unary:
15135 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15136 TREE_TYPE (t),
15137 TREE_OPERAND (t, 0),
15138 strict_overflow_p);
15139
15140 case tcc_constant:
15141 case tcc_declaration:
15142 case tcc_reference:
15143 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15144
15145 default:
15146 break;
15147 }
15148
15149 switch (code)
15150 {
15151 case TRUTH_AND_EXPR:
15152 case TRUTH_OR_EXPR:
15153 case TRUTH_XOR_EXPR:
15154 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15155 TREE_TYPE (t),
15156 TREE_OPERAND (t, 0),
15157 TREE_OPERAND (t, 1),
15158 strict_overflow_p);
15159 case TRUTH_NOT_EXPR:
15160 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15161 TREE_TYPE (t),
15162 TREE_OPERAND (t, 0),
15163 strict_overflow_p);
15164
15165 case COND_EXPR:
15166 case CONSTRUCTOR:
15167 case OBJ_TYPE_REF:
15168 case ASSERT_EXPR:
15169 case ADDR_EXPR:
15170 case WITH_SIZE_EXPR:
15171 case SSA_NAME:
15172 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15173
15174 default:
15175 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15176 }
15177 }
15178
15179 /* Return true if `t' is known to be non-negative. Handle warnings
15180 about undefined signed overflow. */
15181
15182 bool
15183 tree_expr_nonnegative_p (tree t)
15184 {
15185 bool ret, strict_overflow_p;
15186
15187 strict_overflow_p = false;
15188 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15189 if (strict_overflow_p)
15190 fold_overflow_warning (("assuming signed overflow does not occur when "
15191 "determining that expression is always "
15192 "non-negative"),
15193 WARN_STRICT_OVERFLOW_MISC);
15194 return ret;
15195 }
15196
15197
15198 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15199 For floating point we further ensure that T is not denormal.
15200 Similar logic is present in nonzero_address in rtlanal.h.
15201
15202 If the return value is based on the assumption that signed overflow
15203 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15204 change *STRICT_OVERFLOW_P. */
15205
15206 bool
15207 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15208 bool *strict_overflow_p)
15209 {
15210 switch (code)
15211 {
15212 case ABS_EXPR:
15213 return tree_expr_nonzero_warnv_p (op0,
15214 strict_overflow_p);
15215
15216 case NOP_EXPR:
15217 {
15218 tree inner_type = TREE_TYPE (op0);
15219 tree outer_type = type;
15220
15221 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15222 && tree_expr_nonzero_warnv_p (op0,
15223 strict_overflow_p));
15224 }
15225 break;
15226
15227 case NON_LVALUE_EXPR:
15228 return tree_expr_nonzero_warnv_p (op0,
15229 strict_overflow_p);
15230
15231 default:
15232 break;
15233 }
15234
15235 return false;
15236 }
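
/* The precision check in the NOP_EXPR case above is what keeps
   narrowing conversions from being treated as nonzero-preserving.
   A widening cast cannot introduce a zero, but a truncating one can;
   for instance, with a 32-bit int:

     int x = 0x100;                        // nonzero
     unsigned char c = (unsigned char) x;  // truncated to 0

   hence only conversions whose outer precision is at least the inner
   precision propagate the nonzero property of OP0.  */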
15237
15238 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15239 For floating point we further ensure that T is not denormal.
15240 Similar logic is present in nonzero_address_p in rtlanal.c.
15241
15242 If the return value is based on the assumption that signed overflow
15243 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15244 change *STRICT_OVERFLOW_P. */
15245
15246 bool
15247 tree_binary_nonzero_warnv_p (enum tree_code code,
15248 tree type,
15249 tree op0,
15250 tree op1, bool *strict_overflow_p)
15251 {
15252 bool sub_strict_overflow_p;
15253 switch (code)
15254 {
15255 case POINTER_PLUS_EXPR:
15256 case PLUS_EXPR:
15257 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15258 {
15259 /* In the presence of negative values it is hard
15260 to say anything.  */
15261 sub_strict_overflow_p = false;
15262 if (!tree_expr_nonnegative_warnv_p (op0,
15263 &sub_strict_overflow_p)
15264 || !tree_expr_nonnegative_warnv_p (op1,
15265 &sub_strict_overflow_p))
15266 return false;
15267 /* Both operands are known non-negative, so the sum is nonzero
15268 whenever at least one of them is. We don't set *STRICT_OVERFLOW_P
15269 here: even if the addition wraps, on a two's-complement machine the
15270 sum of two nonnegative numbers, one of them nonzero, can never be zero. */
15271 return (tree_expr_nonzero_warnv_p (op0,
15272 strict_overflow_p)
15273 || tree_expr_nonzero_warnv_p (op1,
15274 strict_overflow_p));
15275 }
15276 break;
15277
15278 case MULT_EXPR:
15279 if (TYPE_OVERFLOW_UNDEFINED (type))
15280 {
15281 if (tree_expr_nonzero_warnv_p (op0,
15282 strict_overflow_p)
15283 && tree_expr_nonzero_warnv_p (op1,
15284 strict_overflow_p))
15285 {
15286 *strict_overflow_p = true;
15287 return true;
15288 }
15289 }
15290 break;
15291
15292 case MIN_EXPR:
15293 sub_strict_overflow_p = false;
15294 if (tree_expr_nonzero_warnv_p (op0,
15295 &sub_strict_overflow_p)
15296 && tree_expr_nonzero_warnv_p (op1,
15297 &sub_strict_overflow_p))
15298 {
15299 if (sub_strict_overflow_p)
15300 *strict_overflow_p = true;
/* MIN is always one of its operands, so when both operands
   are nonzero the minimum must be nonzero as well.  */
return true;
15301 }
15302 break;
15303
15304 case MAX_EXPR:
15305 sub_strict_overflow_p = false;
15306 if (tree_expr_nonzero_warnv_p (op0,
15307 &sub_strict_overflow_p))
15308 {
15309 if (sub_strict_overflow_p)
15310 *strict_overflow_p = true;
15311
15312 /* When both operands are nonzero, then MAX must be too. */
15313 if (tree_expr_nonzero_warnv_p (op1,
15314 strict_overflow_p))
15315 return true;
15316
15317 /* MAX where operand 0 is positive is positive. */
15318 return tree_expr_nonnegative_warnv_p (op0,
15319 strict_overflow_p);
15320 }
15321 /* MAX where operand 1 is positive is positive. */
15322 else if (tree_expr_nonzero_warnv_p (op1,
15323 &sub_strict_overflow_p)
15324 && tree_expr_nonnegative_warnv_p (op1,
15325 &sub_strict_overflow_p))
15326 {
15327 if (sub_strict_overflow_p)
15328 *strict_overflow_p = true;
15329 return true;
15330 }
15331 break;
15332
15333 case BIT_IOR_EXPR:
15334 return (tree_expr_nonzero_warnv_p (op1,
15335 strict_overflow_p)
15336 || tree_expr_nonzero_warnv_p (op0,
15337 strict_overflow_p));
15338
15339 default:
15340 break;
15341 }
15342
15343 return false;
15344 }
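
/* A worked example for the PLUS_EXPR reasoning above, in 8-bit two's
   complement for brevity: with both addends known non-negative (range
   [0, 127]) and at least one of them nonzero, the mathematical sum lies
   in [1, 254]. It can wrap to a negative value (127 + 1 == -128) but
   never all the way to zero, which is why *STRICT_OVERFLOW_P is left
   untouched there. The MULT_EXPR case has no such guarantee (16 * 16
   wraps to 0 in 8 bits), so it must rely on overflow being undefined
   and sets the flag.  */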
15345
15346 /* Return true when T is an address and is known to be nonzero.
15347 For floating point we further ensure that T is not denormal.
15348 Similar logic is present in nonzero_address_p in rtlanal.c.
15349
15350 If the return value is based on the assumption that signed overflow
15351 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15352 change *STRICT_OVERFLOW_P. */
15353
15354 bool
15355 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15356 {
15357 bool sub_strict_overflow_p;
15358 switch (TREE_CODE (t))
15359 {
15360 case INTEGER_CST:
15361 return !integer_zerop (t);
15362
15363 case ADDR_EXPR:
15364 {
15365 tree base = TREE_OPERAND (t, 0);
15366
15367 if (!DECL_P (base))
15368 base = get_base_address (base);
15369
15370 if (!base)
15371 return false;
15372
15373 /* For objects in the symbol table, check whether we know they are non-zero.
15374 Don't do anything for variables and functions before the symtab is built;
15375 it is quite possible that they will be declared weak later.  */
15376 if (DECL_P (base) && decl_in_symtab_p (base))
15377 {
15378 struct symtab_node *symbol;
15379
15380 symbol = symtab_node::get_create (base);
15381 if (symbol)
15382 return symbol->nonzero_address ();
15383 else
15384 return false;
15385 }
15386
15387 /* Function local objects are never NULL. */
15388 if (DECL_P (base)
15389 && (DECL_CONTEXT (base)
15390 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15391 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15392 return true;
15393
15394 /* Constants are never weak. */
15395 if (CONSTANT_CLASS_P (base))
15396 return true;
15397
15398 return false;
15399 }
15400
15401 case COND_EXPR:
15402 sub_strict_overflow_p = false;
15403 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15404 &sub_strict_overflow_p)
15405 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15406 &sub_strict_overflow_p))
15407 {
15408 if (sub_strict_overflow_p)
15409 *strict_overflow_p = true;
15410 return true;
15411 }
15412 break;
15413
15414 default:
15415 break;
15416 }
15417 return false;
15418 }
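
/* Illustrative ADDR_EXPR inputs for the function above, assuming the
   declarations shown:

     int local;              // an auto variable inside some function
     extern int maybe_weak;  // might still be declared weak later

   &local is known nonzero, since function-local objects are never
   NULL, while &maybe_weak has to be referred to the symbol table:
   a weak symbol may legitimately resolve to address zero.  */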
15419
15420 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15421 attempt to fold the expression to a constant without modifying TYPE,
15422 OP0 or OP1.
15423
15424 If the expression can be simplified to a constant, return the
15425 constant. If the expression cannot be simplified to a
15426 constant, return NULL_TREE. */
15427
15428 tree
15429 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15430 {
15431 tree tem = fold_binary (code, type, op0, op1);
15432 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15433 }
15434
15435 /* Given the components of a unary expression CODE, TYPE and OP0,
15436 attempt to fold the expression to a constant without modifying
15437 TYPE or OP0.
15438
15439 If the expression can be simplified to a constant, return the
15440 constant. If the expression cannot be simplified to a
15441 constant, return NULL_TREE. */
15442
15443 tree
15444 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15445 {
15446 tree tem = fold_unary (code, type, op0);
15447 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15448 }
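
/* A minimal usage sketch for the two helpers above, assuming the
   standard global tree nodes are initialized:

     tree two = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                         integer_one_node,
                                         integer_one_node);
     // two is the INTEGER_CST 2

     tree minus_one = fold_unary_to_constant (NEGATE_EXPR,
                                              integer_type_node,
                                              integer_one_node);
     // minus_one is the INTEGER_CST -1

   Either helper yields NULL_TREE when folding does not produce a
   TREE_CONSTANT result.  */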
15449
15450 /* If EXP represents referencing an element in a constant string
15451 (either via pointer arithmetic or array indexing), return the
15452 tree representing the value accessed, otherwise return NULL. */
15453
15454 tree
15455 fold_read_from_constant_string (tree exp)
15456 {
15457 if ((TREE_CODE (exp) == INDIRECT_REF
15458 || TREE_CODE (exp) == ARRAY_REF)
15459 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15460 {
15461 tree exp1 = TREE_OPERAND (exp, 0);
15462 tree index;
15463 tree string;
15464 location_t loc = EXPR_LOCATION (exp);
15465
15466 if (TREE_CODE (exp) == INDIRECT_REF)
15467 string = string_constant (exp1, &index);
15468 else
15469 {
15470 tree low_bound = array_ref_low_bound (exp);
15471 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15472
15473 /* Optimize the special-case of a zero lower bound.
15474
15475 We convert the low_bound to sizetype to avoid some problems
15476 with constant folding. (E.g. suppose the lower bound is 1,
15477 and its mode is QI. Without the conversion, (ARRAY
15478 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15479 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15480 if (! integer_zerop (low_bound))
15481 index = size_diffop_loc (loc, index,
15482 fold_convert_loc (loc, sizetype, low_bound));
15483
15484 string = exp1;
15485 }
15486
15487 if (string
15488 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15489 && TREE_CODE (string) == STRING_CST
15490 && TREE_CODE (index) == INTEGER_CST
15491 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15492 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15493 == MODE_INT)
15494 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15495 return build_int_cst_type (TREE_TYPE (exp),
15496 (TREE_STRING_POINTER (string)
15497 [TREE_INT_CST_LOW (index)]));
15498 }
15499 return NULL;
15500 }
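
/* For example, given the C fragment

     const char s[] = "abc";
     ... s[1] ...

   the reference over the STRING_CST "abc" with constant index 1 folds
   to the INTEGER_CST 'b'. The guards above keep the index inside
   TREE_STRING_LENGTH and restrict the fold to single-byte integer
   element types, so wide-character strings are left alone.  */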
15501
15502 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15503 an integer constant, real, or fixed-point constant.
15504
15505 TYPE is the type of the result. */
15506
15507 static tree
15508 fold_negate_const (tree arg0, tree type)
15509 {
15510 tree t = NULL_TREE;
15511
15512 switch (TREE_CODE (arg0))
15513 {
15514 case INTEGER_CST:
15515 {
15516 bool overflow;
15517 wide_int val = wi::neg (arg0, &overflow);
15518 t = force_fit_type (type, val, 1,
15519 (overflow | TREE_OVERFLOW (arg0))
15520 && !TYPE_UNSIGNED (type));
15521 break;
15522 }
15523
15524 case REAL_CST:
15525 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15526 break;
15527
15528 case FIXED_CST:
15529 {
15530 FIXED_VALUE_TYPE f;
15531 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15532 &(TREE_FIXED_CST (arg0)), NULL,
15533 TYPE_SATURATING (type));
15534 t = build_fixed (type, f);
15535 /* Propagate overflow flags. */
15536 if (overflow_p | TREE_OVERFLOW (arg0))
15537 TREE_OVERFLOW (t) = 1;
15538 break;
15539 }
15540
15541 default:
15542 gcc_unreachable ();
15543 }
15544
15545 return t;
15546 }
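
/* The INTEGER_CST case above is where negation overflow shows up: for
   a signed 32-bit type, negating INT_MIN (-2147483648) wraps back to
   INT_MIN, so wi::neg reports overflow and force_fit_type marks the
   result with TREE_OVERFLOW. For unsigned types the flag is
   deliberately not set, since unsigned wraparound is well defined.  */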
15547
15548 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15549 an integer constant or real constant.
15550
15551 TYPE is the type of the result. */
15552
15553 tree
15554 fold_abs_const (tree arg0, tree type)
15555 {
15556 tree t = NULL_TREE;
15557
15558 switch (TREE_CODE (arg0))
15559 {
15560 case INTEGER_CST:
15561 {
15562 /* If the value is unsigned or non-negative, then the absolute value
15563 is the same as the ordinary value. */
15564 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15565 t = arg0;
15566
15567 /* If the value is negative, then the absolute value is
15568 its negation. */
15569 else
15570 {
15571 bool overflow;
15572 wide_int val = wi::neg (arg0, &overflow);
15573 t = force_fit_type (type, val, -1,
15574 overflow | TREE_OVERFLOW (arg0));
15575 }
15576 }
15577 break;
15578
15579 case REAL_CST:
15580 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15581 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15582 else
15583 t = arg0;
15584 break;
15585
15586 default:
15587 gcc_unreachable ();
15588 }
15589
15590 return t;
15591 }
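
/* As in fold_negate_const, the only interesting INTEGER_CST input is
   the most negative value: abs (INT_MIN) is not representable in the
   same signed type, so the negated result again carries
   TREE_OVERFLOW.  */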
15592
15593 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15594 constant. TYPE is the type of the result. */
15595
15596 static tree
15597 fold_not_const (const_tree arg0, tree type)
15598 {
15599 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15600
15601 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15602 }
15603
15604 /* Given CODE, a relational operator, the target type, TYPE and two
15605 constant operands OP0 and OP1, return the result of the
15606 relational operation. If the result is not a compile time
15607 constant, then return NULL_TREE. */
15608
15609 static tree
15610 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15611 {
15612 int result, invert;
15613
15614 /* From here on, the only cases we handle are when the result is
15615 known to be a constant. */
15616
15617 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15618 {
15619 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15620 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15621
15622 /* Handle the cases where either operand is a NaN. */
15623 if (real_isnan (c0) || real_isnan (c1))
15624 {
15625 switch (code)
15626 {
15627 case EQ_EXPR:
15628 case ORDERED_EXPR:
15629 result = 0;
15630 break;
15631
15632 case NE_EXPR:
15633 case UNORDERED_EXPR:
15634 case UNLT_EXPR:
15635 case UNLE_EXPR:
15636 case UNGT_EXPR:
15637 case UNGE_EXPR:
15638 case UNEQ_EXPR:
15639 result = 1;
15640 break;
15641
15642 case LT_EXPR:
15643 case LE_EXPR:
15644 case GT_EXPR:
15645 case GE_EXPR:
15646 case LTGT_EXPR:
15647 if (flag_trapping_math)
15648 return NULL_TREE;
15649 result = 0;
15650 break;
15651
15652 default:
15653 gcc_unreachable ();
15654 }
15655
15656 return constant_boolean_node (result, type);
15657 }
15658
15659 return constant_boolean_node (real_compare (code, c0, c1), type);
15660 }
15661
15662 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15663 {
15664 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15665 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15666 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15667 }
15668
15669 /* Handle equality/inequality of complex constants. */
15670 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15671 {
15672 tree rcond = fold_relational_const (code, type,
15673 TREE_REALPART (op0),
15674 TREE_REALPART (op1));
15675 tree icond = fold_relational_const (code, type,
15676 TREE_IMAGPART (op0),
15677 TREE_IMAGPART (op1));
15678 if (code == EQ_EXPR)
15679 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15680 else if (code == NE_EXPR)
15681 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15682 else
15683 return NULL_TREE;
15684 }
15685
15686 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15687 {
15688 unsigned count = VECTOR_CST_NELTS (op0);
15689 tree *elts = XALLOCAVEC (tree, count);
15690 gcc_assert (VECTOR_CST_NELTS (op1) == count
15691 && TYPE_VECTOR_SUBPARTS (type) == count);
15692
15693 for (unsigned i = 0; i < count; i++)
15694 {
15695 tree elem_type = TREE_TYPE (type);
15696 tree elem0 = VECTOR_CST_ELT (op0, i);
15697 tree elem1 = VECTOR_CST_ELT (op1, i);
15698
15699 tree tem = fold_relational_const (code, elem_type,
15700 elem0, elem1);
15701
15702 if (tem == NULL_TREE)
15703 return NULL_TREE;
15704
15705 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15706 }
15707
15708 return build_vector (type, elts);
15709 }
15710
15711 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15712
15713 To compute GT, swap the arguments and do LT.
15714 To compute GE, do LT and invert the result.
15715 To compute LE, swap the arguments, do LT and invert the result.
15716 To compute NE, do EQ and invert the result.
15717
15718 Therefore, the code below must handle only EQ and LT. */
15719
15720 if (code == LE_EXPR || code == GT_EXPR)
15721 {
15722 tree tem = op0;
15723 op0 = op1;
15724 op1 = tem;
15725 code = swap_tree_comparison (code);
15726 }
15727
15728 /* Note that it is safe to invert for real values here because we
15729 have already handled the one case where it matters. */
15730
15731 invert = 0;
15732 if (code == NE_EXPR || code == GE_EXPR)
15733 {
15734 invert = 1;
15735 code = invert_tree_comparison (code, false);
15736 }
15737
15738 /* Compute a result for LT or EQ if the arguments permit;
15739 otherwise return NULL_TREE. */
15740 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15741 {
15742 if (code == EQ_EXPR)
15743 result = tree_int_cst_equal (op0, op1);
15744 else
15745 result = tree_int_cst_lt (op0, op1);
15746 }
15747 else
15748 return NULL_TREE;
15749
15750 if (invert)
15751 result ^= 1;
15752 return constant_boolean_node (result, type);
15753 }
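
/* Two representative foldings for the function above:

     1.0 < NaN  folds to false (0) only under -fno-trapping-math;
                with trapping math the comparison must be kept, since
                an ordered compare on a NaN raises an invalid-operation
                exception.
     {1,2} == {1,3}  on vector constants folds element-wise to the
                mask {-1, 0}, all-ones denoting true per the
                build_int_cst (elem_type, ... -1) convention above.  */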
15754
15755 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15756 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15757 itself. */
15758
15759 tree
15760 fold_build_cleanup_point_expr (tree type, tree expr)
15761 {
15762 /* If the expression does not have side effects then we don't have to wrap
15763 it with a cleanup point expression. */
15764 if (!TREE_SIDE_EFFECTS (expr))
15765 return expr;
15766
15767 /* If the expression is a RETURN_EXPR, check whether the operand of the
15768 return, or the right-hand side of the MODIFY_EXPR inside it, is free of
15769 side effects. If either is, we don't need to wrap the expression in a
15770 cleanup point expression. Note we don't check the left-hand side of the
15771 modify because it should always be the return decl. */
15772 if (TREE_CODE (expr) == RETURN_EXPR)
15773 {
15774 tree op = TREE_OPERAND (expr, 0);
15775 if (!op || !TREE_SIDE_EFFECTS (op))
15776 return expr;
15777 op = TREE_OPERAND (op, 1);
15778 if (!TREE_SIDE_EFFECTS (op))
15779 return expr;
15780 }
15781
15782 return build1 (CLEANUP_POINT_EXPR, type, expr);
15783 }
15784
15785 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15786 of an indirection through OP0, or NULL_TREE if no simplification is
15787 possible. */
15788
15789 tree
15790 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15791 {
15792 tree sub = op0;
15793 tree subtype;
15794
15795 STRIP_NOPS (sub);
15796 subtype = TREE_TYPE (sub);
15797 if (!POINTER_TYPE_P (subtype))
15798 return NULL_TREE;
15799
15800 if (TREE_CODE (sub) == ADDR_EXPR)
15801 {
15802 tree op = TREE_OPERAND (sub, 0);
15803 tree optype = TREE_TYPE (op);
15804 /* *&CONST_DECL -> to the value of the const decl. */
15805 if (TREE_CODE (op) == CONST_DECL)
15806 return DECL_INITIAL (op);
15807 /* *&p => p; make sure to handle *&"str"[cst] here. */
15808 if (type == optype)
15809 {
15810 tree fop = fold_read_from_constant_string (op);
15811 if (fop)
15812 return fop;
15813 else
15814 return op;
15815 }
15816 /* *(foo *)&fooarray => fooarray[0] */
15817 else if (TREE_CODE (optype) == ARRAY_TYPE
15818 && type == TREE_TYPE (optype)
15819 && (!in_gimple_form
15820 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15821 {
15822 tree type_domain = TYPE_DOMAIN (optype);
15823 tree min_val = size_zero_node;
15824 if (type_domain && TYPE_MIN_VALUE (type_domain))
15825 min_val = TYPE_MIN_VALUE (type_domain);
15826 if (in_gimple_form
15827 && TREE_CODE (min_val) != INTEGER_CST)
15828 return NULL_TREE;
15829 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15830 NULL_TREE, NULL_TREE);
15831 }
15832 /* *(foo *)&complexfoo => __real__ complexfoo */
15833 else if (TREE_CODE (optype) == COMPLEX_TYPE
15834 && type == TREE_TYPE (optype))
15835 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15836 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15837 else if (TREE_CODE (optype) == VECTOR_TYPE
15838 && type == TREE_TYPE (optype))
15839 {
15840 tree part_width = TYPE_SIZE (type);
15841 tree index = bitsize_int (0);
15842 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15843 }
15844 }
15845
15846 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15847 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15848 {
15849 tree op00 = TREE_OPERAND (sub, 0);
15850 tree op01 = TREE_OPERAND (sub, 1);
15851
15852 STRIP_NOPS (op00);
15853 if (TREE_CODE (op00) == ADDR_EXPR)
15854 {
15855 tree op00type;
15856 op00 = TREE_OPERAND (op00, 0);
15857 op00type = TREE_TYPE (op00);
15858
15859 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15860 if (TREE_CODE (op00type) == VECTOR_TYPE
15861 && type == TREE_TYPE (op00type))
15862 {
15863 HOST_WIDE_INT offset = tree_to_shwi (op01);
15864 tree part_width = TYPE_SIZE (type);
15865 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15866 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15867 tree index = bitsize_int (indexi);
15868
15869 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15870 return fold_build3_loc (loc,
15871 BIT_FIELD_REF, type, op00,
15872 part_width, index);
15873
15874 }
15875 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15876 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15877 && type == TREE_TYPE (op00type))
15878 {
15879 tree size = TYPE_SIZE_UNIT (type);
15880 if (tree_int_cst_equal (size, op01))
15881 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15882 }
15883 /* ((foo *)&fooarray)[1] => fooarray[1] */
15884 else if (TREE_CODE (op00type) == ARRAY_TYPE
15885 && type == TREE_TYPE (op00type))
15886 {
15887 tree type_domain = TYPE_DOMAIN (op00type);
15888 tree min_val = size_zero_node;
15889 if (type_domain && TYPE_MIN_VALUE (type_domain))
15890 min_val = TYPE_MIN_VALUE (type_domain);
15891 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15892 TYPE_SIZE_UNIT (type));
15893 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15894 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15895 NULL_TREE, NULL_TREE);
15896 }
15897 }
15898 }
15899
15900 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15901 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15902 && type == TREE_TYPE (TREE_TYPE (subtype))
15903 && (!in_gimple_form
15904 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15905 {
15906 tree type_domain;
15907 tree min_val = size_zero_node;
15908 sub = build_fold_indirect_ref_loc (loc, sub);
15909 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15910 if (type_domain && TYPE_MIN_VALUE (type_domain))
15911 min_val = TYPE_MIN_VALUE (type_domain);
15912 if (in_gimple_form
15913 && TREE_CODE (min_val) != INTEGER_CST)
15914 return NULL_TREE;
15915 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15916 NULL_TREE);
15917 }
15918
15919 return NULL_TREE;
15920 }
15921
15922 /* Builds an expression for an indirection through T, simplifying some
15923 cases. */
15924
15925 tree
15926 build_fold_indirect_ref_loc (location_t loc, tree t)
15927 {
15928 tree type = TREE_TYPE (TREE_TYPE (t));
15929 tree sub = fold_indirect_ref_1 (loc, type, t);
15930
15931 if (sub)
15932 return sub;
15933
15934 return build1_loc (loc, INDIRECT_REF, type, t);
15935 }
15936
15937 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15938
15939 tree
15940 fold_indirect_ref_loc (location_t loc, tree t)
15941 {
15942 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15943
15944 if (sub)
15945 return sub;
15946 else
15947 return t;
15948 }
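
/* A minimal sketch, assuming T is the INDIRECT_REF *(int *)&a for a
   declaration int a[4]:

     tree folded = fold_indirect_ref_loc (loc, t);
     // folded is the ARRAY_REF a[0], per the fooarray case above

   When no pattern in fold_indirect_ref_1 matches, the original
   INDIRECT_REF is returned unchanged.  */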
15949
15950 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15951 whose result is ignored. The type of the returned tree need not be
15952 the same as the original expression. */
15953
15954 tree
15955 fold_ignored_result (tree t)
15956 {
15957 if (!TREE_SIDE_EFFECTS (t))
15958 return integer_zero_node;
15959
15960 for (;;)
15961 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15962 {
15963 case tcc_unary:
15964 t = TREE_OPERAND (t, 0);
15965 break;
15966
15967 case tcc_binary:
15968 case tcc_comparison:
15969 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15970 t = TREE_OPERAND (t, 0);
15971 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15972 t = TREE_OPERAND (t, 1);
15973 else
15974 return t;
15975 break;
15976
15977 case tcc_expression:
15978 switch (TREE_CODE (t))
15979 {
15980 case COMPOUND_EXPR:
15981 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15982 return t;
15983 t = TREE_OPERAND (t, 0);
15984 break;
15985
15986 case COND_EXPR:
15987 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15988 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15989 return t;
15990 t = TREE_OPERAND (t, 0);
15991 break;
15992
15993 default:
15994 return t;
15995 }
15996 break;
15997
15998 default:
15999 return t;
16000 }
16001 }
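
/* Examples for the loop above, writing x and y for side-effect-free
   values and f () for a call with side effects:

     fold_ignored_result (x + f ())  => f ()   // x is dropped
     fold_ignored_result (-f ())     => f ()
     fold_ignored_result (x + y)     => integer_zero_node, via the
                                        early exit, since the whole
                                        expression has no side effects. */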
16002
16003 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
16004
16005 tree
16006 round_up_loc (location_t loc, tree value, unsigned int divisor)
16007 {
16008 tree div = NULL_TREE;
16009
16010 if (divisor == 1)
16011 return value;
16012
16013 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16014 have to do anything. Only do this check when VALUE is not a
16015 constant, because for a constant the check is more expensive
16016 than simply doing the rounding. */
16017 if (TREE_CODE (value) != INTEGER_CST)
16018 {
16019 div = build_int_cst (TREE_TYPE (value), divisor);
16020
16021 if (multiple_of_p (TREE_TYPE (value), value, div))
16022 return value;
16023 }
16024
16025 /* If divisor is a power of two, simplify this to bit manipulation. */
16026 if (divisor == (divisor & -divisor))
16027 {
16028 if (TREE_CODE (value) == INTEGER_CST)
16029 {
16030 wide_int val = value;
16031 bool overflow_p;
16032
16033 if ((val & (divisor - 1)) == 0)
16034 return value;
16035
16036 overflow_p = TREE_OVERFLOW (value);
16037 val += divisor - 1;
16038 val &= - (int) divisor;
16039 if (val == 0)
16040 overflow_p = true;
16041
16042 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
16043 }
16044 else
16045 {
16046 tree t;
16047
16048 t = build_int_cst (TREE_TYPE (value), divisor - 1);
16049 value = size_binop_loc (loc, PLUS_EXPR, value, t);
16050 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
16051 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16052 }
16053 }
16054 else
16055 {
16056 if (!div)
16057 div = build_int_cst (TREE_TYPE (value), divisor);
16058 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16059 value = size_binop_loc (loc, MULT_EXPR, value, div);
16060 }
16061
16062 return value;
16063 }
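
/* A worked example of the power-of-two path above, with DIVISOR == 8:

     value 13: (13 & 7) != 0, so 13 + 7 == 20 and 20 & -8 == 16;
     value 16: (16 & 7) == 0, so 16 is returned unchanged.

   For a non-constant VALUE the same computation is emitted
   symbolically as the PLUS_EXPR / BIT_AND_EXPR pair of
   size_binop_loc calls.  */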
16064
16065 /* Likewise, but round down. */
16066
16067 tree
16068 round_down_loc (location_t loc, tree value, int divisor)
16069 {
16070 tree div = NULL_TREE;
16071
16072 gcc_assert (divisor > 0);
16073 if (divisor == 1)
16074 return value;
16075
16076 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16077 have to do anything. Only do this check when VALUE is not a
16078 constant, because for a constant the check is more expensive
16079 than simply doing the rounding. */
16080 if (TREE_CODE (value) != INTEGER_CST)
16081 {
16082 div = build_int_cst (TREE_TYPE (value), divisor);
16083
16084 if (multiple_of_p (TREE_TYPE (value), value, div))
16085 return value;
16086 }
16087
16088 /* If divisor is a power of two, simplify this to bit manipulation. */
16089 if (divisor == (divisor & -divisor))
16090 {
16091 tree t;
16092
16093 t = build_int_cst (TREE_TYPE (value), -divisor);
16094 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16095 }
16096 else
16097 {
16098 if (!div)
16099 div = build_int_cst (TREE_TYPE (value), divisor);
16100 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16101 value = size_binop_loc (loc, MULT_EXPR, value, div);
16102 }
16103
16104 return value;
16105 }
16106
16107 /* Returns a pointer to the base of the object addressed by EXP and
16108 extracts the information about the offset of the access, storing it
16109 in *PBITPOS and *POFFSET. */
16110
16111 static tree
16112 split_address_to_core_and_offset (tree exp,
16113 HOST_WIDE_INT *pbitpos, tree *poffset)
16114 {
16115 tree core;
16116 machine_mode mode;
16117 int unsignedp, volatilep;
16118 HOST_WIDE_INT bitsize;
16119 location_t loc = EXPR_LOCATION (exp);
16120
16121 if (TREE_CODE (exp) == ADDR_EXPR)
16122 {
16123 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16124 poffset, &mode, &unsignedp, &volatilep,
16125 false);
16126 core = build_fold_addr_expr_loc (loc, core);
16127 }
16128 else
16129 {
16130 core = exp;
16131 *pbitpos = 0;
16132 *poffset = NULL_TREE;
16133 }
16134
16135 return core;
16136 }
16137
16138 /* Returns true if addresses of E1 and E2 differ by a constant, false
16139 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16140
16141 bool
16142 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16143 {
16144 tree core1, core2;
16145 HOST_WIDE_INT bitpos1, bitpos2;
16146 tree toffset1, toffset2, tdiff, type;
16147
16148 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16149 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16150
16151 if (bitpos1 % BITS_PER_UNIT != 0
16152 || bitpos2 % BITS_PER_UNIT != 0
16153 || !operand_equal_p (core1, core2, 0))
16154 return false;
16155
16156 if (toffset1 && toffset2)
16157 {
16158 type = TREE_TYPE (toffset1);
16159 if (type != TREE_TYPE (toffset2))
16160 toffset2 = fold_convert (type, toffset2);
16161
16162 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16163 if (!cst_and_fits_in_hwi (tdiff))
16164 return false;
16165
16166 *diff = int_cst_value (tdiff);
16167 }
16168 else if (toffset1 || toffset2)
16169 {
16170 /* If only one of the offsets is non-constant, the difference cannot
16171 be a constant. */
16172 return false;
16173 }
16174 else
16175 *diff = 0;
16176
16177 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16178 return true;
16179 }
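
/* A minimal sketch, assuming the declaration int a[10]: for the
   addresses &a[3] and &a[1], both split to the common core &a with
   byte offsets 12 and 4, so the function stores 8 (two ints) in *DIFF
   and returns true. If exactly one of the offsets is non-constant, or
   the difference does not fit in a HOST_WIDE_INT, it returns false.  */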
16180
16181 /* Simplify the floating point expression EXP when the sign of the
16182 result is not significant. Return NULL_TREE if no simplification
16183 is possible. */
16184
16185 tree
16186 fold_strip_sign_ops (tree exp)
16187 {
16188 tree arg0, arg1;
16189 location_t loc = EXPR_LOCATION (exp);
16190
16191 switch (TREE_CODE (exp))
16192 {
16193 case ABS_EXPR:
16194 case NEGATE_EXPR:
16195 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16196 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16197
16198 case MULT_EXPR:
16199 case RDIV_EXPR:
16200 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16201 return NULL_TREE;
16202 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16203 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16204 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16205 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16206 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16207 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16208 break;
16209
16210 case COMPOUND_EXPR:
16211 arg0 = TREE_OPERAND (exp, 0);
16212 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16213 if (arg1)
16214 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16215 break;
16216
16217 case COND_EXPR:
16218 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16219 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16220 if (arg0 || arg1)
16221 return fold_build3_loc (loc,
16222 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16223 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16224 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16225 break;
16226
16227 case CALL_EXPR:
16228 {
16229 const enum built_in_function fcode = builtin_mathfn_code (exp);
16230 switch (fcode)
16231 {
16232 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16233 /* Strip copysign function call, return the 1st argument. */
16234 arg0 = CALL_EXPR_ARG (exp, 0);
16235 arg1 = CALL_EXPR_ARG (exp, 1);
16236 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16237
16238 default:
16239 /* Strip sign ops from the argument of "odd" math functions. */
16240 if (negate_mathfn_p (fcode))
16241 {
16242 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16243 if (arg0)
16244 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16245 }
16246 break;
16247 }
16248 }
16249 break;
16250
16251 default:
16252 break;
16253 }
16254 return NULL_TREE;
16255 }
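
/* Representative foldings for the function above, valid only because
   the caller has declared the sign of the result insignificant (and,
   for the MULT_EXPR case, sign-dependent rounding is not honored):

     -x              => x
     fabs (x)        => x
     -x * -y         => x * y
     copysign (x, y) => x       (y is retained for its side effects via
                                 omit_one_operand_loc)
     sin (-x)        => sin (x) (sin is odd, so only the sign of the
                                 result changes)  */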
16256
16257 /* Return OFF converted to a pointer offset type suitable as offset for
16258 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16259 tree
16260 convert_to_ptrofftype_loc (location_t loc, tree off)
16261 {
16262 return fold_convert_loc (loc, sizetype, off);
16263 }
16264
16265 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16266 tree
16267 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16268 {
16269 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16270 ptr, convert_to_ptrofftype_loc (loc, off));
16271 }
16272
16273 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16274 tree
16275 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16276 {
16277 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16278 ptr, size_int (off));
16279 }