1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2015 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24   @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_kind and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "tm.h"
47 #include "flags.h"
48 #include "input.h"
49 #include "alias.h"
50 #include "symtab.h"
51 #include "tree.h"
52 #include "fold-const.h"
53 #include "stor-layout.h"
54 #include "calls.h"
55 #include "tree-iterator.h"
56 #include "realmpfr.h"
57 #include "rtl.h"
58 #include "hard-reg-set.h"
59 #include "function.h"
60 #include "insn-config.h"
61 #include "expmed.h"
62 #include "dojump.h"
63 #include "explow.h"
64 #include "emit-rtl.h"
65 #include "varasm.h"
66 #include "stmt.h"
67 #include "expr.h"
68 #include "tm_p.h"
69 #include "target.h"
70 #include "diagnostic-core.h"
71 #include "intl.h"
72 #include "langhooks.h"
73 #include "md5.h"
74 #include "predict.h"
75 #include "basic-block.h"
76 #include "tree-ssa-alias.h"
77 #include "internal-fn.h"
78 #include "tree-eh.h"
79 #include "gimple-expr.h"
80 #include "is-a.h"
81 #include "gimple.h"
82 #include "gimplify.h"
83 #include "tree-dfa.h"
84 #include "builtins.h"
85 #include "plugin-api.h"
86 #include "ipa-ref.h"
87 #include "cgraph.h"
88 #include "generic-match.h"
89 #include "optabs.h"
90
91 /* Nonzero if we are folding constants inside an initializer; zero
92 otherwise. */
93 int folding_initializer = 0;
94
95 /* The following constants represent a bit based encoding of GCC's
96 comparison operators. This encoding simplifies transformations
97 on relational comparison operators, such as AND and OR. */
98 enum comparison_code {
99 COMPCODE_FALSE = 0,
100 COMPCODE_LT = 1,
101 COMPCODE_EQ = 2,
102 COMPCODE_LE = 3,
103 COMPCODE_GT = 4,
104 COMPCODE_LTGT = 5,
105 COMPCODE_GE = 6,
106 COMPCODE_ORD = 7,
107 COMPCODE_UNORD = 8,
108 COMPCODE_UNLT = 9,
109 COMPCODE_UNEQ = 10,
110 COMPCODE_UNLE = 11,
111 COMPCODE_UNGT = 12,
112 COMPCODE_NE = 13,
113 COMPCODE_UNGE = 14,
114 COMPCODE_TRUE = 15
115 };
116
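/* The encoding uses one bit per primitive relation: bit 0 for "less",
   bit 1 for "equal", bit 2 for "greater" and bit 3 for "unordered",
   so ORing two codes yields the code of the ORed predicates.  An
   illustrative sketch (not compiled into anything):

     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)

   This is what lets (a < b || a == b) be combined into (a <= b):
   map both comparisons with comparison_to_compcode, OR the codes,
   and map the result back with compcode_to_comparison.  */
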
117 static bool negate_mathfn_p (enum built_in_function);
118 static bool negate_expr_p (tree);
119 static tree negate_expr (tree);
120 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
121 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
122 static enum comparison_code comparison_to_compcode (enum tree_code);
123 static enum tree_code compcode_to_comparison (enum comparison_code);
124 static int operand_equal_for_comparison_p (tree, tree, tree);
125 static int twoval_comparison_p (tree, tree *, tree *, int *);
126 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
127 static tree distribute_bit_expr (location_t, enum tree_code, tree, tree, tree);
128 static tree make_bit_field_ref (location_t, tree, tree,
129 HOST_WIDE_INT, HOST_WIDE_INT, int);
130 static tree optimize_bit_field_compare (location_t, enum tree_code,
131 tree, tree, tree);
132 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
133 HOST_WIDE_INT *,
134 machine_mode *, int *, int *,
135 tree *, tree *);
136 static int simple_operand_p (const_tree);
137 static bool simple_operand_p_2 (tree);
138 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
139 static tree range_predecessor (tree);
140 static tree range_successor (tree);
141 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
142 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
143 static tree unextend (tree, int, int, tree);
144 static tree optimize_minmax_comparison (location_t, enum tree_code,
145 tree, tree, tree);
146 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
147 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
148 static tree fold_binary_op_with_conditional_arg (location_t,
149 enum tree_code, tree,
150 tree, tree,
151 tree, tree, int);
152 static tree fold_mathfn_compare (location_t,
153 enum built_in_function, enum tree_code,
154 tree, tree, tree);
155 static tree fold_inf_compare (location_t, enum tree_code, tree, tree, tree);
156 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
157 static bool reorder_operands_p (const_tree, const_tree);
158 static tree fold_negate_const (tree, tree);
159 static tree fold_not_const (const_tree, tree);
160 static tree fold_relational_const (enum tree_code, tree, tree, tree);
161 static tree fold_convert_const (enum tree_code, tree, tree);
162 static tree fold_view_convert_expr (tree, tree);
163 static bool vec_cst_ctor_to_array (tree, tree *);
164
165
166 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
167 Otherwise, return LOC. */
168
169 static location_t
170 expr_location_or (tree t, location_t loc)
171 {
172 location_t tloc = EXPR_LOCATION (t);
173 return tloc == UNKNOWN_LOCATION ? loc : tloc;
174 }
175
176 /* Similar to protected_set_expr_location, but never modify x in place;
177    if the location can and needs to be set, unshare x first.  */
178
179 static inline tree
180 protected_set_expr_location_unshare (tree x, location_t loc)
181 {
182 if (CAN_HAVE_LOCATION_P (x)
183 && EXPR_LOCATION (x) != loc
184 && !(TREE_CODE (x) == SAVE_EXPR
185 || TREE_CODE (x) == TARGET_EXPR
186 || TREE_CODE (x) == BIND_EXPR))
187 {
188 x = copy_node (x);
189 SET_EXPR_LOCATION (x, loc);
190 }
191 return x;
192 }
193 \f
194 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
195 division and returns the quotient. Otherwise returns
196 NULL_TREE. */
197
198 tree
199 div_if_zero_remainder (const_tree arg1, const_tree arg2)
200 {
201 widest_int quo;
202
203 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
204 SIGNED, &quo))
205 return wide_int_to_tree (TREE_TYPE (arg1), quo);
206
207 return NULL_TREE;
208 }
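
/* For example (an illustrative sketch; both arguments must be
   INTEGER_CSTs):

     tree twelve = build_int_cst (integer_type_node, 12);
     tree four = build_int_cst (integer_type_node, 4);
     div_if_zero_remainder (twelve, four);    twelve/four == 3, so 3
     div_if_zero_remainder (four, twelve);    nonzero remainder, so NULL_TREE  */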
209 \f
210 /* This is nonzero if we should defer warnings about undefined
211 overflow. This facility exists because these warnings are a
212 special case. The code to estimate loop iterations does not want
213 to issue any warnings, since it works with expressions which do not
214 occur in user code. Various bits of cleanup code call fold(), but
215 only use the result if it has certain characteristics (e.g., is a
216 constant); that code only wants to issue a warning if the result is
217 used. */
218
219 static int fold_deferring_overflow_warnings;
220
221 /* If a warning about undefined overflow is deferred, this is the
222 warning. Note that this may cause us to turn two warnings into
223 one, but that is fine since it is sufficient to only give one
224 warning per expression. */
225
226 static const char* fold_deferred_overflow_warning;
227
228 /* If a warning about undefined overflow is deferred, this is the
229 level at which the warning should be emitted. */
230
231 static enum warn_strict_overflow_code fold_deferred_overflow_code;
232
233 /* Start deferring overflow warnings. We could use a stack here to
234 permit nested calls, but at present it is not necessary. */
235
236 void
237 fold_defer_overflow_warnings (void)
238 {
239 ++fold_deferring_overflow_warnings;
240 }
241
242 /* Stop deferring overflow warnings. If there is a pending warning,
243 and ISSUE is true, then issue the warning if appropriate. STMT is
244 the statement with which the warning should be associated (used for
245 location information); STMT may be NULL. CODE is the level of the
246 warning--a warn_strict_overflow_code value. This function will use
247 the smaller of CODE and the deferred code when deciding whether to
248 issue the warning. CODE may be zero to mean to always use the
249 deferred code. */
250
251 void
252 fold_undefer_overflow_warnings (bool issue, const_gimple stmt, int code)
253 {
254 const char *warnmsg;
255 location_t locus;
256
257 gcc_assert (fold_deferring_overflow_warnings > 0);
258 --fold_deferring_overflow_warnings;
259 if (fold_deferring_overflow_warnings > 0)
260 {
261 if (fold_deferred_overflow_warning != NULL
262 && code != 0
263 && code < (int) fold_deferred_overflow_code)
264 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
265 return;
266 }
267
268 warnmsg = fold_deferred_overflow_warning;
269 fold_deferred_overflow_warning = NULL;
270
271 if (!issue || warnmsg == NULL)
272 return;
273
274 if (gimple_no_warning_p (stmt))
275 return;
276
277 /* Use the smallest code level when deciding to issue the
278 warning. */
279 if (code == 0 || code > (int) fold_deferred_overflow_code)
280 code = fold_deferred_overflow_code;
281
282 if (!issue_strict_overflow_warning (code))
283 return;
284
285 if (stmt == NULL)
286 locus = input_location;
287 else
288 locus = gimple_location (stmt);
289 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
290 }
291
292 /* Stop deferring overflow warnings, ignoring any deferred
293 warnings. */
294
295 void
296 fold_undefer_and_ignore_overflow_warnings (void)
297 {
298 fold_undefer_overflow_warnings (false, NULL, 0);
299 }
300
301 /* Whether we are deferring overflow warnings. */
302
303 bool
304 fold_deferring_overflow_warnings_p (void)
305 {
306 return fold_deferring_overflow_warnings > 0;
307 }
308
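/* A typical use of the deferral machinery is sketched below, where
   STMT stands for whatever statement the caller wants a warning
   attached to (illustrative only):

     fold_defer_overflow_warnings ();
     tree res = fold_binary (PLUS_EXPR, type, op0, op1);
     bool used = res != NULL_TREE && TREE_CODE (res) == INTEGER_CST;
     fold_undefer_overflow_warnings (used, stmt, 0);

   so that a -Wstrict-overflow diagnostic is only emitted if the
   folded result is actually kept.  */
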
309 /* This is called when we fold something based on the fact that signed
310 overflow is undefined. */
311
312 static void
313 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
314 {
315 if (fold_deferring_overflow_warnings > 0)
316 {
317 if (fold_deferred_overflow_warning == NULL
318 || wc < fold_deferred_overflow_code)
319 {
320 fold_deferred_overflow_warning = gmsgid;
321 fold_deferred_overflow_code = wc;
322 }
323 }
324 else if (issue_strict_overflow_warning (wc))
325 warning (OPT_Wstrict_overflow, gmsgid);
326 }
327 \f
328 /* Return true if the built-in mathematical function specified by CODE
329 is odd, i.e. -f(x) == f(-x). */
330
331 static bool
332 negate_mathfn_p (enum built_in_function code)
333 {
334 switch (code)
335 {
336 CASE_FLT_FN (BUILT_IN_ASIN):
337 CASE_FLT_FN (BUILT_IN_ASINH):
338 CASE_FLT_FN (BUILT_IN_ATAN):
339 CASE_FLT_FN (BUILT_IN_ATANH):
340 CASE_FLT_FN (BUILT_IN_CASIN):
341 CASE_FLT_FN (BUILT_IN_CASINH):
342 CASE_FLT_FN (BUILT_IN_CATAN):
343 CASE_FLT_FN (BUILT_IN_CATANH):
344 CASE_FLT_FN (BUILT_IN_CBRT):
345 CASE_FLT_FN (BUILT_IN_CPROJ):
346 CASE_FLT_FN (BUILT_IN_CSIN):
347 CASE_FLT_FN (BUILT_IN_CSINH):
348 CASE_FLT_FN (BUILT_IN_CTAN):
349 CASE_FLT_FN (BUILT_IN_CTANH):
350 CASE_FLT_FN (BUILT_IN_ERF):
351 CASE_FLT_FN (BUILT_IN_LLROUND):
352 CASE_FLT_FN (BUILT_IN_LROUND):
353 CASE_FLT_FN (BUILT_IN_ROUND):
354 CASE_FLT_FN (BUILT_IN_SIN):
355 CASE_FLT_FN (BUILT_IN_SINH):
356 CASE_FLT_FN (BUILT_IN_TAN):
357 CASE_FLT_FN (BUILT_IN_TANH):
358 CASE_FLT_FN (BUILT_IN_TRUNC):
359 return true;
360
361 CASE_FLT_FN (BUILT_IN_LLRINT):
362 CASE_FLT_FN (BUILT_IN_LRINT):
363 CASE_FLT_FN (BUILT_IN_NEARBYINT):
364 CASE_FLT_FN (BUILT_IN_RINT):
365 return !flag_rounding_math;
366
367 default:
368 break;
369 }
370 return false;
371 }
372
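/* For example, sin is odd, so fold_negate_expr below may rewrite
   -sin (x) as sin (-x).  The rint family is odd only when
   -frounding-math is off: when rounding towards -infinity,
   rint (0.5) == 0 but rint (-0.5) == -1, so -rint (x) and rint (-x)
   can differ.  */
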
373 /* Check whether we may negate an integer constant T without causing
374 overflow. */
375
376 bool
377 may_negate_without_overflow_p (const_tree t)
378 {
379 tree type;
380
381 gcc_assert (TREE_CODE (t) == INTEGER_CST);
382
383 type = TREE_TYPE (t);
384 if (TYPE_UNSIGNED (type))
385 return false;
386
387 return !wi::only_sign_bit_p (t);
388 }
389
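/* For a 32-bit signed type, the only value this rejects is INT_MIN
   (just the sign bit set), since -INT_MIN is not representable;
   unsigned types are rejected wholesale because negating any nonzero
   unsigned value wraps.  */
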
390 /* Determine whether an expression T can be cheaply negated using
391 the function negate_expr without introducing undefined overflow. */
392
393 static bool
394 negate_expr_p (tree t)
395 {
396 tree type;
397
398 if (t == 0)
399 return false;
400
401 type = TREE_TYPE (t);
402
403 STRIP_SIGN_NOPS (t);
404 switch (TREE_CODE (t))
405 {
406 case INTEGER_CST:
407 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
408 return true;
409
410 /* Check that -CST will not overflow type. */
411 return may_negate_without_overflow_p (t);
412 case BIT_NOT_EXPR:
413 return (INTEGRAL_TYPE_P (type)
414 && TYPE_OVERFLOW_WRAPS (type));
415
416 case FIXED_CST:
417 return true;
418
419 case NEGATE_EXPR:
420 return !TYPE_OVERFLOW_SANITIZED (type);
421
422 case REAL_CST:
423 /* We want to canonicalize to positive real constants. Pretend
424 that only negative ones can be easily negated. */
425 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
426
427 case COMPLEX_CST:
428 return negate_expr_p (TREE_REALPART (t))
429 && negate_expr_p (TREE_IMAGPART (t));
430
431 case VECTOR_CST:
432 {
433 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
434 return true;
435
436 int count = TYPE_VECTOR_SUBPARTS (type), i;
437
438 for (i = 0; i < count; i++)
439 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
440 return false;
441
442 return true;
443 }
444
445 case COMPLEX_EXPR:
446 return negate_expr_p (TREE_OPERAND (t, 0))
447 && negate_expr_p (TREE_OPERAND (t, 1));
448
449 case CONJ_EXPR:
450 return negate_expr_p (TREE_OPERAND (t, 0));
451
452 case PLUS_EXPR:
453 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
454 || HONOR_SIGNED_ZEROS (element_mode (type)))
455 return false;
456 /* -(A + B) -> (-B) - A. */
457 if (negate_expr_p (TREE_OPERAND (t, 1))
458 && reorder_operands_p (TREE_OPERAND (t, 0),
459 TREE_OPERAND (t, 1)))
460 return true;
461 /* -(A + B) -> (-A) - B. */
462 return negate_expr_p (TREE_OPERAND (t, 0));
463
464 case MINUS_EXPR:
465 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
466 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
467 && !HONOR_SIGNED_ZEROS (element_mode (type))
468 && reorder_operands_p (TREE_OPERAND (t, 0),
469 TREE_OPERAND (t, 1));
470
471 case MULT_EXPR:
472 if (TYPE_UNSIGNED (TREE_TYPE (t)))
473 break;
474
475 /* Fall through. */
476
477 case RDIV_EXPR:
478 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
479 return negate_expr_p (TREE_OPERAND (t, 1))
480 || negate_expr_p (TREE_OPERAND (t, 0));
481 break;
482
483 case TRUNC_DIV_EXPR:
484 case ROUND_DIV_EXPR:
485 case EXACT_DIV_EXPR:
486 /* In general we can't negate A / B, because if A is INT_MIN and
487 B is 1, we may turn this into INT_MIN / -1 which is undefined
488 and actually traps on some architectures. But if overflow is
489 	 undefined, we can negate, because - (INT_MIN / 1) is itself an
490 	 overflow, and undefined overflow may be assumed not to happen.  */
491 if (INTEGRAL_TYPE_P (TREE_TYPE (t)))
492 {
493 if (!TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t)))
494 break;
495 /* If overflow is undefined then we have to be careful because
496 we ask whether it's ok to associate the negate with the
497 division which is not ok for example for
498 -((a - b) / c) where (-(a - b)) / c may invoke undefined
499 overflow because of negating INT_MIN. So do not use
500 negate_expr_p here but open-code the two important cases. */
501 if (TREE_CODE (TREE_OPERAND (t, 0)) == NEGATE_EXPR
502 || (TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
503 && may_negate_without_overflow_p (TREE_OPERAND (t, 0))))
504 return true;
505 }
506 else if (negate_expr_p (TREE_OPERAND (t, 0)))
507 return true;
508 return negate_expr_p (TREE_OPERAND (t, 1));
509
510 case NOP_EXPR:
511       /* Rewrite -((double)float) as (double)(-float).  */
512 if (TREE_CODE (type) == REAL_TYPE)
513 {
514 tree tem = strip_float_extensions (t);
515 if (tem != t)
516 return negate_expr_p (tem);
517 }
518 break;
519
520 case CALL_EXPR:
521       /* Rewrite -f(x) as f(-x).  */
522 if (negate_mathfn_p (builtin_mathfn_code (t)))
523 return negate_expr_p (CALL_EXPR_ARG (t, 0));
524 break;
525
526 case RSHIFT_EXPR:
527 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
528 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
529 {
530 tree op1 = TREE_OPERAND (t, 1);
531 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
532 return true;
533 }
534 break;
535
536 default:
537 break;
538 }
539 return false;
540 }
541
542 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if
543    no simplification is possible.
544 If negate_expr_p would return true for T, NULL_TREE will never be
545 returned. */
546
547 static tree
548 fold_negate_expr (location_t loc, tree t)
549 {
550 tree type = TREE_TYPE (t);
551 tree tem;
552
553 switch (TREE_CODE (t))
554 {
555 /* Convert - (~A) to A + 1. */
556 case BIT_NOT_EXPR:
557 if (INTEGRAL_TYPE_P (type))
558 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
559 build_one_cst (type));
560 break;
561
562 case INTEGER_CST:
563 tem = fold_negate_const (t, type);
564 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
565 || (ANY_INTEGRAL_TYPE_P (type)
566 && !TYPE_OVERFLOW_TRAPS (type)
567 && TYPE_OVERFLOW_WRAPS (type))
568 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
569 return tem;
570 break;
571
572 case REAL_CST:
573 tem = fold_negate_const (t, type);
574 return tem;
575
576 case FIXED_CST:
577 tem = fold_negate_const (t, type);
578 return tem;
579
580 case COMPLEX_CST:
581 {
582 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
583 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
584 if (rpart && ipart)
585 return build_complex (type, rpart, ipart);
586 }
587 break;
588
589 case VECTOR_CST:
590 {
591 int count = TYPE_VECTOR_SUBPARTS (type), i;
592 tree *elts = XALLOCAVEC (tree, count);
593
594 for (i = 0; i < count; i++)
595 {
596 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
597 if (elts[i] == NULL_TREE)
598 return NULL_TREE;
599 }
600
601 return build_vector (type, elts);
602 }
603
604 case COMPLEX_EXPR:
605 if (negate_expr_p (t))
606 return fold_build2_loc (loc, COMPLEX_EXPR, type,
607 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
608 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
609 break;
610
611 case CONJ_EXPR:
612 if (negate_expr_p (t))
613 return fold_build1_loc (loc, CONJ_EXPR, type,
614 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
615 break;
616
617 case NEGATE_EXPR:
618 if (!TYPE_OVERFLOW_SANITIZED (type))
619 return TREE_OPERAND (t, 0);
620 break;
621
622 case PLUS_EXPR:
623 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
624 && !HONOR_SIGNED_ZEROS (element_mode (type)))
625 {
626 /* -(A + B) -> (-B) - A. */
627 if (negate_expr_p (TREE_OPERAND (t, 1))
628 && reorder_operands_p (TREE_OPERAND (t, 0),
629 TREE_OPERAND (t, 1)))
630 {
631 tem = negate_expr (TREE_OPERAND (t, 1));
632 return fold_build2_loc (loc, MINUS_EXPR, type,
633 tem, TREE_OPERAND (t, 0));
634 }
635
636 /* -(A + B) -> (-A) - B. */
637 if (negate_expr_p (TREE_OPERAND (t, 0)))
638 {
639 tem = negate_expr (TREE_OPERAND (t, 0));
640 return fold_build2_loc (loc, MINUS_EXPR, type,
641 tem, TREE_OPERAND (t, 1));
642 }
643 }
644 break;
645
646 case MINUS_EXPR:
647 /* - (A - B) -> B - A */
648 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
649 && !HONOR_SIGNED_ZEROS (element_mode (type))
650 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
651 return fold_build2_loc (loc, MINUS_EXPR, type,
652 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
653 break;
654
655 case MULT_EXPR:
656 if (TYPE_UNSIGNED (type))
657 break;
658
659 /* Fall through. */
660
661 case RDIV_EXPR:
662 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
663 {
664 tem = TREE_OPERAND (t, 1);
665 if (negate_expr_p (tem))
666 return fold_build2_loc (loc, TREE_CODE (t), type,
667 TREE_OPERAND (t, 0), negate_expr (tem));
668 tem = TREE_OPERAND (t, 0);
669 if (negate_expr_p (tem))
670 return fold_build2_loc (loc, TREE_CODE (t), type,
671 negate_expr (tem), TREE_OPERAND (t, 1));
672 }
673 break;
674
675 case TRUNC_DIV_EXPR:
676 case ROUND_DIV_EXPR:
677 case EXACT_DIV_EXPR:
678 /* In general we can't negate A / B, because if A is INT_MIN and
679 B is 1, we may turn this into INT_MIN / -1 which is undefined
680 and actually traps on some architectures. But if overflow is
681 	   undefined, we can negate, because - (INT_MIN / 1) is itself an
682 	   overflow, and undefined overflow may be assumed not to happen.  */
683 if (!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
684 {
685 const char * const warnmsg = G_("assuming signed overflow does not "
686 "occur when negating a division");
687 tem = TREE_OPERAND (t, 1);
688 if (negate_expr_p (tem))
689 {
690 if (INTEGRAL_TYPE_P (type)
691 && (TREE_CODE (tem) != INTEGER_CST
692 || integer_onep (tem)))
693 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MISC);
694 return fold_build2_loc (loc, TREE_CODE (t), type,
695 TREE_OPERAND (t, 0), negate_expr (tem));
696 }
697 /* If overflow is undefined then we have to be careful because
698 we ask whether it's ok to associate the negate with the
699 division which is not ok for example for
700 -((a - b) / c) where (-(a - b)) / c may invoke undefined
701 overflow because of negating INT_MIN. So do not use
702 negate_expr_p here but open-code the two important cases. */
703 tem = TREE_OPERAND (t, 0);
704 if ((INTEGRAL_TYPE_P (type)
705 && (TREE_CODE (tem) == NEGATE_EXPR
706 || (TREE_CODE (tem) == INTEGER_CST
707 && may_negate_without_overflow_p (tem))))
708 || !INTEGRAL_TYPE_P (type))
709 return fold_build2_loc (loc, TREE_CODE (t), type,
710 negate_expr (tem), TREE_OPERAND (t, 1));
711 }
712 break;
713
714 case NOP_EXPR:
715 /* Convert -((double)float) into (double)(-float). */
716 if (TREE_CODE (type) == REAL_TYPE)
717 {
718 tem = strip_float_extensions (t);
719 if (tem != t && negate_expr_p (tem))
720 return fold_convert_loc (loc, type, negate_expr (tem));
721 }
722 break;
723
724 case CALL_EXPR:
725       /* Rewrite -f(x) as f(-x).  */
726 if (negate_mathfn_p (builtin_mathfn_code (t))
727 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
728 {
729 tree fndecl, arg;
730
731 fndecl = get_callee_fndecl (t);
732 arg = negate_expr (CALL_EXPR_ARG (t, 0));
733 return build_call_expr_loc (loc, fndecl, 1, arg);
734 }
735 break;
736
737 case RSHIFT_EXPR:
738 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
739 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
740 {
741 tree op1 = TREE_OPERAND (t, 1);
742 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
743 {
744 tree ntype = TYPE_UNSIGNED (type)
745 ? signed_type_for (type)
746 : unsigned_type_for (type);
747 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
748 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
749 return fold_convert_loc (loc, type, temp);
750 }
751 }
752 break;
753
754 default:
755 break;
756 }
757
758 return NULL_TREE;
759 }
760
761 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
762    negated in a simpler way.  Also allow T to be NULL_TREE, in which case
763 return NULL_TREE. */
764
765 static tree
766 negate_expr (tree t)
767 {
768 tree type, tem;
769 location_t loc;
770
771 if (t == NULL_TREE)
772 return NULL_TREE;
773
774 loc = EXPR_LOCATION (t);
775 type = TREE_TYPE (t);
776 STRIP_SIGN_NOPS (t);
777
778 tem = fold_negate_expr (loc, t);
779 if (!tem)
780 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
781 return fold_convert_loc (loc, type, tem);
782 }
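
/* For example (a sketch), with wrapping overflow negate_expr turns
   x + 5 into -5 - x, and it turns (double) -f into (double) f by
   stripping the float extension first; when nothing simpler applies
   it falls back to wrapping T in a NEGATE_EXPR.  */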
783 \f
784 /* Split a tree IN into constant, literal and variable parts that could be
785 combined with CODE to make IN. "constant" means an expression with
786 TREE_CONSTANT but that isn't an actual constant. CODE must be a
787 commutative arithmetic operation. Store the constant part into *CONP,
788 the literal in *LITP and return the variable part. If a part isn't
789 present, set it to null. If the tree does not decompose in this way,
790 return the entire tree as the variable part and the other parts as null.
791
792 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
793    case, we negate an operand that was subtracted, except if it is a
794    literal, for which we use *MINUS_LITP instead.
795
796 If NEGATE_P is true, we are negating all of IN, again except a literal
797 for which we use *MINUS_LITP instead.
798
799 If IN is itself a literal or constant, return it as appropriate.
800
801 Note that we do not guarantee that any of the three values will be the
802 same type as IN, but they will have the same signedness and mode. */
803
804 static tree
805 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
806 tree *minus_litp, int negate_p)
807 {
808 tree var = 0;
809
810 *conp = 0;
811 *litp = 0;
812 *minus_litp = 0;
813
814 /* Strip any conversions that don't change the machine mode or signedness. */
815 STRIP_SIGN_NOPS (in);
816
817 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
818 || TREE_CODE (in) == FIXED_CST)
819 *litp = in;
820 else if (TREE_CODE (in) == code
821 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
822 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
823 /* We can associate addition and subtraction together (even
824 though the C standard doesn't say so) for integers because
825 the value is not affected. For reals, the value might be
826 affected, so we can't. */
827 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
828 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
829 {
830 tree op0 = TREE_OPERAND (in, 0);
831 tree op1 = TREE_OPERAND (in, 1);
832 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
833 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
834
835 /* First see if either of the operands is a literal, then a constant. */
836 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
837 || TREE_CODE (op0) == FIXED_CST)
838 *litp = op0, op0 = 0;
839 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
840 || TREE_CODE (op1) == FIXED_CST)
841 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
842
843 if (op0 != 0 && TREE_CONSTANT (op0))
844 *conp = op0, op0 = 0;
845 else if (op1 != 0 && TREE_CONSTANT (op1))
846 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
847
848 /* If we haven't dealt with either operand, this is not a case we can
849 decompose. Otherwise, VAR is either of the ones remaining, if any. */
850 if (op0 != 0 && op1 != 0)
851 var = in;
852 else if (op0 != 0)
853 var = op0;
854 else
855 var = op1, neg_var_p = neg1_p;
856
857 /* Now do any needed negations. */
858 if (neg_litp_p)
859 *minus_litp = *litp, *litp = 0;
860 if (neg_conp_p)
861 *conp = negate_expr (*conp);
862 if (neg_var_p)
863 var = negate_expr (var);
864 }
865 else if (TREE_CODE (in) == BIT_NOT_EXPR
866 && code == PLUS_EXPR)
867 {
868       /* -X - 1 is folded to ~X; undo that here.  */
869 *minus_litp = build_one_cst (TREE_TYPE (in));
870 var = negate_expr (TREE_OPERAND (in, 0));
871 }
872 else if (TREE_CONSTANT (in))
873 *conp = in;
874 else
875 var = in;
876
877 if (negate_p)
878 {
879 if (*litp)
880 *minus_litp = *litp, *litp = 0;
881 else if (*minus_litp)
882 *litp = *minus_litp, *minus_litp = 0;
883 *conp = negate_expr (*conp);
884 var = negate_expr (var);
885 }
886
887 return var;
888 }
889
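/* For example (illustrative), splitting IN = x + 4 with CODE ==
   PLUS_EXPR sets *LITP to 4 and returns x, while IN = x - 4 sets
   *MINUS_LITP to 4 and returns x.  Likewise IN = ~x sets *MINUS_LITP
   to 1 and returns the negation of x, undoing the canonicalization
   of -x - 1 to ~x.  */
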
890 /* Re-associate trees split by the above function. T1 and T2 are
891 either expressions to associate or null. Return the new
892 expression, if any. LOC is the location of the new expression. If
893 we build an operation, do it in TYPE and with CODE. */
894
895 static tree
896 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
897 {
898 if (t1 == 0)
899 return t2;
900 else if (t2 == 0)
901 return t1;
902
903 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
904 try to fold this since we will have infinite recursion. But do
905 deal with any NEGATE_EXPRs. */
906 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
907 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
908 {
909 if (code == PLUS_EXPR)
910 {
911 if (TREE_CODE (t1) == NEGATE_EXPR)
912 return build2_loc (loc, MINUS_EXPR, type,
913 fold_convert_loc (loc, type, t2),
914 fold_convert_loc (loc, type,
915 TREE_OPERAND (t1, 0)));
916 else if (TREE_CODE (t2) == NEGATE_EXPR)
917 return build2_loc (loc, MINUS_EXPR, type,
918 fold_convert_loc (loc, type, t1),
919 fold_convert_loc (loc, type,
920 TREE_OPERAND (t2, 0)));
921 else if (integer_zerop (t2))
922 return fold_convert_loc (loc, type, t1);
923 }
924 else if (code == MINUS_EXPR)
925 {
926 if (integer_zerop (t2))
927 return fold_convert_loc (loc, type, t1);
928 }
929
930 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
931 fold_convert_loc (loc, type, t2));
932 }
933
934 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
935 fold_convert_loc (loc, type, t2));
936 }
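
/* For example (a sketch), re-associating T1 = a + b with T2 = -c
   under PLUS_EXPR builds (a + b) - c with build2_loc rather than
   fold_build2_loc, since folding the outermost expression here could
   recurse without end.  */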
937 \f
938 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
939 for use in int_const_binop, size_binop and size_diffop. */
940
941 static bool
942 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
943 {
944 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
945 return false;
946 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
947 return false;
948
949 switch (code)
950 {
951 case LSHIFT_EXPR:
952 case RSHIFT_EXPR:
953 case LROTATE_EXPR:
954 case RROTATE_EXPR:
955 return true;
956
957 default:
958 break;
959 }
960
961 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
962 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
963 && TYPE_MODE (type1) == TYPE_MODE (type2);
964 }
965
966
967 /* Combine two integer constants ARG1 and ARG2 under operation CODE
968 to produce a new constant. Return NULL_TREE if we don't know how
969 to evaluate CODE at compile-time. */
970
971 static tree
972 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
973 int overflowable)
974 {
975 wide_int res;
976 tree t;
977 tree type = TREE_TYPE (arg1);
978 signop sign = TYPE_SIGN (type);
979 bool overflow = false;
980
981 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
982 TYPE_SIGN (TREE_TYPE (parg2)));
983
984 switch (code)
985 {
986 case BIT_IOR_EXPR:
987 res = wi::bit_or (arg1, arg2);
988 break;
989
990 case BIT_XOR_EXPR:
991 res = wi::bit_xor (arg1, arg2);
992 break;
993
994 case BIT_AND_EXPR:
995 res = wi::bit_and (arg1, arg2);
996 break;
997
998 case RSHIFT_EXPR:
999 case LSHIFT_EXPR:
1000 if (wi::neg_p (arg2))
1001 {
1002 arg2 = -arg2;
1003 if (code == RSHIFT_EXPR)
1004 code = LSHIFT_EXPR;
1005 else
1006 code = RSHIFT_EXPR;
1007 }
1008
1009 if (code == RSHIFT_EXPR)
1010 /* It's unclear from the C standard whether shifts can overflow.
1011 The following code ignores overflow; perhaps a C standard
1012 interpretation ruling is needed. */
1013 res = wi::rshift (arg1, arg2, sign);
1014 else
1015 res = wi::lshift (arg1, arg2);
1016 break;
1017
1018 case RROTATE_EXPR:
1019 case LROTATE_EXPR:
1020 if (wi::neg_p (arg2))
1021 {
1022 arg2 = -arg2;
1023 if (code == RROTATE_EXPR)
1024 code = LROTATE_EXPR;
1025 else
1026 code = RROTATE_EXPR;
1027 }
1028
1029 if (code == RROTATE_EXPR)
1030 res = wi::rrotate (arg1, arg2);
1031 else
1032 res = wi::lrotate (arg1, arg2);
1033 break;
1034
1035 case PLUS_EXPR:
1036 res = wi::add (arg1, arg2, sign, &overflow);
1037 break;
1038
1039 case MINUS_EXPR:
1040 res = wi::sub (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case MULT_EXPR:
1044 res = wi::mul (arg1, arg2, sign, &overflow);
1045 break;
1046
1047 case MULT_HIGHPART_EXPR:
1048 res = wi::mul_high (arg1, arg2, sign);
1049 break;
1050
1051 case TRUNC_DIV_EXPR:
1052 case EXACT_DIV_EXPR:
1053 if (arg2 == 0)
1054 return NULL_TREE;
1055 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1056 break;
1057
1058 case FLOOR_DIV_EXPR:
1059 if (arg2 == 0)
1060 return NULL_TREE;
1061 res = wi::div_floor (arg1, arg2, sign, &overflow);
1062 break;
1063
1064 case CEIL_DIV_EXPR:
1065 if (arg2 == 0)
1066 return NULL_TREE;
1067 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1068 break;
1069
1070 case ROUND_DIV_EXPR:
1071 if (arg2 == 0)
1072 return NULL_TREE;
1073 res = wi::div_round (arg1, arg2, sign, &overflow);
1074 break;
1075
1076 case TRUNC_MOD_EXPR:
1077 if (arg2 == 0)
1078 return NULL_TREE;
1079 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1080 break;
1081
1082 case FLOOR_MOD_EXPR:
1083 if (arg2 == 0)
1084 return NULL_TREE;
1085 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1086 break;
1087
1088 case CEIL_MOD_EXPR:
1089 if (arg2 == 0)
1090 return NULL_TREE;
1091 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1092 break;
1093
1094 case ROUND_MOD_EXPR:
1095 if (arg2 == 0)
1096 return NULL_TREE;
1097 res = wi::mod_round (arg1, arg2, sign, &overflow);
1098 break;
1099
1100 case MIN_EXPR:
1101 res = wi::min (arg1, arg2, sign);
1102 break;
1103
1104 case MAX_EXPR:
1105 res = wi::max (arg1, arg2, sign);
1106 break;
1107
1108 default:
1109 return NULL_TREE;
1110 }
1111
1112 t = force_fit_type (type, res, overflowable,
1113 (((sign == SIGNED || overflowable == -1)
1114 && overflow)
1115 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1116
1117 return t;
1118 }
1119
1120 tree
1121 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1122 {
1123 return int_const_binop_1 (code, arg1, arg2, 1);
1124 }
1125
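/* For example (illustrative), int_const_binop (PLUS_EXPR, a, b) on
   the int INTEGER_CSTs 7 and 5 yields the INTEGER_CST 12, while any
   of the division or modulus codes with a zero second operand yields
   NULL_TREE instead of folding.  */
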
1126 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1127 constant. We assume ARG1 and ARG2 have the same data type, or at least
1128 are the same kind of constant and the same machine mode. Return zero if
1129 combining the constants is not allowed in the current operating mode. */
1130
1131 static tree
1132 const_binop (enum tree_code code, tree arg1, tree arg2)
1133 {
1134 /* Sanity check for the recursive cases. */
1135 if (!arg1 || !arg2)
1136 return NULL_TREE;
1137
1138 STRIP_NOPS (arg1);
1139 STRIP_NOPS (arg2);
1140
1141 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1142 {
1143 if (code == POINTER_PLUS_EXPR)
1144 return int_const_binop (PLUS_EXPR,
1145 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1146
1147 return int_const_binop (code, arg1, arg2);
1148 }
1149
1150 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1151 {
1152 machine_mode mode;
1153 REAL_VALUE_TYPE d1;
1154 REAL_VALUE_TYPE d2;
1155 REAL_VALUE_TYPE value;
1156 REAL_VALUE_TYPE result;
1157 bool inexact;
1158 tree t, type;
1159
1160 /* The following codes are handled by real_arithmetic. */
1161 switch (code)
1162 {
1163 case PLUS_EXPR:
1164 case MINUS_EXPR:
1165 case MULT_EXPR:
1166 case RDIV_EXPR:
1167 case MIN_EXPR:
1168 case MAX_EXPR:
1169 break;
1170
1171 default:
1172 return NULL_TREE;
1173 }
1174
1175 d1 = TREE_REAL_CST (arg1);
1176 d2 = TREE_REAL_CST (arg2);
1177
1178 type = TREE_TYPE (arg1);
1179 mode = TYPE_MODE (type);
1180
1181 /* Don't perform operation if we honor signaling NaNs and
1182 either operand is a NaN. */
1183 if (HONOR_SNANS (mode)
1184 && (REAL_VALUE_ISNAN (d1) || REAL_VALUE_ISNAN (d2)))
1185 return NULL_TREE;
1186
1187 /* Don't perform operation if it would raise a division
1188 by zero exception. */
1189 if (code == RDIV_EXPR
1190 && REAL_VALUES_EQUAL (d2, dconst0)
1191 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1192 return NULL_TREE;
1193
1194 /* If either operand is a NaN, just return it. Otherwise, set up
1195 for floating-point trap; we return an overflow. */
1196 if (REAL_VALUE_ISNAN (d1))
1197 return arg1;
1198 else if (REAL_VALUE_ISNAN (d2))
1199 return arg2;
1200
1201 inexact = real_arithmetic (&value, code, &d1, &d2);
1202 real_convert (&result, mode, &value);
1203
1204 /* Don't constant fold this floating point operation if
1205 the result has overflowed and flag_trapping_math. */
1206 if (flag_trapping_math
1207 && MODE_HAS_INFINITIES (mode)
1208 && REAL_VALUE_ISINF (result)
1209 && !REAL_VALUE_ISINF (d1)
1210 && !REAL_VALUE_ISINF (d2))
1211 return NULL_TREE;
1212
1213 /* Don't constant fold this floating point operation if the
1214 	 result may depend upon the run-time rounding mode and
1215 flag_rounding_math is set, or if GCC's software emulation
1216 is unable to accurately represent the result. */
1217 if ((flag_rounding_math
1218 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1219 && (inexact || !real_identical (&result, &value)))
1220 return NULL_TREE;
1221
1222 t = build_real (type, result);
1223
1224 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1225 return t;
1226 }
1227
1228 if (TREE_CODE (arg1) == FIXED_CST)
1229 {
1230 FIXED_VALUE_TYPE f1;
1231 FIXED_VALUE_TYPE f2;
1232 FIXED_VALUE_TYPE result;
1233 tree t, type;
1234 int sat_p;
1235 bool overflow_p;
1236
1237 /* The following codes are handled by fixed_arithmetic. */
1238 switch (code)
1239 {
1240 case PLUS_EXPR:
1241 case MINUS_EXPR:
1242 case MULT_EXPR:
1243 case TRUNC_DIV_EXPR:
1244 if (TREE_CODE (arg2) != FIXED_CST)
1245 return NULL_TREE;
1246 f2 = TREE_FIXED_CST (arg2);
1247 break;
1248
1249 case LSHIFT_EXPR:
1250 case RSHIFT_EXPR:
1251 {
1252 if (TREE_CODE (arg2) != INTEGER_CST)
1253 return NULL_TREE;
1254 wide_int w2 = arg2;
1255 f2.data.high = w2.elt (1);
1256 f2.data.low = w2.elt (0);
1257 f2.mode = SImode;
1258 }
1259 break;
1260
1261 default:
1262 return NULL_TREE;
1263 }
1264
1265 f1 = TREE_FIXED_CST (arg1);
1266 type = TREE_TYPE (arg1);
1267 sat_p = TYPE_SATURATING (type);
1268 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1269 t = build_fixed (type, result);
1270 /* Propagate overflow flags. */
1271 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1272 TREE_OVERFLOW (t) = 1;
1273 return t;
1274 }
1275
1276 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1277 {
1278 tree type = TREE_TYPE (arg1);
1279 tree r1 = TREE_REALPART (arg1);
1280 tree i1 = TREE_IMAGPART (arg1);
1281 tree r2 = TREE_REALPART (arg2);
1282 tree i2 = TREE_IMAGPART (arg2);
1283 tree real, imag;
1284
1285 switch (code)
1286 {
1287 case PLUS_EXPR:
1288 case MINUS_EXPR:
1289 real = const_binop (code, r1, r2);
1290 imag = const_binop (code, i1, i2);
1291 break;
1292
1293 case MULT_EXPR:
1294 if (COMPLEX_FLOAT_TYPE_P (type))
1295 return do_mpc_arg2 (arg1, arg2, type,
1296 /* do_nonfinite= */ folding_initializer,
1297 mpc_mul);
1298
1299 real = const_binop (MINUS_EXPR,
1300 const_binop (MULT_EXPR, r1, r2),
1301 const_binop (MULT_EXPR, i1, i2));
1302 imag = const_binop (PLUS_EXPR,
1303 const_binop (MULT_EXPR, r1, i2),
1304 const_binop (MULT_EXPR, i1, r2));
1305 break;
1306
1307 case RDIV_EXPR:
1308 if (COMPLEX_FLOAT_TYPE_P (type))
1309 return do_mpc_arg2 (arg1, arg2, type,
1310 /* do_nonfinite= */ folding_initializer,
1311 mpc_div);
1312 	  /* Fall through.  */
1313 case TRUNC_DIV_EXPR:
1314 case CEIL_DIV_EXPR:
1315 case FLOOR_DIV_EXPR:
1316 case ROUND_DIV_EXPR:
1317 if (flag_complex_method == 0)
1318 {
1319 /* Keep this algorithm in sync with
1320 tree-complex.c:expand_complex_div_straight().
1321
1322 Expand complex division to scalars, straightforward algorithm.
1323 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1324 t = br*br + bi*bi
1325 */
1326 tree magsquared
1327 = const_binop (PLUS_EXPR,
1328 const_binop (MULT_EXPR, r2, r2),
1329 const_binop (MULT_EXPR, i2, i2));
1330 tree t1
1331 = const_binop (PLUS_EXPR,
1332 const_binop (MULT_EXPR, r1, r2),
1333 const_binop (MULT_EXPR, i1, i2));
1334 tree t2
1335 = const_binop (MINUS_EXPR,
1336 const_binop (MULT_EXPR, i1, r2),
1337 const_binop (MULT_EXPR, r1, i2));
1338
1339 real = const_binop (code, t1, magsquared);
1340 imag = const_binop (code, t2, magsquared);
1341 }
1342 else
1343 {
1344 /* Keep this algorithm in sync with
1345 tree-complex.c:expand_complex_div_wide().
1346
1347 Expand complex division to scalars, modified algorithm to minimize
1348 overflow with wide input ranges. */
1349 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1350 fold_abs_const (r2, TREE_TYPE (type)),
1351 fold_abs_const (i2, TREE_TYPE (type)));
1352
1353 if (integer_nonzerop (compare))
1354 {
1355 /* In the TRUE branch, we compute
1356 ratio = br/bi;
1357 div = (br * ratio) + bi;
1358 tr = (ar * ratio) + ai;
1359 ti = (ai * ratio) - ar;
1360 tr = tr / div;
1361 ti = ti / div; */
1362 tree ratio = const_binop (code, r2, i2);
1363 tree div = const_binop (PLUS_EXPR, i2,
1364 const_binop (MULT_EXPR, r2, ratio));
1365 real = const_binop (MULT_EXPR, r1, ratio);
1366 real = const_binop (PLUS_EXPR, real, i1);
1367 real = const_binop (code, real, div);
1368
1369 imag = const_binop (MULT_EXPR, i1, ratio);
1370 imag = const_binop (MINUS_EXPR, imag, r1);
1371 imag = const_binop (code, imag, div);
1372 }
1373 else
1374 {
1375 /* In the FALSE branch, we compute
1376 		 ratio = bi/br;
1377 		 div = (bi * ratio) + br;
1378 		 tr = (ai * ratio) + ar;
1379 		 ti = ai - (ar * ratio);
1380 		 tr = tr / div;
1381 		 ti = ti / div;  */
1382 tree ratio = const_binop (code, i2, r2);
1383 tree div = const_binop (PLUS_EXPR, r2,
1384 const_binop (MULT_EXPR, i2, ratio));
1385
1386 real = const_binop (MULT_EXPR, i1, ratio);
1387 real = const_binop (PLUS_EXPR, real, r1);
1388 real = const_binop (code, real, div);
1389
1390 imag = const_binop (MULT_EXPR, r1, ratio);
1391 imag = const_binop (MINUS_EXPR, i1, imag);
1392 imag = const_binop (code, imag, div);
1393 }
1394 }
1395 break;
1396
1397 default:
1398 return NULL_TREE;
1399 }
1400
1401 if (real && imag)
1402 return build_complex (type, real, imag);
1403 }
1404
1405 if (TREE_CODE (arg1) == VECTOR_CST
1406 && TREE_CODE (arg2) == VECTOR_CST)
1407 {
1408 tree type = TREE_TYPE (arg1);
1409 int count = TYPE_VECTOR_SUBPARTS (type), i;
1410 tree *elts = XALLOCAVEC (tree, count);
1411
1412 for (i = 0; i < count; i++)
1413 {
1414 tree elem1 = VECTOR_CST_ELT (arg1, i);
1415 tree elem2 = VECTOR_CST_ELT (arg2, i);
1416
1417 elts[i] = const_binop (code, elem1, elem2);
1418
1419 /* It is possible that const_binop cannot handle the given
1420 	     code and returns NULL_TREE.  */
1421 if (elts[i] == NULL_TREE)
1422 return NULL_TREE;
1423 }
1424
1425 return build_vector (type, elts);
1426 }
1427
1428 /* Shifts allow a scalar offset for a vector. */
1429 if (TREE_CODE (arg1) == VECTOR_CST
1430 && TREE_CODE (arg2) == INTEGER_CST)
1431 {
1432 tree type = TREE_TYPE (arg1);
1433 int count = TYPE_VECTOR_SUBPARTS (type), i;
1434 tree *elts = XALLOCAVEC (tree, count);
1435
1436 for (i = 0; i < count; i++)
1437 {
1438 tree elem1 = VECTOR_CST_ELT (arg1, i);
1439
1440 elts[i] = const_binop (code, elem1, arg2);
1441
1442 /* It is possible that const_binop cannot handle the given
1443 	     code and returns NULL_TREE.  */
1444 if (elts[i] == NULL_TREE)
1445 return NULL_TREE;
1446 }
1447
1448 return build_vector (type, elts);
1449 }
1450 return NULL_TREE;
1451 }
1452
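/* For example (a sketch of the scalar-shift case above),
   const_binop (LSHIFT_EXPR, v, c) with v = { 1, 2, 3, 4 } and the
   INTEGER_CST c = 3 shifts each element, yielding { 8, 16, 24, 32 };
   if any element fails to fold, the whole result is NULL_TREE.  */
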
1453 /* Overload that adds a TYPE parameter to be able to dispatch
1454 to fold_relational_const. */
1455
1456 tree
1457 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1458 {
1459 if (TREE_CODE_CLASS (code) == tcc_comparison)
1460 return fold_relational_const (code, type, arg1, arg2);
1461
1462 /* ??? Until we make the const_binop worker take the type of the
1463      result as an argument, put those cases that need it here.  */
1464 switch (code)
1465 {
1466 case COMPLEX_EXPR:
1467 if ((TREE_CODE (arg1) == REAL_CST
1468 && TREE_CODE (arg2) == REAL_CST)
1469 || (TREE_CODE (arg1) == INTEGER_CST
1470 && TREE_CODE (arg2) == INTEGER_CST))
1471 return build_complex (type, arg1, arg2);
1472 return NULL_TREE;
1473
1474 case VEC_PACK_TRUNC_EXPR:
1475 case VEC_PACK_FIX_TRUNC_EXPR:
1476 {
1477 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1478 tree *elts;
1479
1480 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1481 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1482 if (TREE_CODE (arg1) != VECTOR_CST
1483 || TREE_CODE (arg2) != VECTOR_CST)
1484 return NULL_TREE;
1485
1486 elts = XALLOCAVEC (tree, nelts);
1487 if (!vec_cst_ctor_to_array (arg1, elts)
1488 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1489 return NULL_TREE;
1490
1491 for (i = 0; i < nelts; i++)
1492 {
1493 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1494 ? NOP_EXPR : FIX_TRUNC_EXPR,
1495 TREE_TYPE (type), elts[i]);
1496 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1497 return NULL_TREE;
1498 }
1499
1500 return build_vector (type, elts);
1501 }
1502
1503 case VEC_WIDEN_MULT_LO_EXPR:
1504 case VEC_WIDEN_MULT_HI_EXPR:
1505 case VEC_WIDEN_MULT_EVEN_EXPR:
1506 case VEC_WIDEN_MULT_ODD_EXPR:
1507 {
1508 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1509 unsigned int out, ofs, scale;
1510 tree *elts;
1511
1512 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1513 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1514 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1515 return NULL_TREE;
1516
1517 elts = XALLOCAVEC (tree, nelts * 4);
1518 if (!vec_cst_ctor_to_array (arg1, elts)
1519 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1520 return NULL_TREE;
1521
1522 if (code == VEC_WIDEN_MULT_LO_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1524 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1525 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1526 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1527 scale = 1, ofs = 0;
1528 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1529 scale = 1, ofs = 1;
1530
1531 for (out = 0; out < nelts; out++)
1532 {
1533 unsigned int in1 = (out << scale) + ofs;
1534 unsigned int in2 = in1 + nelts * 2;
1535 tree t1, t2;
1536
1537 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1538 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1539
1540 if (t1 == NULL_TREE || t2 == NULL_TREE)
1541 return NULL_TREE;
1542 elts[out] = const_binop (MULT_EXPR, t1, t2);
1543 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1544 return NULL_TREE;
1545 }
1546
1547 return build_vector (type, elts);
1548 }
1549
1550 default:;
1551 }
1552
1553 if (TREE_CODE_CLASS (code) != tcc_binary)
1554 return NULL_TREE;
1555
1556 /* Make sure type and arg0 have the same saturating flag. */
1557 gcc_checking_assert (TYPE_SATURATING (type)
1558 == TYPE_SATURATING (TREE_TYPE (arg1)));
1559
1560 return const_binop (code, arg1, arg2);
1561 }
1562
1563 /* Compute CODE ARG0 with resulting type TYPE with ARG0 being constant.
1564    Return zero if computing the constant is not possible.  */
1565
1566 tree
1567 const_unop (enum tree_code code, tree type, tree arg0)
1568 {
1569 switch (code)
1570 {
1571 CASE_CONVERT:
1572 case FLOAT_EXPR:
1573 case FIX_TRUNC_EXPR:
1574 case FIXED_CONVERT_EXPR:
1575 return fold_convert_const (code, type, arg0);
1576
1577 case ADDR_SPACE_CONVERT_EXPR:
1578 if (integer_zerop (arg0))
1579 return fold_convert_const (code, type, arg0);
1580 break;
1581
1582 case VIEW_CONVERT_EXPR:
1583 return fold_view_convert_expr (type, arg0);
1584
1585 case NEGATE_EXPR:
1586 {
1587 /* Can't call fold_negate_const directly here as that doesn't
1588 handle all cases and we might not be able to negate some
1589 constants. */
1590 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1591 if (tem && CONSTANT_CLASS_P (tem))
1592 return tem;
1593 break;
1594 }
1595
1596 case ABS_EXPR:
1597 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1598 return fold_abs_const (arg0, type);
1599 break;
1600
1601 case CONJ_EXPR:
1602 if (TREE_CODE (arg0) == COMPLEX_CST)
1603 {
1604 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1605 TREE_TYPE (type));
1606 return build_complex (type, TREE_REALPART (arg0), ipart);
1607 }
1608 break;
1609
1610 case BIT_NOT_EXPR:
1611 if (TREE_CODE (arg0) == INTEGER_CST)
1612 return fold_not_const (arg0, type);
1613 /* Perform BIT_NOT_EXPR on each element individually. */
1614 else if (TREE_CODE (arg0) == VECTOR_CST)
1615 {
1616 tree *elements;
1617 tree elem;
1618 unsigned count = VECTOR_CST_NELTS (arg0), i;
1619
1620 elements = XALLOCAVEC (tree, count);
1621 for (i = 0; i < count; i++)
1622 {
1623 elem = VECTOR_CST_ELT (arg0, i);
1624 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1625 if (elem == NULL_TREE)
1626 break;
1627 elements[i] = elem;
1628 }
1629 if (i == count)
1630 return build_vector (type, elements);
1631 }
1632 break;
1633
1634 case TRUTH_NOT_EXPR:
1635 if (TREE_CODE (arg0) == INTEGER_CST)
1636 return constant_boolean_node (integer_zerop (arg0), type);
1637 break;
1638
1639 case REALPART_EXPR:
1640 if (TREE_CODE (arg0) == COMPLEX_CST)
1641 return fold_convert (type, TREE_REALPART (arg0));
1642 break;
1643
1644 case IMAGPART_EXPR:
1645 if (TREE_CODE (arg0) == COMPLEX_CST)
1646 return fold_convert (type, TREE_IMAGPART (arg0));
1647 break;
1648
1649 case VEC_UNPACK_LO_EXPR:
1650 case VEC_UNPACK_HI_EXPR:
1651 case VEC_UNPACK_FLOAT_LO_EXPR:
1652 case VEC_UNPACK_FLOAT_HI_EXPR:
1653 {
1654 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1655 tree *elts;
1656 enum tree_code subcode;
1657
1658 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1659 if (TREE_CODE (arg0) != VECTOR_CST)
1660 return NULL_TREE;
1661
1662 elts = XALLOCAVEC (tree, nelts * 2);
1663 if (!vec_cst_ctor_to_array (arg0, elts))
1664 return NULL_TREE;
1665
1666 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1667 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1668 elts += nelts;
1669
1670 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1671 subcode = NOP_EXPR;
1672 else
1673 subcode = FLOAT_EXPR;
1674
1675 for (i = 0; i < nelts; i++)
1676 {
1677 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1678 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1679 return NULL_TREE;
1680 }
1681
1682 return build_vector (type, elts);
1683 }
1684
1685 case REDUC_MIN_EXPR:
1686 case REDUC_MAX_EXPR:
1687 case REDUC_PLUS_EXPR:
1688 {
1689 unsigned int nelts, i;
1690 tree *elts;
1691 enum tree_code subcode;
1692
1693 if (TREE_CODE (arg0) != VECTOR_CST)
1694 return NULL_TREE;
1695 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1696
1697 elts = XALLOCAVEC (tree, nelts);
1698 if (!vec_cst_ctor_to_array (arg0, elts))
1699 return NULL_TREE;
1700
1701 switch (code)
1702 {
1703 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1704 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1705 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1706 default: gcc_unreachable ();
1707 }
1708
1709 for (i = 1; i < nelts; i++)
1710 {
1711 elts[0] = const_binop (subcode, elts[0], elts[i]);
1712 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1713 return NULL_TREE;
1714 }
1715
1716 return elts[0];
1717 }
1718
1719 default:
1720 break;
1721 }
1722
1723 return NULL_TREE;
1724 }
1725
1726 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1727 indicates which particular sizetype to create. */
1728
1729 tree
1730 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1731 {
1732 return build_int_cst (sizetype_tab[(int) kind], number);
1733 }
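
/* Callers normally reach this through the wrappers in tree.h, for
   example (a sketch):

     size_int (4)       the sizetype constant 4
     ssize_int (-1)     the ssizetype constant -1
     bitsize_int (32)   the bitsizetype constant 32  */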
1734 \f
1735 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE.  CODE
1736    is a tree code.  The type of the result is taken from the operands.
1737    Both must be equivalent integer types, as checked by int_binop_types_match_p.
1738 If the operands are constant, so is the result. */
1739
1740 tree
1741 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1742 {
1743 tree type = TREE_TYPE (arg0);
1744
1745 if (arg0 == error_mark_node || arg1 == error_mark_node)
1746 return error_mark_node;
1747
1748 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1749 TREE_TYPE (arg1)));
1750
1751 /* Handle the special case of two integer constants faster. */
1752 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1753 {
1754 /* And some specific cases even faster than that. */
1755 if (code == PLUS_EXPR)
1756 {
1757 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1758 return arg1;
1759 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1760 return arg0;
1761 }
1762 else if (code == MINUS_EXPR)
1763 {
1764 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1765 return arg0;
1766 }
1767 else if (code == MULT_EXPR)
1768 {
1769 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1770 return arg1;
1771 }
1772
1773 /* Handle general case of two integer constants. For sizetype
1774 constant calculations we always want to know about overflow,
1775 even in the unsigned case. */
1776 return int_const_binop_1 (code, arg0, arg1, -1);
1777 }
1778
1779 return fold_build2_loc (loc, code, type, arg0, arg1);
1780 }
1781
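/* For example (illustrative), size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) folds immediately to the sizetype constant 12.  Note
   that overflow is recorded via TREE_OVERFLOW here even though
   sizetype is unsigned, per the comment above.  */
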
1782 /* Given two values, either both of sizetype or both of bitsizetype,
1783 compute the difference between the two values. Return the value
1784    in the signed type corresponding to the type of the operands.  */
1785
1786 tree
1787 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1788 {
1789 tree type = TREE_TYPE (arg0);
1790 tree ctype;
1791
1792 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1793 TREE_TYPE (arg1)));
1794
1795 /* If the type is already signed, just do the simple thing. */
1796 if (!TYPE_UNSIGNED (type))
1797 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1798
1799 if (type == sizetype)
1800 ctype = ssizetype;
1801 else if (type == bitsizetype)
1802 ctype = sbitsizetype;
1803 else
1804 ctype = signed_type_for (type);
1805
1806 /* If either operand is not a constant, do the conversions to the signed
1807 type and subtract. The hardware will do the right thing with any
1808 overflow in the subtraction. */
1809 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1810 return size_binop_loc (loc, MINUS_EXPR,
1811 fold_convert_loc (loc, ctype, arg0),
1812 fold_convert_loc (loc, ctype, arg1));
1813
1814 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1815 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1816 overflow) and negate (which can't either). Special-case a result
1817 of zero while we're here. */
1818 if (tree_int_cst_equal (arg0, arg1))
1819 return build_int_cst (ctype, 0);
1820 else if (tree_int_cst_lt (arg1, arg0))
1821 return fold_convert_loc (loc, ctype,
1822 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1823 else
1824 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1825 fold_convert_loc (loc, ctype,
1826 size_binop_loc (loc,
1827 MINUS_EXPR,
1828 arg1, arg0)));
1829 }
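
/* For example (a sketch), size_diffop on the sizetype constants 4
   and 8 computes 8 - 4, converts the result to ssizetype and negates
   it, yielding the ssizetype constant -4.  */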
1830 \f
1831 /* A subroutine of fold_convert_const handling conversions of an
1832 INTEGER_CST to another integer type. */
1833
1834 static tree
1835 fold_convert_const_int_from_int (tree type, const_tree arg1)
1836 {
1837   /* Given an integer constant, make a new constant with the new type,
1838      appropriately sign-extended or truncated.  Use widest_int
1839      so that any extension is done according to ARG1's type.  */
1840 return force_fit_type (type, wi::to_widest (arg1),
1841 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1842 TREE_OVERFLOW (arg1));
1843 }
1844
1845 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1846 to an integer type. */
1847
1848 static tree
1849 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1850 {
1851 bool overflow = false;
1852 tree t;
1853
1854 /* The following code implements the floating point to integer
1855 conversion rules required by the Java Language Specification,
1856 that IEEE NaNs are mapped to zero and values that overflow
1857 the target precision saturate, i.e. values greater than
1858 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1859 are mapped to INT_MIN. These semantics are allowed by the
1860 C and C++ standards that simply state that the behavior of
1861 FP-to-integer conversion is unspecified upon overflow. */
1862
1863 wide_int val;
1864 REAL_VALUE_TYPE r;
1865 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1866
1867 switch (code)
1868 {
1869 case FIX_TRUNC_EXPR:
1870 real_trunc (&r, VOIDmode, &x);
1871 break;
1872
1873 default:
1874 gcc_unreachable ();
1875 }
1876
1877 /* If R is NaN, return zero and show we have an overflow. */
1878 if (REAL_VALUE_ISNAN (r))
1879 {
1880 overflow = true;
1881 val = wi::zero (TYPE_PRECISION (type));
1882 }
1883
1884 /* See if R is less than the lower bound or greater than the
1885 upper bound. */
1886
1887 if (! overflow)
1888 {
1889 tree lt = TYPE_MIN_VALUE (type);
1890 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1891 if (REAL_VALUES_LESS (r, l))
1892 {
1893 overflow = true;
1894 val = lt;
1895 }
1896 }
1897
1898 if (! overflow)
1899 {
1900 tree ut = TYPE_MAX_VALUE (type);
1901 if (ut)
1902 {
1903 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1904 if (REAL_VALUES_LESS (u, r))
1905 {
1906 overflow = true;
1907 val = ut;
1908 }
1909 }
1910 }
1911
1912 if (! overflow)
1913 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1914
1915 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1916 return t;
1917 }
1918
1919 /* A subroutine of fold_convert_const handling conversions of a
1920 FIXED_CST to an integer type. */
1921
1922 static tree
1923 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1924 {
1925 tree t;
1926 double_int temp, temp_trunc;
1927 unsigned int mode;
1928
1929 /* Right shift FIXED_CST to temp by fbit. */
1930 temp = TREE_FIXED_CST (arg1).data;
1931 mode = TREE_FIXED_CST (arg1).mode;
1932 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1933 {
1934 temp = temp.rshift (GET_MODE_FBIT (mode),
1935 HOST_BITS_PER_DOUBLE_INT,
1936 SIGNED_FIXED_POINT_MODE_P (mode));
1937
1938 /* Left shift temp to temp_trunc by fbit. */
1939 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1940 HOST_BITS_PER_DOUBLE_INT,
1941 SIGNED_FIXED_POINT_MODE_P (mode));
1942 }
1943 else
1944 {
1945 temp = double_int_zero;
1946 temp_trunc = double_int_zero;
1947 }
1948
1949   /* If FIXED_CST is negative, we need to round the value toward 0:
1950      if any fractional bits were shifted out above, add 1 to TEMP.  */
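  /* Example (an illustrative sketch): a signed fixed-point -3.5
     right-shifts to -4; the discarded fraction is nonzero, so adding 1
     yields -3, i.e. -3.5 truncated toward zero.  */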
1951 if (SIGNED_FIXED_POINT_MODE_P (mode)
1952 && temp_trunc.is_negative ()
1953 && TREE_FIXED_CST (arg1).data != temp_trunc)
1954 temp += double_int_one;
1955
1956 /* Given a fixed-point constant, make new constant with new type,
1957 appropriately sign-extended or truncated. */
1958 t = force_fit_type (type, temp, -1,
1959 (temp.is_negative ()
1960 && (TYPE_UNSIGNED (type)
1961 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1962 | TREE_OVERFLOW (arg1));
1963
1964 return t;
1965 }
1966
1967 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1968 to another floating point type. */
1969
1970 static tree
1971 fold_convert_const_real_from_real (tree type, const_tree arg1)
1972 {
1973 REAL_VALUE_TYPE value;
1974 tree t;
1975
1976 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1977 t = build_real (type, value);
1978
1979 /* If converting an infinity or NAN to a representation that doesn't
1980 have one, set the overflow bit so that we can produce some kind of
1981 error message at the appropriate point if necessary. It's not the
1982 most user-friendly message, but it's better than nothing. */
1983 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1984 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1985 TREE_OVERFLOW (t) = 1;
1986 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1987 && !MODE_HAS_NANS (TYPE_MODE (type)))
1988 TREE_OVERFLOW (t) = 1;
1989 /* Regular overflow, conversion produced an infinity in a mode that
1990 can't represent them. */
1991 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1992 && REAL_VALUE_ISINF (value)
1993 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1994 TREE_OVERFLOW (t) = 1;
1995 else
1996 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
1997 return t;
1998 }
1999
2000 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2001 to a floating point type. */
2002
2003 static tree
2004 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2005 {
2006 REAL_VALUE_TYPE value;
2007 tree t;
2008
2009 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2010 t = build_real (type, value);
2011
2012 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2013 return t;
2014 }
2015
2016 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2017 to another fixed-point type. */
2018
2019 static tree
2020 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2021 {
2022 FIXED_VALUE_TYPE value;
2023 tree t;
2024 bool overflow_p;
2025
2026 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2027 TYPE_SATURATING (type));
2028 t = build_fixed (type, value);
2029
2030 /* Propagate overflow flags. */
2031 if (overflow_p | TREE_OVERFLOW (arg1))
2032 TREE_OVERFLOW (t) = 1;
2033 return t;
2034 }
2035
2036 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2037 to a fixed-point type. */
2038
2039 static tree
2040 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2041 {
2042 FIXED_VALUE_TYPE value;
2043 tree t;
2044 bool overflow_p;
2045 double_int di;
2046
2047 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2048
2049 di.low = TREE_INT_CST_ELT (arg1, 0);
2050 if (TREE_INT_CST_NUNITS (arg1) == 1)
2051 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2052 else
2053 di.high = TREE_INT_CST_ELT (arg1, 1);
2054
2055 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2056 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2057 TYPE_SATURATING (type));
2058 t = build_fixed (type, value);
2059
2060 /* Propagate overflow flags. */
2061 if (overflow_p | TREE_OVERFLOW (arg1))
2062 TREE_OVERFLOW (t) = 1;
2063 return t;
2064 }
2065
2066 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2067 to a fixed-point type. */
2068
2069 static tree
2070 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2071 {
2072 FIXED_VALUE_TYPE value;
2073 tree t;
2074 bool overflow_p;
2075
2076 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2077 &TREE_REAL_CST (arg1),
2078 TYPE_SATURATING (type));
2079 t = build_fixed (type, value);
2080
2081 /* Propagate overflow flags. */
2082 if (overflow_p | TREE_OVERFLOW (arg1))
2083 TREE_OVERFLOW (t) = 1;
2084 return t;
2085 }
2086
2087 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2088 type TYPE. If no simplification can be done return NULL_TREE. */
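/* Example (an illustrative sketch, not part of the original source):
   fold_convert_const (FLOAT_EXPR, double_type_node,
		       build_int_cst (integer_type_node, 3))
   dispatches to build_real_from_int_cst and yields the REAL_CST 3.0.  */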
2089
2090 static tree
2091 fold_convert_const (enum tree_code code, tree type, tree arg1)
2092 {
2093 if (TREE_TYPE (arg1) == type)
2094 return arg1;
2095
2096 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2097 || TREE_CODE (type) == OFFSET_TYPE)
2098 {
2099 if (TREE_CODE (arg1) == INTEGER_CST)
2100 return fold_convert_const_int_from_int (type, arg1);
2101 else if (TREE_CODE (arg1) == REAL_CST)
2102 return fold_convert_const_int_from_real (code, type, arg1);
2103 else if (TREE_CODE (arg1) == FIXED_CST)
2104 return fold_convert_const_int_from_fixed (type, arg1);
2105 }
2106 else if (TREE_CODE (type) == REAL_TYPE)
2107 {
2108 if (TREE_CODE (arg1) == INTEGER_CST)
2109 return build_real_from_int_cst (type, arg1);
2110 else if (TREE_CODE (arg1) == REAL_CST)
2111 return fold_convert_const_real_from_real (type, arg1);
2112 else if (TREE_CODE (arg1) == FIXED_CST)
2113 return fold_convert_const_real_from_fixed (type, arg1);
2114 }
2115 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2116 {
2117 if (TREE_CODE (arg1) == FIXED_CST)
2118 return fold_convert_const_fixed_from_fixed (type, arg1);
2119 else if (TREE_CODE (arg1) == INTEGER_CST)
2120 return fold_convert_const_fixed_from_int (type, arg1);
2121 else if (TREE_CODE (arg1) == REAL_CST)
2122 return fold_convert_const_fixed_from_real (type, arg1);
2123 }
2124 return NULL_TREE;
2125 }
2126
2127 /* Construct a vector of zero elements of vector type TYPE. */
2128
2129 static tree
2130 build_zero_vector (tree type)
2131 {
2132 tree t;
2133
2134 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2135 return build_vector_from_val (type, t);
2136 }
2137
2138 /* Returns true if ARG is convertible to TYPE using a NOP_EXPR.  */
2139
2140 bool
2141 fold_convertible_p (const_tree type, const_tree arg)
2142 {
2143 tree orig = TREE_TYPE (arg);
2144
2145 if (type == orig)
2146 return true;
2147
2148 if (TREE_CODE (arg) == ERROR_MARK
2149 || TREE_CODE (type) == ERROR_MARK
2150 || TREE_CODE (orig) == ERROR_MARK)
2151 return false;
2152
2153 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2154 return true;
2155
2156 switch (TREE_CODE (type))
2157 {
2158 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2159 case POINTER_TYPE: case REFERENCE_TYPE:
2160 case OFFSET_TYPE:
2161 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2162 || TREE_CODE (orig) == OFFSET_TYPE)
2163 return true;
2164 return (TREE_CODE (orig) == VECTOR_TYPE
2165 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2166
2167 case REAL_TYPE:
2168 case FIXED_POINT_TYPE:
2169 case COMPLEX_TYPE:
2170 case VECTOR_TYPE:
2171 case VOID_TYPE:
2172 return TREE_CODE (type) == TREE_CODE (orig);
2173
2174 default:
2175 return false;
2176 }
2177 }
2178
2179 /* Convert expression ARG to type TYPE. Used by the middle-end for
2180 simple conversions in preference to calling the front-end's convert. */
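/* Example (an illustrative sketch): fold_convert_loc (loc, sizetype,
   integer_one_node) is folded immediately to the sizetype constant 1
   via fold_convert_const, whereas converting a non-constant integer
   expression of another type builds a NOP_EXPR.  */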
2181
2182 tree
2183 fold_convert_loc (location_t loc, tree type, tree arg)
2184 {
2185 tree orig = TREE_TYPE (arg);
2186 tree tem;
2187
2188 if (type == orig)
2189 return arg;
2190
2191 if (TREE_CODE (arg) == ERROR_MARK
2192 || TREE_CODE (type) == ERROR_MARK
2193 || TREE_CODE (orig) == ERROR_MARK)
2194 return error_mark_node;
2195
2196 switch (TREE_CODE (type))
2197 {
2198 case POINTER_TYPE:
2199 case REFERENCE_TYPE:
2200 /* Handle conversions between pointers to different address spaces. */
2201 if (POINTER_TYPE_P (orig)
2202 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2203 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2204 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2205 /* fall through */
2206
2207 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2208 case OFFSET_TYPE:
2209 if (TREE_CODE (arg) == INTEGER_CST)
2210 {
2211 tem = fold_convert_const (NOP_EXPR, type, arg);
2212 if (tem != NULL_TREE)
2213 return tem;
2214 }
2215 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2216 || TREE_CODE (orig) == OFFSET_TYPE)
2217 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2218 if (TREE_CODE (orig) == COMPLEX_TYPE)
2219 return fold_convert_loc (loc, type,
2220 fold_build1_loc (loc, REALPART_EXPR,
2221 TREE_TYPE (orig), arg));
2222 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2223 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2224 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2225
2226 case REAL_TYPE:
2227 if (TREE_CODE (arg) == INTEGER_CST)
2228 {
2229 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2230 if (tem != NULL_TREE)
2231 return tem;
2232 }
2233 else if (TREE_CODE (arg) == REAL_CST)
2234 {
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239 else if (TREE_CODE (arg) == FIXED_CST)
2240 {
2241 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2242 if (tem != NULL_TREE)
2243 return tem;
2244 }
2245
2246 switch (TREE_CODE (orig))
2247 {
2248 case INTEGER_TYPE:
2249 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2250 case POINTER_TYPE: case REFERENCE_TYPE:
2251 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2252
2253 case REAL_TYPE:
2254 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2255
2256 case FIXED_POINT_TYPE:
2257 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2258
2259 case COMPLEX_TYPE:
2260 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2261 return fold_convert_loc (loc, type, tem);
2262
2263 default:
2264 gcc_unreachable ();
2265 }
2266
2267 case FIXED_POINT_TYPE:
2268 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2269 || TREE_CODE (arg) == REAL_CST)
2270 {
2271 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2272 if (tem != NULL_TREE)
2273 goto fold_convert_exit;
2274 }
2275
2276 switch (TREE_CODE (orig))
2277 {
2278 case FIXED_POINT_TYPE:
2279 case INTEGER_TYPE:
2280 case ENUMERAL_TYPE:
2281 case BOOLEAN_TYPE:
2282 case REAL_TYPE:
2283 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2284
2285 case COMPLEX_TYPE:
2286 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2287 return fold_convert_loc (loc, type, tem);
2288
2289 default:
2290 gcc_unreachable ();
2291 }
2292
2293 case COMPLEX_TYPE:
2294 switch (TREE_CODE (orig))
2295 {
2296 case INTEGER_TYPE:
2297 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2298 case POINTER_TYPE: case REFERENCE_TYPE:
2299 case REAL_TYPE:
2300 case FIXED_POINT_TYPE:
2301 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2302 fold_convert_loc (loc, TREE_TYPE (type), arg),
2303 fold_convert_loc (loc, TREE_TYPE (type),
2304 integer_zero_node));
2305 case COMPLEX_TYPE:
2306 {
2307 tree rpart, ipart;
2308
2309 if (TREE_CODE (arg) == COMPLEX_EXPR)
2310 {
2311 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2312 TREE_OPERAND (arg, 0));
2313 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2314 TREE_OPERAND (arg, 1));
2315 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2316 }
2317
2318 arg = save_expr (arg);
2319 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2321 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2322 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2323 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2324 }
2325
2326 default:
2327 gcc_unreachable ();
2328 }
2329
2330 case VECTOR_TYPE:
2331 if (integer_zerop (arg))
2332 return build_zero_vector (type);
2333 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2334 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2335 || TREE_CODE (orig) == VECTOR_TYPE);
2336 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2337
2338 case VOID_TYPE:
2339 tem = fold_ignored_result (arg);
2340 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2341
2342 default:
2343 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2344 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2345 gcc_unreachable ();
2346 }
2347 fold_convert_exit:
2348 protected_set_expr_location_unshare (tem, loc);
2349 return tem;
2350 }
2351 \f
2352 /* Return false if expr can be assumed not to be an lvalue, true
2353 otherwise. */
2354
2355 static bool
2356 maybe_lvalue_p (const_tree x)
2357 {
2358 /* We only need to wrap lvalue tree codes. */
2359 switch (TREE_CODE (x))
2360 {
2361 case VAR_DECL:
2362 case PARM_DECL:
2363 case RESULT_DECL:
2364 case LABEL_DECL:
2365 case FUNCTION_DECL:
2366 case SSA_NAME:
2367
2368 case COMPONENT_REF:
2369 case MEM_REF:
2370 case INDIRECT_REF:
2371 case ARRAY_REF:
2372 case ARRAY_RANGE_REF:
2373 case BIT_FIELD_REF:
2374 case OBJ_TYPE_REF:
2375
2376 case REALPART_EXPR:
2377 case IMAGPART_EXPR:
2378 case PREINCREMENT_EXPR:
2379 case PREDECREMENT_EXPR:
2380 case SAVE_EXPR:
2381 case TRY_CATCH_EXPR:
2382 case WITH_CLEANUP_EXPR:
2383 case COMPOUND_EXPR:
2384 case MODIFY_EXPR:
2385 case TARGET_EXPR:
2386 case COND_EXPR:
2387 case BIND_EXPR:
2388 break;
2389
2390 default:
2391 /* Assume the worst for front-end tree codes. */
2392 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2393 break;
2394 return false;
2395 }
2396
2397 return true;
2398 }
2399
2400 /* Return an expr equal to X but certainly not valid as an lvalue. */
2401
2402 tree
2403 non_lvalue_loc (location_t loc, tree x)
2404 {
2405 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2406 us. */
2407 if (in_gimple_form)
2408 return x;
2409
2410 if (! maybe_lvalue_p (x))
2411 return x;
2412 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2413 }
2414
2415 /* When pedantic, return an expr equal to X but certainly not valid as a
2416 pedantic lvalue. Otherwise, return X. */
2417
2418 static tree
2419 pedantic_non_lvalue_loc (location_t loc, tree x)
2420 {
2421 return protected_set_expr_location_unshare (x, loc);
2422 }
2423 \f
2424 /* Given a tree comparison code, return the code that is the logical inverse.
2425 It is generally not safe to do this for floating-point comparisons, except
2426 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2427 ERROR_MARK in this case. */
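/* Example (an illustrative sketch): without NaNs the inverse of a < b
   is a >= b; when NaNs are honored it is a UNGE b, which is also true
   on unordered operands.  With both NaNs and -ftrapping-math we return
   ERROR_MARK, since a < b can trap on a NaN while a UNGE b cannot.  */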
2428
2429 enum tree_code
2430 invert_tree_comparison (enum tree_code code, bool honor_nans)
2431 {
2432 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2433 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2434 return ERROR_MARK;
2435
2436 switch (code)
2437 {
2438 case EQ_EXPR:
2439 return NE_EXPR;
2440 case NE_EXPR:
2441 return EQ_EXPR;
2442 case GT_EXPR:
2443 return honor_nans ? UNLE_EXPR : LE_EXPR;
2444 case GE_EXPR:
2445 return honor_nans ? UNLT_EXPR : LT_EXPR;
2446 case LT_EXPR:
2447 return honor_nans ? UNGE_EXPR : GE_EXPR;
2448 case LE_EXPR:
2449 return honor_nans ? UNGT_EXPR : GT_EXPR;
2450 case LTGT_EXPR:
2451 return UNEQ_EXPR;
2452 case UNEQ_EXPR:
2453 return LTGT_EXPR;
2454 case UNGT_EXPR:
2455 return LE_EXPR;
2456 case UNGE_EXPR:
2457 return LT_EXPR;
2458 case UNLT_EXPR:
2459 return GE_EXPR;
2460 case UNLE_EXPR:
2461 return GT_EXPR;
2462 case ORDERED_EXPR:
2463 return UNORDERED_EXPR;
2464 case UNORDERED_EXPR:
2465 return ORDERED_EXPR;
2466 default:
2467 gcc_unreachable ();
2468 }
2469 }
2470
2471 /* Similar, but return the comparison that results if the operands are
2472 swapped. This is safe for floating-point. */
2473
2474 enum tree_code
2475 swap_tree_comparison (enum tree_code code)
2476 {
2477 switch (code)
2478 {
2479 case EQ_EXPR:
2480 case NE_EXPR:
2481 case ORDERED_EXPR:
2482 case UNORDERED_EXPR:
2483 case LTGT_EXPR:
2484 case UNEQ_EXPR:
2485 return code;
2486 case GT_EXPR:
2487 return LT_EXPR;
2488 case GE_EXPR:
2489 return LE_EXPR;
2490 case LT_EXPR:
2491 return GT_EXPR;
2492 case LE_EXPR:
2493 return GE_EXPR;
2494 case UNGT_EXPR:
2495 return UNLT_EXPR;
2496 case UNGE_EXPR:
2497 return UNLE_EXPR;
2498 case UNLT_EXPR:
2499 return UNGT_EXPR;
2500 case UNLE_EXPR:
2501 return UNGE_EXPR;
2502 default:
2503 gcc_unreachable ();
2504 }
2505 }
2506
2507
2508 /* Convert a comparison tree code from an enum tree_code representation
2509 into a compcode bit-based encoding. This function is the inverse of
2510 compcode_to_comparison. */
2511
2512 static enum comparison_code
2513 comparison_to_compcode (enum tree_code code)
2514 {
2515 switch (code)
2516 {
2517 case LT_EXPR:
2518 return COMPCODE_LT;
2519 case EQ_EXPR:
2520 return COMPCODE_EQ;
2521 case LE_EXPR:
2522 return COMPCODE_LE;
2523 case GT_EXPR:
2524 return COMPCODE_GT;
2525 case NE_EXPR:
2526 return COMPCODE_NE;
2527 case GE_EXPR:
2528 return COMPCODE_GE;
2529 case ORDERED_EXPR:
2530 return COMPCODE_ORD;
2531 case UNORDERED_EXPR:
2532 return COMPCODE_UNORD;
2533 case UNLT_EXPR:
2534 return COMPCODE_UNLT;
2535 case UNEQ_EXPR:
2536 return COMPCODE_UNEQ;
2537 case UNLE_EXPR:
2538 return COMPCODE_UNLE;
2539 case UNGT_EXPR:
2540 return COMPCODE_UNGT;
2541 case LTGT_EXPR:
2542 return COMPCODE_LTGT;
2543 case UNGE_EXPR:
2544 return COMPCODE_UNGE;
2545 default:
2546 gcc_unreachable ();
2547 }
2548 }
2549
2550 /* Convert a compcode bit-based encoding of a comparison operator back
2551 to GCC's enum tree_code representation. This function is the
2552 inverse of comparison_to_compcode. */
2553
2554 static enum tree_code
2555 compcode_to_comparison (enum comparison_code code)
2556 {
2557 switch (code)
2558 {
2559 case COMPCODE_LT:
2560 return LT_EXPR;
2561 case COMPCODE_EQ:
2562 return EQ_EXPR;
2563 case COMPCODE_LE:
2564 return LE_EXPR;
2565 case COMPCODE_GT:
2566 return GT_EXPR;
2567 case COMPCODE_NE:
2568 return NE_EXPR;
2569 case COMPCODE_GE:
2570 return GE_EXPR;
2571 case COMPCODE_ORD:
2572 return ORDERED_EXPR;
2573 case COMPCODE_UNORD:
2574 return UNORDERED_EXPR;
2575 case COMPCODE_UNLT:
2576 return UNLT_EXPR;
2577 case COMPCODE_UNEQ:
2578 return UNEQ_EXPR;
2579 case COMPCODE_UNLE:
2580 return UNLE_EXPR;
2581 case COMPCODE_UNGT:
2582 return UNGT_EXPR;
2583 case COMPCODE_LTGT:
2584 return LTGT_EXPR;
2585 case COMPCODE_UNGE:
2586 return UNGE_EXPR;
2587 default:
2588 gcc_unreachable ();
2589 }
2590 }
2591
2592 /* Return a tree for the comparison which is the combination of
2593 doing the AND or OR (depending on CODE) of the two operations LCODE
2594 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2595 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2596 if this makes the transformation invalid. */
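/* Worked example (an illustrative sketch): combining
   (a < b) || (a == b) maps LT_EXPR and EQ_EXPR to COMPCODE_LT (1) and
   COMPCODE_EQ (2); their bitwise OR is COMPCODE_LE (3), so the result
   folds to a <= b.  */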
2597
2598 tree
2599 combine_comparisons (location_t loc,
2600 enum tree_code code, enum tree_code lcode,
2601 enum tree_code rcode, tree truth_type,
2602 tree ll_arg, tree lr_arg)
2603 {
2604 bool honor_nans = HONOR_NANS (ll_arg);
2605 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2606 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2607 int compcode;
2608
2609 switch (code)
2610 {
2611 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2612 compcode = lcompcode & rcompcode;
2613 break;
2614
2615 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2616 compcode = lcompcode | rcompcode;
2617 break;
2618
2619 default:
2620 return NULL_TREE;
2621 }
2622
2623 if (!honor_nans)
2624 {
2625 /* Eliminate unordered comparisons, as well as LTGT and ORD
2626 which are not used unless the mode has NaNs. */
2627 compcode &= ~COMPCODE_UNORD;
2628 if (compcode == COMPCODE_LTGT)
2629 compcode = COMPCODE_NE;
2630 else if (compcode == COMPCODE_ORD)
2631 compcode = COMPCODE_TRUE;
2632 }
2633 else if (flag_trapping_math)
2634 {
2635 /* Check that the original operation and the optimized ones will trap
2636 under the same condition. */
2637 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2638 && (lcompcode != COMPCODE_EQ)
2639 && (lcompcode != COMPCODE_ORD);
2640 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2641 && (rcompcode != COMPCODE_EQ)
2642 && (rcompcode != COMPCODE_ORD);
2643 bool trap = (compcode & COMPCODE_UNORD) == 0
2644 && (compcode != COMPCODE_EQ)
2645 && (compcode != COMPCODE_ORD);
2646
2647 /* In a short-circuited boolean expression the LHS might be
2648 such that the RHS, if evaluated, will never trap. For
2649 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2650 if neither x nor y is NaN. (This is a mixed blessing: for
2651 example, the expression above will never trap, hence
2652 optimizing it to x < y would be invalid). */
2653 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2654 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2655 rtrap = false;
2656
2657 /* If the comparison was short-circuited, and only the RHS
2658 trapped, we may now generate a spurious trap. */
2659 if (rtrap && !ltrap
2660 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2661 return NULL_TREE;
2662
2663 /* If we changed the conditions that cause a trap, we lose. */
2664 if ((ltrap || rtrap) != trap)
2665 return NULL_TREE;
2666 }
2667
2668 if (compcode == COMPCODE_TRUE)
2669 return constant_boolean_node (true, truth_type);
2670 else if (compcode == COMPCODE_FALSE)
2671 return constant_boolean_node (false, truth_type);
2672 else
2673 {
2674 enum tree_code tcode;
2675
2676 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2677 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2678 }
2679 }
2680 \f
2681 /* Return nonzero if two operands (typically of the same tree node)
2682 are necessarily equal. If either argument has side-effects this
2683 function returns zero. FLAGS modifies behavior as follows:
2684
2685 If OEP_ONLY_CONST is set, only return nonzero for constants.
2686 This function tests whether the operands are indistinguishable;
2687 it does not test whether they are equal using C's == operation.
2688 The distinction is important for IEEE floating point, because
2689 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2690 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2691
2692 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2693 even though it may hold multiple values during a function.
2694 This is because a GCC tree node guarantees that nothing else is
2695 executed between the evaluation of its "operands" (which may often
2696 be evaluated in arbitrary order). Hence if the operands themselves
2697 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2698 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2699 unset means assuming isochronic (or instantaneous) tree equivalence.
2700 Unless comparing arbitrary expression trees, such as from different
2701 statements, this flag can usually be left unset.
2702
2703 If OEP_PURE_SAME is set, then pure functions with identical arguments
2704 are considered the same. It is used when the caller has other ways
2705 to ensure that global memory is unchanged in between. */
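/* Example (an illustrative sketch): the trees for a + b and b + a
   compare equal because PLUS_EXPR is commutative, and two uses of the
   same SAVE_EXPR compare equal even in the presence of side effects,
   since the SAVE_EXPR guarantees a single evaluation.  */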
2706
2707 int
2708 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2709 {
2710 /* If either is ERROR_MARK, they aren't equal. */
2711 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2712 || TREE_TYPE (arg0) == error_mark_node
2713 || TREE_TYPE (arg1) == error_mark_node)
2714 return 0;
2715
2716   /* Similarly, if either does not have a type (like a released SSA name),
2717 they aren't equal. */
2718 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2719 return 0;
2720
2721 /* Check equality of integer constants before bailing out due to
2722 precision differences. */
2723 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2724 return tree_int_cst_equal (arg0, arg1);
2725
2726 /* If both types don't have the same signedness, then we can't consider
2727 them equal. We must check this before the STRIP_NOPS calls
2728 because they may change the signedness of the arguments. As pointers
2729 strictly don't have a signedness, require either two pointers or
2730 two non-pointers as well. */
2731 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2732 || POINTER_TYPE_P (TREE_TYPE (arg0)) != POINTER_TYPE_P (TREE_TYPE (arg1)))
2733 return 0;
2734
2735   /* We cannot consider pointers to different address spaces equal.  */
2736 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && POINTER_TYPE_P (TREE_TYPE (arg1))
2737 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2738 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2739 return 0;
2740
2741 /* If both types don't have the same precision, then it is not safe
2742 to strip NOPs. */
2743 if (element_precision (TREE_TYPE (arg0))
2744 != element_precision (TREE_TYPE (arg1)))
2745 return 0;
2746
2747 STRIP_NOPS (arg0);
2748 STRIP_NOPS (arg1);
2749
2750 /* In case both args are comparisons but with different comparison
2751 code, try to swap the comparison operands of one arg to produce
2752 a match and compare that variant. */
2753 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2754 && COMPARISON_CLASS_P (arg0)
2755 && COMPARISON_CLASS_P (arg1))
2756 {
2757 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2758
2759 if (TREE_CODE (arg0) == swap_code)
2760 return operand_equal_p (TREE_OPERAND (arg0, 0),
2761 TREE_OPERAND (arg1, 1), flags)
2762 && operand_equal_p (TREE_OPERAND (arg0, 1),
2763 TREE_OPERAND (arg1, 0), flags);
2764 }
2765
2766 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2767 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2768 && !(CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1)))
2769 return 0;
2770
2771 /* This is needed for conversions and for COMPONENT_REF.
2772 Might as well play it safe and always test this. */
2773 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2774 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2775 || TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1)))
2776 return 0;
2777
2778 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2779 We don't care about side effects in that case because the SAVE_EXPR
2780 takes care of that for us. In all other cases, two expressions are
2781 equal if they have no side effects. If we have two identical
2782 expressions with side effects that should be treated the same due
2783 to the only side effects being identical SAVE_EXPR's, that will
2784 be detected in the recursive calls below.
2785 If we are taking an invariant address of two identical objects
2786 they are necessarily equal as well. */
2787 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2788 && (TREE_CODE (arg0) == SAVE_EXPR
2789 || (flags & OEP_CONSTANT_ADDRESS_OF)
2790 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2791 return 1;
2792
2793 /* Next handle constant cases, those for which we can return 1 even
2794 if ONLY_CONST is set. */
2795 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2796 switch (TREE_CODE (arg0))
2797 {
2798 case INTEGER_CST:
2799 return tree_int_cst_equal (arg0, arg1);
2800
2801 case FIXED_CST:
2802 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2803 TREE_FIXED_CST (arg1));
2804
2805 case REAL_CST:
2806 if (REAL_VALUES_IDENTICAL (TREE_REAL_CST (arg0),
2807 TREE_REAL_CST (arg1)))
2808 return 1;
2809 
2811 if (!HONOR_SIGNED_ZEROS (arg0))
2812 {
2813 /* If we do not distinguish between signed and unsigned zero,
2814 consider them equal. */
2815 if (real_zerop (arg0) && real_zerop (arg1))
2816 return 1;
2817 }
2818 return 0;
2819
2820 case VECTOR_CST:
2821 {
2822 unsigned i;
2823
2824 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2825 return 0;
2826
2827 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2828 {
2829 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2830 VECTOR_CST_ELT (arg1, i), flags))
2831 return 0;
2832 }
2833 return 1;
2834 }
2835
2836 case COMPLEX_CST:
2837 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2838 flags)
2839 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2840 flags));
2841
2842 case STRING_CST:
2843 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2844 && ! memcmp (TREE_STRING_POINTER (arg0),
2845 TREE_STRING_POINTER (arg1),
2846 TREE_STRING_LENGTH (arg0)));
2847
2848 case ADDR_EXPR:
2849 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2850 TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1)
2851 ? OEP_CONSTANT_ADDRESS_OF | OEP_ADDRESS_OF : 0);
2852 default:
2853 break;
2854 }
2855
2856 if (flags & OEP_ONLY_CONST)
2857 return 0;
2858
2859 /* Define macros to test an operand from arg0 and arg1 for equality and a
2860 variant that allows null and views null as being different from any
2861    non-null value.  In the latter case, if either is null, then both
2862    must be; otherwise, do the normal comparison.  */
2863 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2864 TREE_OPERAND (arg1, N), flags)
2865
2866 #define OP_SAME_WITH_NULL(N) \
2867 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2868 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2869
2870 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2871 {
2872 case tcc_unary:
2873 /* Two conversions are equal only if signedness and modes match. */
2874 switch (TREE_CODE (arg0))
2875 {
2876 CASE_CONVERT:
2877 case FIX_TRUNC_EXPR:
2878 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2879 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2880 return 0;
2881 break;
2882 default:
2883 break;
2884 }
2885
2886 return OP_SAME (0);
2887 
2889 case tcc_comparison:
2890 case tcc_binary:
2891 if (OP_SAME (0) && OP_SAME (1))
2892 return 1;
2893
2894 /* For commutative ops, allow the other order. */
2895 return (commutative_tree_code (TREE_CODE (arg0))
2896 && operand_equal_p (TREE_OPERAND (arg0, 0),
2897 TREE_OPERAND (arg1, 1), flags)
2898 && operand_equal_p (TREE_OPERAND (arg0, 1),
2899 TREE_OPERAND (arg1, 0), flags));
2900
2901 case tcc_reference:
2902 /* If either of the pointer (or reference) expressions we are
2903 	 dereferencing contains a side effect, these cannot be equal,
2904 but their addresses can be. */
2905 if ((flags & OEP_CONSTANT_ADDRESS_OF) == 0
2906 && (TREE_SIDE_EFFECTS (arg0)
2907 || TREE_SIDE_EFFECTS (arg1)))
2908 return 0;
2909
2910 switch (TREE_CODE (arg0))
2911 {
2912 case INDIRECT_REF:
2913 if (!(flags & OEP_ADDRESS_OF)
2914 && (TYPE_ALIGN (TREE_TYPE (arg0))
2915 != TYPE_ALIGN (TREE_TYPE (arg1))))
2916 return 0;
2917 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2918 return OP_SAME (0);
2919
2920 case REALPART_EXPR:
2921 case IMAGPART_EXPR:
2922 return OP_SAME (0);
2923
2924 case TARGET_MEM_REF:
2925 case MEM_REF:
2926 /* Require equal access sizes, and similar pointer types.
2927 We can have incomplete types for array references of
2928 variable-sized arrays from the Fortran frontend
2929 though. Also verify the types are compatible. */
2930 if (!((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
2931 || (TYPE_SIZE (TREE_TYPE (arg0))
2932 && TYPE_SIZE (TREE_TYPE (arg1))
2933 && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
2934 TYPE_SIZE (TREE_TYPE (arg1)), flags)))
2935 && types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1))
2936 && ((flags & OEP_ADDRESS_OF)
2937 || (alias_ptr_types_compatible_p
2938 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
2939 TREE_TYPE (TREE_OPERAND (arg1, 1)))
2940 && (MR_DEPENDENCE_CLIQUE (arg0)
2941 == MR_DEPENDENCE_CLIQUE (arg1))
2942 && (MR_DEPENDENCE_BASE (arg0)
2943 == MR_DEPENDENCE_BASE (arg1))
2944 && (TYPE_ALIGN (TREE_TYPE (arg0))
2945 == TYPE_ALIGN (TREE_TYPE (arg1)))))))
2946 return 0;
2947 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2948 return (OP_SAME (0) && OP_SAME (1)
2949 	      /* TARGET_MEM_REFs require equal extra operands.  */
2950 && (TREE_CODE (arg0) != TARGET_MEM_REF
2951 || (OP_SAME_WITH_NULL (2)
2952 && OP_SAME_WITH_NULL (3)
2953 && OP_SAME_WITH_NULL (4))));
2954
2955 case ARRAY_REF:
2956 case ARRAY_RANGE_REF:
2957 	  /* Operands 2 and 3 may be null.
2958 	     Compare the array index by value first if it is constant, as we
2959 	     may have different types but the same value here.  */
2960 if (!OP_SAME (0))
2961 return 0;
2962 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2963 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
2964 TREE_OPERAND (arg1, 1))
2965 || OP_SAME (1))
2966 && OP_SAME_WITH_NULL (2)
2967 && OP_SAME_WITH_NULL (3));
2968
2969 case COMPONENT_REF:
2970 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
2971 may be NULL when we're called to compare MEM_EXPRs. */
2972 if (!OP_SAME_WITH_NULL (0)
2973 || !OP_SAME (1))
2974 return 0;
2975 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2976 return OP_SAME_WITH_NULL (2);
2977
2978 case BIT_FIELD_REF:
2979 if (!OP_SAME (0))
2980 return 0;
2981 flags &= ~(OEP_CONSTANT_ADDRESS_OF|OEP_ADDRESS_OF);
2982 return OP_SAME (1) && OP_SAME (2);
2983
2984 default:
2985 return 0;
2986 }
2987
2988 case tcc_expression:
2989 switch (TREE_CODE (arg0))
2990 {
2991 case ADDR_EXPR:
2992 return operand_equal_p (TREE_OPERAND (arg0, 0),
2993 TREE_OPERAND (arg1, 0),
2994 flags | OEP_ADDRESS_OF);
2995
2996 case TRUTH_NOT_EXPR:
2997 return OP_SAME (0);
2998
2999 case TRUTH_ANDIF_EXPR:
3000 case TRUTH_ORIF_EXPR:
3001 return OP_SAME (0) && OP_SAME (1);
3002
3003 case FMA_EXPR:
3004 case WIDEN_MULT_PLUS_EXPR:
3005 case WIDEN_MULT_MINUS_EXPR:
3006 if (!OP_SAME (2))
3007 return 0;
3008 	  /* The multiplication operands are commutative.  */
3009 /* FALLTHRU */
3010
3011 case TRUTH_AND_EXPR:
3012 case TRUTH_OR_EXPR:
3013 case TRUTH_XOR_EXPR:
3014 if (OP_SAME (0) && OP_SAME (1))
3015 return 1;
3016
3017 /* Otherwise take into account this is a commutative operation. */
3018 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3019 TREE_OPERAND (arg1, 1), flags)
3020 && operand_equal_p (TREE_OPERAND (arg0, 1),
3021 TREE_OPERAND (arg1, 0), flags));
3022
3023 case COND_EXPR:
3024 case VEC_COND_EXPR:
3025 case DOT_PROD_EXPR:
3026 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3027
3028 default:
3029 return 0;
3030 }
3031
3032 case tcc_vl_exp:
3033 switch (TREE_CODE (arg0))
3034 {
3035 case CALL_EXPR:
3036 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3037 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3038 	    /* If the two CALL_EXPRs are not both internal calls or both
3039 	       normal function calls, then they are not equal.  */
3040 return 0;
3041 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3042 {
3043 /* If the CALL_EXPRs call different internal functions, then they
3044 are not equal. */
3045 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3046 return 0;
3047 }
3048 else
3049 {
3050 /* If the CALL_EXPRs call different functions, then they are not
3051 equal. */
3052 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3053 flags))
3054 return 0;
3055 }
3056
3057 {
3058 unsigned int cef = call_expr_flags (arg0);
3059 if (flags & OEP_PURE_SAME)
3060 cef &= ECF_CONST | ECF_PURE;
3061 else
3062 cef &= ECF_CONST;
3063 if (!cef)
3064 return 0;
3065 }
3066
3067 /* Now see if all the arguments are the same. */
3068 {
3069 const_call_expr_arg_iterator iter0, iter1;
3070 const_tree a0, a1;
3071 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3072 a1 = first_const_call_expr_arg (arg1, &iter1);
3073 a0 && a1;
3074 a0 = next_const_call_expr_arg (&iter0),
3075 a1 = next_const_call_expr_arg (&iter1))
3076 if (! operand_equal_p (a0, a1, flags))
3077 return 0;
3078
3079 /* If we get here and both argument lists are exhausted
3080 then the CALL_EXPRs are equal. */
3081 return ! (a0 || a1);
3082 }
3083 default:
3084 return 0;
3085 }
3086
3087 case tcc_declaration:
3088 /* Consider __builtin_sqrt equal to sqrt. */
3089 return (TREE_CODE (arg0) == FUNCTION_DECL
3090 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3091 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3092 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3093
3094 default:
3095 return 0;
3096 }
3097
3098 #undef OP_SAME
3099 #undef OP_SAME_WITH_NULL
3100 }
3101 \f
3102 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3103 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3104
3105 When in doubt, return 0. */
3106
3107 static int
3108 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3109 {
3110 int unsignedp1, unsignedpo;
3111 tree primarg0, primarg1, primother;
3112 unsigned int correct_width;
3113
3114 if (operand_equal_p (arg0, arg1, 0))
3115 return 1;
3116
3117 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3118 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3119 return 0;
3120
3121 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3122 and see if the inner values are the same. This removes any
3123 signedness comparison, which doesn't matter here. */
3124 primarg0 = arg0, primarg1 = arg1;
3125 STRIP_NOPS (primarg0);
3126 STRIP_NOPS (primarg1);
3127 if (operand_equal_p (primarg0, primarg1, 0))
3128 return 1;
3129
3130 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3131 actual comparison operand, ARG0.
3132
3133 First throw away any conversions to wider types
3134 already present in the operands. */
3135
3136 primarg1 = get_narrower (arg1, &unsignedp1);
3137 primother = get_narrower (other, &unsignedpo);
3138
3139 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3140 if (unsignedp1 == unsignedpo
3141 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3142 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3143 {
3144 tree type = TREE_TYPE (arg0);
3145
3146 /* Make sure shorter operand is extended the right way
3147 to match the longer operand. */
3148 primarg1 = fold_convert (signed_or_unsigned_type_for
3149 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3150
3151 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3152 return 1;
3153 }
3154
3155 return 0;
3156 }
3157 \f
3158 /* See if ARG is an expression that is either a comparison or is performing
3159 arithmetic on comparisons. The comparisons must only be comparing
3160 two different values, which will be stored in *CVAL1 and *CVAL2; if
3161 they are nonzero it means that some operands have already been found.
3162 No variables may be used anywhere else in the expression except in the
3163 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3164 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3165
3166 If this is true, return 1. Otherwise, return zero. */
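/* Example (an illustrative sketch): for ARG = (x < y) | (x == y) this
   returns 1 with *CVAL1 = x and *CVAL2 = y, since every comparison in
   ARG uses only those two values; for (x < y) | (x == z) it returns 0
   because a third value appears.  */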
3167
3168 static int
3169 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3170 {
3171 enum tree_code code = TREE_CODE (arg);
3172 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3173
3174 /* We can handle some of the tcc_expression cases here. */
3175 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3176 tclass = tcc_unary;
3177 else if (tclass == tcc_expression
3178 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3179 || code == COMPOUND_EXPR))
3180 tclass = tcc_binary;
3181
3182 else if (tclass == tcc_expression && code == SAVE_EXPR
3183 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3184 {
3185 /* If we've already found a CVAL1 or CVAL2, this expression is
3186 	 too complex to handle.  */
3187 if (*cval1 || *cval2)
3188 return 0;
3189
3190 tclass = tcc_unary;
3191 *save_p = 1;
3192 }
3193
3194 switch (tclass)
3195 {
3196 case tcc_unary:
3197 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3198
3199 case tcc_binary:
3200 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3201 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3202 cval1, cval2, save_p));
3203
3204 case tcc_constant:
3205 return 1;
3206
3207 case tcc_expression:
3208 if (code == COND_EXPR)
3209 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3210 cval1, cval2, save_p)
3211 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3212 cval1, cval2, save_p)
3213 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3214 cval1, cval2, save_p));
3215 return 0;
3216
3217 case tcc_comparison:
3218 /* First see if we can handle the first operand, then the second. For
3219 the second operand, we know *CVAL1 can't be zero. It must be that
3220 one side of the comparison is each of the values; test for the
3221 case where this isn't true by failing if the two operands
3222 are the same. */
3223
3224 if (operand_equal_p (TREE_OPERAND (arg, 0),
3225 TREE_OPERAND (arg, 1), 0))
3226 return 0;
3227
3228 if (*cval1 == 0)
3229 *cval1 = TREE_OPERAND (arg, 0);
3230 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3231 ;
3232 else if (*cval2 == 0)
3233 *cval2 = TREE_OPERAND (arg, 0);
3234 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3235 ;
3236 else
3237 return 0;
3238
3239 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3240 ;
3241 else if (*cval2 == 0)
3242 *cval2 = TREE_OPERAND (arg, 1);
3243 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3244 ;
3245 else
3246 return 0;
3247
3248 return 1;
3249
3250 default:
3251 return 0;
3252 }
3253 }
3254 \f
3255 /* ARG is a tree that is known to contain just arithmetic operations and
3256 comparisons. Evaluate the operations in the tree substituting NEW0 for
3257 any occurrence of OLD0 as an operand of a comparison and likewise for
3258 NEW1 and OLD1. */
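/* Example (an illustrative sketch): continuing the twoval_comparison_p
   example above, eval_subst on (x < y) | (x == y) with OLD0 = x,
   NEW0 = 0, OLD1 = y, NEW1 = 1 rebuilds it as (0 < 1) | (0 == 1),
   which the fold_build* calls then reduce to a constant.  */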
3259
3260 static tree
3261 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3262 tree old1, tree new1)
3263 {
3264 tree type = TREE_TYPE (arg);
3265 enum tree_code code = TREE_CODE (arg);
3266 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3267
3268 /* We can handle some of the tcc_expression cases here. */
3269 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3270 tclass = tcc_unary;
3271 else if (tclass == tcc_expression
3272 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3273 tclass = tcc_binary;
3274
3275 switch (tclass)
3276 {
3277 case tcc_unary:
3278 return fold_build1_loc (loc, code, type,
3279 eval_subst (loc, TREE_OPERAND (arg, 0),
3280 old0, new0, old1, new1));
3281
3282 case tcc_binary:
3283 return fold_build2_loc (loc, code, type,
3284 eval_subst (loc, TREE_OPERAND (arg, 0),
3285 old0, new0, old1, new1),
3286 eval_subst (loc, TREE_OPERAND (arg, 1),
3287 old0, new0, old1, new1));
3288
3289 case tcc_expression:
3290 switch (code)
3291 {
3292 case SAVE_EXPR:
3293 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3294 old1, new1);
3295
3296 case COMPOUND_EXPR:
3297 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3298 old1, new1);
3299
3300 case COND_EXPR:
3301 return fold_build3_loc (loc, code, type,
3302 eval_subst (loc, TREE_OPERAND (arg, 0),
3303 old0, new0, old1, new1),
3304 eval_subst (loc, TREE_OPERAND (arg, 1),
3305 old0, new0, old1, new1),
3306 eval_subst (loc, TREE_OPERAND (arg, 2),
3307 old0, new0, old1, new1));
3308 default:
3309 break;
3310 }
3311 /* Fall through - ??? */
3312
3313 case tcc_comparison:
3314 {
3315 tree arg0 = TREE_OPERAND (arg, 0);
3316 tree arg1 = TREE_OPERAND (arg, 1);
3317
3318 /* We need to check both for exact equality and tree equality. The
3319 former will be true if the operand has a side-effect. In that
3320 case, we know the operand occurred exactly once. */
3321
3322 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3323 arg0 = new0;
3324 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3325 arg0 = new1;
3326
3327 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3328 arg1 = new0;
3329 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3330 arg1 = new1;
3331
3332 return fold_build2_loc (loc, code, type, arg0, arg1);
3333 }
3334
3335 default:
3336 return arg;
3337 }
3338 }
3339 \f
3340 /* Return a tree for the case when the result of an expression is RESULT
3341 converted to TYPE and OMITTED was previously an operand of the expression
3342 but is now not needed (e.g., we folded OMITTED * 0).
3343
3344 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3345 the conversion of RESULT to TYPE. */
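/* Example (an illustrative sketch): when f () * 0 folds to 0, the call
   cannot simply be discarded if it has side effects;
   omit_one_operand_loc then yields the COMPOUND_EXPR (f (), 0) so the
   call is still evaluated.  */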
3346
3347 tree
3348 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3349 {
3350 tree t = fold_convert_loc (loc, type, result);
3351
3352 /* If the resulting operand is an empty statement, just return the omitted
3353      statement cast to void.  */
3354 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3355 return build1_loc (loc, NOP_EXPR, void_type_node,
3356 fold_ignored_result (omitted));
3357
3358 if (TREE_SIDE_EFFECTS (omitted))
3359 return build2_loc (loc, COMPOUND_EXPR, type,
3360 fold_ignored_result (omitted), t);
3361
3362 return non_lvalue_loc (loc, t);
3363 }
3364
3365 /* Return a tree for the case when the result of an expression is RESULT
3366 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3367 of the expression but are now not needed.
3368
3369 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3370 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3371 evaluated before OMITTED2. Otherwise, if neither has side effects,
3372 just do the conversion of RESULT to TYPE. */
3373
3374 tree
3375 omit_two_operands_loc (location_t loc, tree type, tree result,
3376 tree omitted1, tree omitted2)
3377 {
3378 tree t = fold_convert_loc (loc, type, result);
3379
3380 if (TREE_SIDE_EFFECTS (omitted2))
3381 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3382 if (TREE_SIDE_EFFECTS (omitted1))
3383 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3384
3385 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3386 }
3387
3388 \f
3389 /* Return a simplified tree node for the truth-negation of ARG. This
3390 never alters ARG itself. We assume that ARG is an operation that
3391 returns a truth value (0 or 1).
3392
3393 FIXME: one would think we would fold the result, but it causes
3394 problems with the dominator optimizer. */
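/* Example (an illustrative sketch): !(a && b) becomes !a || !b via the
   TRUTH_AND_EXPR case below, and !(x < y) becomes x >= y, or x UNGE y
   when NaNs must be honored, via invert_tree_comparison above.  */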
3395
3396 static tree
3397 fold_truth_not_expr (location_t loc, tree arg)
3398 {
3399 tree type = TREE_TYPE (arg);
3400 enum tree_code code = TREE_CODE (arg);
3401 location_t loc1, loc2;
3402
3403 /* If this is a comparison, we can simply invert it, except for
3404 floating-point non-equality comparisons, in which case we just
3405 enclose a TRUTH_NOT_EXPR around what we have. */
3406
3407 if (TREE_CODE_CLASS (code) == tcc_comparison)
3408 {
3409 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3410 if (FLOAT_TYPE_P (op_type)
3411 && flag_trapping_math
3412 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3413 && code != NE_EXPR && code != EQ_EXPR)
3414 return NULL_TREE;
3415
3416 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3417 if (code == ERROR_MARK)
3418 return NULL_TREE;
3419
3420 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3421 TREE_OPERAND (arg, 1));
3422 }
3423
3424 switch (code)
3425 {
3426 case INTEGER_CST:
3427 return constant_boolean_node (integer_zerop (arg), type);
3428
3429 case TRUTH_AND_EXPR:
3430 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3431 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3432 return build2_loc (loc, TRUTH_OR_EXPR, type,
3433 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3434 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3435
3436 case TRUTH_OR_EXPR:
3437 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3438 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3439 return build2_loc (loc, TRUTH_AND_EXPR, type,
3440 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3441 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3442
3443 case TRUTH_XOR_EXPR:
3444 /* Here we can invert either operand. We invert the first operand
3445 unless the second operand is a TRUTH_NOT_EXPR in which case our
3446 result is the XOR of the first operand with the inside of the
3447 negation of the second operand. */
3448
3449 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3450 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3451 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3452 else
3453 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3454 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3455 TREE_OPERAND (arg, 1));
3456
3457 case TRUTH_ANDIF_EXPR:
3458 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3459 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3460 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3461 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3462 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3463
3464 case TRUTH_ORIF_EXPR:
3465 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3466 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3467 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3468 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3469 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3470
3471 case TRUTH_NOT_EXPR:
3472 return TREE_OPERAND (arg, 0);
3473
3474 case COND_EXPR:
3475 {
3476 tree arg1 = TREE_OPERAND (arg, 1);
3477 tree arg2 = TREE_OPERAND (arg, 2);
3478
3479 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3480 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3481
3482 /* A COND_EXPR may have a throw as one operand, which
3483 then has void type. Just leave void operands
3484 as they are. */
3485 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3486 VOID_TYPE_P (TREE_TYPE (arg1))
3487 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3488 VOID_TYPE_P (TREE_TYPE (arg2))
3489 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3490 }
3491
3492 case COMPOUND_EXPR:
3493 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3494 return build2_loc (loc, COMPOUND_EXPR, type,
3495 TREE_OPERAND (arg, 0),
3496 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3497
3498 case NON_LVALUE_EXPR:
3499 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3500 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3501
3502 CASE_CONVERT:
3503 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3504 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3505
3506 /* ... fall through ... */
3507
3508 case FLOAT_EXPR:
3509 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3510 return build1_loc (loc, TREE_CODE (arg), type,
3511 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3512
3513 case BIT_AND_EXPR:
3514 if (!integer_onep (TREE_OPERAND (arg, 1)))
3515 return NULL_TREE;
3516 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3517
3518 case SAVE_EXPR:
3519 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3520
3521 case CLEANUP_POINT_EXPR:
3522 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3523 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3524 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3525
3526 default:
3527 return NULL_TREE;
3528 }
3529 }
3530
3531 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3532 assume that ARG is an operation that returns a truth value (0 or 1
3533 for scalars, 0 or -1 for vectors). Return the folded expression if
3534 folding is successful. Otherwise, return NULL_TREE. */
3535
3536 static tree
3537 fold_invert_truthvalue (location_t loc, tree arg)
3538 {
3539 tree type = TREE_TYPE (arg);
3540 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3541 ? BIT_NOT_EXPR
3542 : TRUTH_NOT_EXPR,
3543 type, arg);
3544 }
3545
3546 /* Return a simplified tree node for the truth-negation of ARG. This
3547 never alters ARG itself. We assume that ARG is an operation that
3548 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3549
3550 tree
3551 invert_truthvalue_loc (location_t loc, tree arg)
3552 {
3553 if (TREE_CODE (arg) == ERROR_MARK)
3554 return arg;
3555
3556 tree type = TREE_TYPE (arg);
3557 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3558 ? BIT_NOT_EXPR
3559 : TRUTH_NOT_EXPR,
3560 type, arg);
3561 }
3562
3563 /* Given a bit-wise operation CODE applied to ARG0 and ARG1, see if both
3564 operands are another bit-wise operation with a common input. If so,
3565 distribute the bit operations to save an operation and possibly two if
3566 constants are involved. For example, convert
3567 (A | B) & (A | C) into A | (B & C)
3568 Further simplification will occur if B and C are constants.
3569
3570 If this optimization cannot be done, 0 will be returned. */
3571
3572 static tree
3573 distribute_bit_expr (location_t loc, enum tree_code code, tree type,
3574 tree arg0, tree arg1)
3575 {
3576 tree common;
3577 tree left, right;
3578
3579 if (TREE_CODE (arg0) != TREE_CODE (arg1)
3580 || TREE_CODE (arg0) == code
3581 || (TREE_CODE (arg0) != BIT_AND_EXPR
3582 && TREE_CODE (arg0) != BIT_IOR_EXPR))
3583 return 0;
3584
3585 if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0), 0))
3586 {
3587 common = TREE_OPERAND (arg0, 0);
3588 left = TREE_OPERAND (arg0, 1);
3589 right = TREE_OPERAND (arg1, 1);
3590 }
3591 else if (operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1), 0))
3592 {
3593 common = TREE_OPERAND (arg0, 0);
3594 left = TREE_OPERAND (arg0, 1);
3595 right = TREE_OPERAND (arg1, 0);
3596 }
3597 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 0), 0))
3598 {
3599 common = TREE_OPERAND (arg0, 1);
3600 left = TREE_OPERAND (arg0, 0);
3601 right = TREE_OPERAND (arg1, 1);
3602 }
3603 else if (operand_equal_p (TREE_OPERAND (arg0, 1), TREE_OPERAND (arg1, 1), 0))
3604 {
3605 common = TREE_OPERAND (arg0, 1);
3606 left = TREE_OPERAND (arg0, 0);
3607 right = TREE_OPERAND (arg1, 0);
3608 }
3609 else
3610 return 0;
3611
3612 common = fold_convert_loc (loc, type, common);
3613 left = fold_convert_loc (loc, type, left);
3614 right = fold_convert_loc (loc, type, right);
3615 return fold_build2_loc (loc, TREE_CODE (arg0), type, common,
3616 fold_build2_loc (loc, code, type, left, right));
3617 }
3618
3619 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3620    with code CODE.  This optimization is unsafe: it can change rounding.  */
3621 static tree
3622 distribute_real_division (location_t loc, enum tree_code code, tree type,
3623 tree arg0, tree arg1)
3624 {
3625 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3626 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3627
3628 /* (A / C) +- (B / C) -> (A +- B) / C. */
3629 if (mul0 == mul1
3630 && operand_equal_p (TREE_OPERAND (arg0, 1),
3631 TREE_OPERAND (arg1, 1), 0))
3632 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3633 fold_build2_loc (loc, code, type,
3634 TREE_OPERAND (arg0, 0),
3635 TREE_OPERAND (arg1, 0)),
3636 TREE_OPERAND (arg0, 1));
3637
3638 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3639 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3640 TREE_OPERAND (arg1, 0), 0)
3641 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3642 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3643 {
3644 REAL_VALUE_TYPE r0, r1;
3645 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3646 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3647 if (!mul0)
3648 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3649 if (!mul1)
3650 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3651 real_arithmetic (&r0, code, &r0, &r1);
3652 return fold_build2_loc (loc, MULT_EXPR, type,
3653 TREE_OPERAND (arg0, 0),
3654 build_real (type, r0));
3655 }
3656
3657 return NULL_TREE;
3658 }
3659 \f
3660 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3661 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero. */
3662
3663 static tree
3664 make_bit_field_ref (location_t loc, tree inner, tree type,
3665 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos, int unsignedp)
3666 {
3667 tree result, bftype;
3668
3669 if (bitpos == 0)
3670 {
3671 tree size = TYPE_SIZE (TREE_TYPE (inner));
3672 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3673 || POINTER_TYPE_P (TREE_TYPE (inner)))
3674 && tree_fits_shwi_p (size)
3675 && tree_to_shwi (size) == bitsize)
3676 return fold_convert_loc (loc, type, inner);
3677 }
3678
3679 bftype = type;
3680 if (TYPE_PRECISION (bftype) != bitsize
3681 || TYPE_UNSIGNED (bftype) == !unsignedp)
3682 bftype = build_nonstandard_integer_type (bitsize, 0);
3683
3684 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3685 size_int (bitsize), bitsize_int (bitpos));
3686
3687 if (bftype != type)
3688 result = fold_convert_loc (loc, type, result);
3689
3690 return result;
3691 }
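
/* Illustration: make_bit_field_ref (loc, obj, utype, 8, 16, 1) builds
   roughly BIT_FIELD_REF <obj, 8, 16> converted to UTYPE, i.e. the eight
   bits of OBJ starting at bit 16, read as an unsigned value.  */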
3692
3693 /* Optimize a bit-field compare.
3694
3695 There are two cases: First is a compare against a constant and the
3696 second is a comparison of two items where the fields are at the same
3697 bit position relative to the start of a chunk (byte, halfword, word)
3698 large enough to contain it. In these cases we can avoid the shift
3699 implicit in bitfield extractions.
3700
3701 For constants, we emit a compare of the shifted constant with the
3702 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3703 compared.  For two fields at the same position, we AND each operand with
3704 the same mask and compare the results of the ANDs.
3705
3706 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3707 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3708 are the left and right operands of the comparison, respectively.
3709
3710 If the optimization described above can be done, we return the resulting
3711 tree. Otherwise we return zero. */
3712
3713 static tree
3714 optimize_bit_field_compare (location_t loc, enum tree_code code,
3715 tree compare_type, tree lhs, tree rhs)
3716 {
3717 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3718 tree type = TREE_TYPE (lhs);
3719 tree unsigned_type;
3720 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3721 machine_mode lmode, rmode, nmode;
3722 int lunsignedp, runsignedp;
3723 int lvolatilep = 0, rvolatilep = 0;
3724 tree linner, rinner = NULL_TREE;
3725 tree mask;
3726 tree offset;
3727
3728 /* Get all the information about the extractions being done. If the bit size
3729 is the same as the size of the underlying object, we aren't doing an
3730 extraction at all and so can do nothing. We also don't want to
3731 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3732 then will no longer be able to replace it. */
3733 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3734 &lunsignedp, &lvolatilep, false);
3735 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3736 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3737 return 0;
3738
3739 if (!const_p)
3740 {
3741 /* If this is not a constant, we can only do something if bit positions,
3742 sizes, and signedness are the same. */
3743 rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3744 &runsignedp, &rvolatilep, false);
3745
3746 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3747 || lunsignedp != runsignedp || offset != 0
3748 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3749 return 0;
3750 }
3751
3752 /* See if we can find a mode to refer to this field. We should be able to,
3753 but fail if we can't. */
3754 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3755 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3756 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3757 TYPE_ALIGN (TREE_TYPE (rinner))),
3758 word_mode, false);
3759 if (nmode == VOIDmode)
3760 return 0;
3761
3762 /* Set an unsigned type of the precision of this mode for the
3763 shifts below. */
3764 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3765
3766 /* Compute the bit position and size for the new reference and our offset
3767 within it. If the new reference is the same size as the original, we
3768 won't optimize anything, so return zero. */
3769 nbitsize = GET_MODE_BITSIZE (nmode);
3770 nbitpos = lbitpos & ~ (nbitsize - 1);
3771 lbitpos -= nbitpos;
3772 if (nbitsize == lbitsize)
3773 return 0;
3774
3775 if (BYTES_BIG_ENDIAN)
3776 lbitpos = nbitsize - lbitsize - lbitpos;
3777
3778 /* Make the mask to be used against the extracted field. */
3779 mask = build_int_cst_type (unsigned_type, -1);
3780 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3781 mask = const_binop (RSHIFT_EXPR, mask,
3782 size_int (nbitsize - lbitsize - lbitpos));
3783
3784 if (! const_p)
3785 /* If not comparing with constant, just rework the comparison
3786 and return. */
3787 return fold_build2_loc (loc, code, compare_type,
3788 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3789 make_bit_field_ref (loc, linner,
3790 unsigned_type,
3791 nbitsize, nbitpos,
3792 1),
3793 mask),
3794 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3795 make_bit_field_ref (loc, rinner,
3796 unsigned_type,
3797 nbitsize, nbitpos,
3798 1),
3799 mask));
3800
3801 /* Otherwise, we are handling the constant case. See if the constant is too
3802 big for the field.  Warn and return a tree for 0 (false) if so.  We do
3803 this not only for its own sake, but to avoid having to test for this
3804 error case below. If we didn't, we might generate wrong code.
3805
3806 For unsigned fields, the constant shifted right by the field length should
3807 be all zero. For signed fields, the high-order bits should agree with
3808 the sign bit. */
3809
3810 if (lunsignedp)
3811 {
3812 if (wi::lrshift (rhs, lbitsize) != 0)
3813 {
3814 warning (0, "comparison is always %d due to width of bit-field",
3815 code == NE_EXPR);
3816 return constant_boolean_node (code == NE_EXPR, compare_type);
3817 }
3818 }
3819 else
3820 {
3821 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3822 if (tem != 0 && tem != -1)
3823 {
3824 warning (0, "comparison is always %d due to width of bit-field",
3825 code == NE_EXPR);
3826 return constant_boolean_node (code == NE_EXPR, compare_type);
3827 }
3828 }
3829
3830 /* Single-bit compares should always be against zero. */
3831 if (lbitsize == 1 && ! integer_zerop (rhs))
3832 {
3833 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3834 rhs = build_int_cst (type, 0);
3835 }
3836
3837 /* Make a new bitfield reference, shift the constant over the
3838 appropriate number of bits, and mask it with the computed mask
3839 (in case this was a signed field).  If the constant changed, build a new one.  */
3840 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1);
3841
3842 rhs = const_binop (BIT_AND_EXPR,
3843 const_binop (LSHIFT_EXPR,
3844 fold_convert_loc (loc, unsigned_type, rhs),
3845 size_int (lbitpos)),
3846 mask);
3847
3848 lhs = build2_loc (loc, code, compare_type,
3849 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3850 return lhs;
3851 }
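
/* A sketch of the constant case above: given

       struct S { unsigned int f : 3; } *p;  ...  p->f == 5  ...

   and assuming the field lands at bit 0 of a 32-bit word on a
   little-endian target, the comparison is rewritten as, in effect,

       (*(unsigned int *) p & 7) == 5

   i.e. a masked word compare with no extraction shift.  */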
3852 \f
3853 /* Subroutine for fold_truth_andor_1: decode a field reference.
3854
3855 If EXP is a comparison reference, we return the innermost reference.
3856
3857 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3858 set to the starting bit number.
3859
3860 If the innermost field can be completely contained in a mode-sized
3861 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3862
3863 *PVOLATILEP is set to 1 if any expression encountered is volatile;
3864 otherwise it is not changed.
3865
3866 *PUNSIGNEDP is set to the signedness of the field.
3867
3868 *PMASK is set to the mask used. This is either contained in a
3869 BIT_AND_EXPR or derived from the width of the field.
3870
3871 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3872
3873 Return 0 if this is not a component reference or is one that we can't
3874 do anything with. */
3875
3876 static tree
3877 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3878 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3879 int *punsignedp, int *pvolatilep,
3880 tree *pmask, tree *pand_mask)
3881 {
3882 tree outer_type = 0;
3883 tree and_mask = 0;
3884 tree mask, inner, offset;
3885 tree unsigned_type;
3886 unsigned int precision;
3887
3888 /* All the optimizations using this function assume integer fields.
3889 There are problems with FP fields since the type_for_size call
3890 below can fail for, e.g., XFmode. */
3891 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3892 return 0;
3893
3894 /* We are interested in the bare arrangement of bits, so strip everything
3895 that doesn't affect the machine mode. However, record the type of the
3896 outermost expression if it may matter below. */
3897 if (CONVERT_EXPR_P (exp)
3898 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3899 outer_type = TREE_TYPE (exp);
3900 STRIP_NOPS (exp);
3901
3902 if (TREE_CODE (exp) == BIT_AND_EXPR)
3903 {
3904 and_mask = TREE_OPERAND (exp, 1);
3905 exp = TREE_OPERAND (exp, 0);
3906 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
3907 if (TREE_CODE (and_mask) != INTEGER_CST)
3908 return 0;
3909 }
3910
3911 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
3912 punsignedp, pvolatilep, false);
3913 if ((inner == exp && and_mask == 0)
3914 || *pbitsize < 0 || offset != 0
3915 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
3916 return 0;
3917
3918 /* If the number of bits in the reference is the same as the bitsize of
3919 the outer type, then the outer type gives the signedness. Otherwise
3920 (in case of a small bitfield) the signedness is unchanged. */
3921 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
3922 *punsignedp = TYPE_UNSIGNED (outer_type);
3923
3924 /* Compute the mask to access the bitfield. */
3925 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
3926 precision = TYPE_PRECISION (unsigned_type);
3927
3928 mask = build_int_cst_type (unsigned_type, -1);
3929
3930 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3931 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
3932
3933 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
3934 if (and_mask != 0)
3935 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3936 fold_convert_loc (loc, unsigned_type, and_mask), mask);
3937
3938 *pmask = mask;
3939 *pand_mask = and_mask;
3940 return inner;
3941 }
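
/* For instance, for EXP of the form p->b & 7 where p->b is an 8-bit
   unsigned field, this returns the underlying object, sets *PBITSIZE
   to 8 and *PAND_MASK to 7, and sets *PMASK to the field mask ANDed
   with 7, i.e. 7 again.  */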
3942
3943 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
3944 bit positions and the type of MASK is signed.  */
3945
3946 static int
3947 all_ones_mask_p (const_tree mask, unsigned int size)
3948 {
3949 tree type = TREE_TYPE (mask);
3950 unsigned int precision = TYPE_PRECISION (type);
3951
3952 /* If this function returns true when the type of the mask is
3953 UNSIGNED, then there will be errors. In particular see
3954 gcc.c-torture/execute/990326-1.c. There does not appear to be
3955 any documentation paper trail as to why this is so. But the pre
3956 wide-int worked with that restriction and it has been preserved
3957 here. */
3958 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
3959 return false;
3960
3961 return wi::mask (size, false, precision) == mask;
3962 }
3963
3964 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
3965 represents the sign bit of EXP's type. If EXP represents a sign
3966 or zero extension, also test VAL against the unextended type.
3967 The return value is the (sub)expression whose sign bit is VAL,
3968 or NULL_TREE otherwise. */
3969
3970 tree
3971 sign_bit_p (tree exp, const_tree val)
3972 {
3973 int width;
3974 tree t;
3975
3976 /* Tree EXP must have an integral type. */
3977 t = TREE_TYPE (exp);
3978 if (! INTEGRAL_TYPE_P (t))
3979 return NULL_TREE;
3980
3981 /* Tree VAL must be an integer constant. */
3982 if (TREE_CODE (val) != INTEGER_CST
3983 || TREE_OVERFLOW (val))
3984 return NULL_TREE;
3985
3986 width = TYPE_PRECISION (t);
3987 if (wi::only_sign_bit_p (val, width))
3988 return exp;
3989
3990 /* Handle extension from a narrower type. */
3991 if (TREE_CODE (exp) == NOP_EXPR
3992 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
3993 return sign_bit_p (TREE_OPERAND (exp, 0), val);
3994
3995 return NULL_TREE;
3996 }
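
/* E.g. with 32-bit int, sign_bit_p returns EXP itself only for
   VAL == 0x80000000; for EXP of the form (int) c with an 8-bit
   signed C, VAL == 0x80 makes it return C instead.  */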
3997
3998 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
3999 to be evaluated unconditionally. */
4000
4001 static int
4002 simple_operand_p (const_tree exp)
4003 {
4004 /* Strip any conversions that don't change the machine mode. */
4005 STRIP_NOPS (exp);
4006
4007 return (CONSTANT_CLASS_P (exp)
4008 || TREE_CODE (exp) == SSA_NAME
4009 || (DECL_P (exp)
4010 && ! TREE_ADDRESSABLE (exp)
4011 && ! TREE_THIS_VOLATILE (exp)
4012 && ! DECL_NONLOCAL (exp)
4013 /* Don't regard global variables as simple. They may be
4014 allocated in ways unknown to the compiler (shared memory,
4015 #pragma weak, etc). */
4016 && ! TREE_PUBLIC (exp)
4017 && ! DECL_EXTERNAL (exp)
4018 /* Weakrefs are not safe to be read, since they can be NULL.
4019 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4020 have DECL_WEAK flag set. */
4021 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4022 /* Loading a static variable is unduly expensive, but global
4023 registers aren't expensive. */
4024 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4025 }
4026
4027 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4028 to be evaluated unconditionally.
4029 In addition to simple_operand_p, we assume that comparisons, conversions,
4030 and logic-not operations are simple, if their operands are simple, too. */
4031
4032 static bool
4033 simple_operand_p_2 (tree exp)
4034 {
4035 enum tree_code code;
4036
4037 if (TREE_SIDE_EFFECTS (exp)
4038 || tree_could_trap_p (exp))
4039 return false;
4040
4041 while (CONVERT_EXPR_P (exp))
4042 exp = TREE_OPERAND (exp, 0);
4043
4044 code = TREE_CODE (exp);
4045
4046 if (TREE_CODE_CLASS (code) == tcc_comparison)
4047 return (simple_operand_p (TREE_OPERAND (exp, 0))
4048 && simple_operand_p (TREE_OPERAND (exp, 1)));
4049
4050 if (code == TRUTH_NOT_EXPR)
4051 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4052
4053 return simple_operand_p (exp);
4054 }
4055
4056 \f
4057 /* The following functions are subroutines to fold_range_test and allow it to
4058 try to change a logical combination of comparisons into a range test.
4059
4060 For example, both
4061 X == 2 || X == 3 || X == 4 || X == 5
4062 and
4063 X >= 2 && X <= 5
4064 are converted to
4065 (unsigned) (X - 2) <= 3
4066
4067 We describe each set of comparisons as being either inside or outside
4068 a range, using a variable named like IN_P, and then describe the
4069 range with a lower and upper bound. If one of the bounds is omitted,
4070 it represents either the highest or lowest value of the type.
4071
4072 In the comments below, we represent a range by two numbers in brackets
4073 preceded by a "+" to designate being inside that range, or a "-" to
4074 designate being outside that range, so the condition can be inverted by
4075 flipping the prefix. An omitted bound is represented by a "-". For
4076 example, "- [-, 10]" means being outside the range starting at the lowest
4077 possible value and ending at 10, in other words, being greater than 10.
4078 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4079 always false.
4080
4081 We set up things so that the missing bounds are handled in a consistent
4082 manner so neither a missing bound nor "true" and "false" need to be
4083 handled using a special case. */
4084
4085 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4086 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4087 and UPPER1_P are nonzero if the respective argument is an upper bound
4088 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4089 must be specified for a comparison. ARG1 will be converted to ARG0's
4090 type if both are specified. */
4091
4092 static tree
4093 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4094 tree arg1, int upper1_p)
4095 {
4096 tree tem;
4097 int result;
4098 int sgn0, sgn1;
4099
4100 /* If neither arg represents infinity, do the normal operation.
4101 Else, if not a comparison, return infinity. Else handle the special
4102 comparison rules. Note that most of the cases below won't occur, but
4103 are handled for consistency. */
4104
4105 if (arg0 != 0 && arg1 != 0)
4106 {
4107 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4108 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4109 STRIP_NOPS (tem);
4110 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4111 }
4112
4113 if (TREE_CODE_CLASS (code) != tcc_comparison)
4114 return 0;
4115
4116 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4117 for neither. In real maths, we cannot assume open ended ranges are
4118 the same. But, this is computer arithmetic, where numbers are finite.
4119 We can therefore replace any unbounded end of a range with a value
4120 Z, Z being greater than any representable number.  This permits
4121 us to treat unbounded ranges as equal. */
4122 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4123 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4124 switch (code)
4125 {
4126 case EQ_EXPR:
4127 result = sgn0 == sgn1;
4128 break;
4129 case NE_EXPR:
4130 result = sgn0 != sgn1;
4131 break;
4132 case LT_EXPR:
4133 result = sgn0 < sgn1;
4134 break;
4135 case LE_EXPR:
4136 result = sgn0 <= sgn1;
4137 break;
4138 case GT_EXPR:
4139 result = sgn0 > sgn1;
4140 break;
4141 case GE_EXPR:
4142 result = sgn0 >= sgn1;
4143 break;
4144 default:
4145 gcc_unreachable ();
4146 }
4147
4148 return constant_boolean_node (result, type);
4149 }
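
/* One consequence of the rules above: a missing upper bound acts as
   +Z (SGN == 1) and a missing lower bound as -Z (SGN == -1), so e.g.
   comparing a missing upper bound against any finite constant with
   LT_EXPR yields false.  */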
4150 \f
4151 /* Helper routine for make_range. Perform one step for it, return
4152 new expression if the loop should continue or NULL_TREE if it should
4153 stop. */
4154
4155 tree
4156 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4157 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4158 bool *strict_overflow_p)
4159 {
4160 tree arg0_type = TREE_TYPE (arg0);
4161 tree n_low, n_high, low = *p_low, high = *p_high;
4162 int in_p = *p_in_p, n_in_p;
4163
4164 switch (code)
4165 {
4166 case TRUTH_NOT_EXPR:
4167 /* We can only do something if the range is testing for zero. */
4168 if (low == NULL_TREE || high == NULL_TREE
4169 || ! integer_zerop (low) || ! integer_zerop (high))
4170 return NULL_TREE;
4171 *p_in_p = ! in_p;
4172 return arg0;
4173
4174 case EQ_EXPR: case NE_EXPR:
4175 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4176 /* We can only do something if the range is testing for zero
4177 and if the second operand is an integer constant. Note that
4178 saying something is "in" the range we make is done by
4179 complementing IN_P, since it will be set in the initial case of
4180 being not equal to zero; "out" is leaving it alone. */
4181 if (low == NULL_TREE || high == NULL_TREE
4182 || ! integer_zerop (low) || ! integer_zerop (high)
4183 || TREE_CODE (arg1) != INTEGER_CST)
4184 return NULL_TREE;
4185
4186 switch (code)
4187 {
4188 case NE_EXPR: /* - [c, c] */
4189 low = high = arg1;
4190 break;
4191 case EQ_EXPR: /* + [c, c] */
4192 in_p = ! in_p, low = high = arg1;
4193 break;
4194 case GT_EXPR: /* - [-, c] */
4195 low = 0, high = arg1;
4196 break;
4197 case GE_EXPR: /* + [c, -] */
4198 in_p = ! in_p, low = arg1, high = 0;
4199 break;
4200 case LT_EXPR: /* - [c, -] */
4201 low = arg1, high = 0;
4202 break;
4203 case LE_EXPR: /* + [-, c] */
4204 in_p = ! in_p, low = 0, high = arg1;
4205 break;
4206 default:
4207 gcc_unreachable ();
4208 }
4209
4210 /* If this is an unsigned comparison, we also know that EXP is
4211 greater than or equal to zero. We base the range tests we make
4212 on that fact, so we record it here so we can parse existing
4213 range tests. We test arg0_type since often the return type
4214 of, e.g. EQ_EXPR, is boolean. */
4215 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4216 {
4217 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4218 in_p, low, high, 1,
4219 build_int_cst (arg0_type, 0),
4220 NULL_TREE))
4221 return NULL_TREE;
4222
4223 in_p = n_in_p, low = n_low, high = n_high;
4224
4225 /* If the high bound is missing, but we have a nonzero low
4226 bound, reverse the range so it goes from zero to the low bound
4227 minus 1. */
4228 if (high == 0 && low && ! integer_zerop (low))
4229 {
4230 in_p = ! in_p;
4231 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4232 build_int_cst (TREE_TYPE (low), 1), 0);
4233 low = build_int_cst (arg0_type, 0);
4234 }
4235 }
4236
4237 *p_low = low;
4238 *p_high = high;
4239 *p_in_p = in_p;
4240 return arg0;
4241
4242 case NEGATE_EXPR:
4243 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4244 low and high are non-NULL, then normalize will DTRT. */
4245 if (!TYPE_UNSIGNED (arg0_type)
4246 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4247 {
4248 if (low == NULL_TREE)
4249 low = TYPE_MIN_VALUE (arg0_type);
4250 if (high == NULL_TREE)
4251 high = TYPE_MAX_VALUE (arg0_type);
4252 }
4253
4254 /* (-x) IN [a,b] -> x in [-b, -a] */
4255 n_low = range_binop (MINUS_EXPR, exp_type,
4256 build_int_cst (exp_type, 0),
4257 0, high, 1);
4258 n_high = range_binop (MINUS_EXPR, exp_type,
4259 build_int_cst (exp_type, 0),
4260 0, low, 0);
4261 if (n_high != 0 && TREE_OVERFLOW (n_high))
4262 return NULL_TREE;
4263 goto normalize;
4264
4265 case BIT_NOT_EXPR:
4266 /* ~ X -> -X - 1 */
4267 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4268 build_int_cst (exp_type, 1));
4269
4270 case PLUS_EXPR:
4271 case MINUS_EXPR:
4272 if (TREE_CODE (arg1) != INTEGER_CST)
4273 return NULL_TREE;
4274
4275 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4276 move a constant to the other side. */
4277 if (!TYPE_UNSIGNED (arg0_type)
4278 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4279 return NULL_TREE;
4280
4281 /* If EXP is signed, any overflow in the computation is undefined,
4282 so we don't worry about it so long as our computations on
4283 the bounds don't overflow. For unsigned, overflow is defined
4284 and this is exactly the right thing. */
4285 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4286 arg0_type, low, 0, arg1, 0);
4287 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4288 arg0_type, high, 1, arg1, 0);
4289 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4290 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4291 return NULL_TREE;
4292
4293 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4294 *strict_overflow_p = true;
4295
4296 normalize:
4297 /* Check for an unsigned range which has wrapped around the maximum
4298 value thus making n_high < n_low, and normalize it. */
4299 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4300 {
4301 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4302 build_int_cst (TREE_TYPE (n_high), 1), 0);
4303 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4304 build_int_cst (TREE_TYPE (n_low), 1), 0);
4305
4306 /* If the range is of the form +/- [ x+1, x ], we won't
4307 be able to normalize it. But then, it represents the
4308 whole range or the empty set, so make it
4309 +/- [ -, - ]. */
4310 if (tree_int_cst_equal (n_low, low)
4311 && tree_int_cst_equal (n_high, high))
4312 low = high = 0;
4313 else
4314 in_p = ! in_p;
4315 }
4316 else
4317 low = n_low, high = n_high;
4318
4319 *p_low = low;
4320 *p_high = high;
4321 *p_in_p = in_p;
4322 return arg0;
4323
4324 CASE_CONVERT:
4325 case NON_LVALUE_EXPR:
4326 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4327 return NULL_TREE;
4328
4329 if (! INTEGRAL_TYPE_P (arg0_type)
4330 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4331 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4332 return NULL_TREE;
4333
4334 n_low = low, n_high = high;
4335
4336 if (n_low != 0)
4337 n_low = fold_convert_loc (loc, arg0_type, n_low);
4338
4339 if (n_high != 0)
4340 n_high = fold_convert_loc (loc, arg0_type, n_high);
4341
4342 /* If we're converting arg0 from an unsigned type to exp,
4343 a signed type, we will be doing the comparison as unsigned.
4344 The tests above have already verified that LOW and HIGH
4345 are both positive.
4346
4347 So we have to ensure that we will handle large unsigned
4348 values the same way that the current signed bounds treat
4349 negative values. */
4350
4351 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4352 {
4353 tree high_positive;
4354 tree equiv_type;
4355 /* For fixed-point modes, we need to pass the saturating flag
4356 as the 2nd parameter. */
4357 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4358 equiv_type
4359 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4360 TYPE_SATURATING (arg0_type));
4361 else
4362 equiv_type
4363 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4364
4365 /* A range without an upper bound is, naturally, unbounded.
4366 Since convert would have cropped a very large value, use
4367 the max value for the destination type. */
4368 high_positive
4369 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4370 : TYPE_MAX_VALUE (arg0_type);
4371
4372 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4373 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4374 fold_convert_loc (loc, arg0_type,
4375 high_positive),
4376 build_int_cst (arg0_type, 1));
4377
4378 /* If the low bound is specified, "and" the range with the
4379 range for which the original unsigned value will be
4380 positive. */
4381 if (low != 0)
4382 {
4383 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4384 1, fold_convert_loc (loc, arg0_type,
4385 integer_zero_node),
4386 high_positive))
4387 return NULL_TREE;
4388
4389 in_p = (n_in_p == in_p);
4390 }
4391 else
4392 {
4393 /* Otherwise, "or" the range with the range of the input
4394 that will be interpreted as negative. */
4395 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4396 1, fold_convert_loc (loc, arg0_type,
4397 integer_zero_node),
4398 high_positive))
4399 return NULL_TREE;
4400
4401 in_p = (in_p != n_in_p);
4402 }
4403 }
4404
4405 *p_low = n_low;
4406 *p_high = n_high;
4407 *p_in_p = in_p;
4408 return arg0;
4409
4410 default:
4411 return NULL_TREE;
4412 }
4413 }
4414
4415 /* Given EXP, a logical expression, set the range it is testing into
4416 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4417 actually being tested. *PLOW and *PHIGH will be made of the same
4418 type as the returned expression. If EXP is not a comparison, we
4419 will most likely not be returning a useful value and range. Set
4420 *STRICT_OVERFLOW_P to true if the return value is only valid
4421 because signed overflow is undefined; otherwise, do not change
4422 *STRICT_OVERFLOW_P. */
4423
4424 tree
4425 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4426 bool *strict_overflow_p)
4427 {
4428 enum tree_code code;
4429 tree arg0, arg1 = NULL_TREE;
4430 tree exp_type, nexp;
4431 int in_p;
4432 tree low, high;
4433 location_t loc = EXPR_LOCATION (exp);
4434
4435 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4436 and see if we can refine the range.  Some of the cases handled by
4437 make_range_step may not happen, but it doesn't seem worth worrying
4438 about this.  We keep iterating as long as make_range_step refines
4439 the range; once it returns NULL_TREE we stop.  */
4440
4441 in_p = 0;
4442 low = high = build_int_cst (TREE_TYPE (exp), 0);
4443
4444 while (1)
4445 {
4446 code = TREE_CODE (exp);
4447 exp_type = TREE_TYPE (exp);
4448 arg0 = NULL_TREE;
4449
4450 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4451 {
4452 if (TREE_OPERAND_LENGTH (exp) > 0)
4453 arg0 = TREE_OPERAND (exp, 0);
4454 if (TREE_CODE_CLASS (code) == tcc_binary
4455 || TREE_CODE_CLASS (code) == tcc_comparison
4456 || (TREE_CODE_CLASS (code) == tcc_expression
4457 && TREE_OPERAND_LENGTH (exp) > 1))
4458 arg1 = TREE_OPERAND (exp, 1);
4459 }
4460 if (arg0 == NULL_TREE)
4461 break;
4462
4463 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4464 &high, &in_p, strict_overflow_p);
4465 if (nexp == NULL_TREE)
4466 break;
4467 exp = nexp;
4468 }
4469
4470 /* If EXP is a constant, we can evaluate whether this is true or false. */
4471 if (TREE_CODE (exp) == INTEGER_CST)
4472 {
4473 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4474 exp, 0, low, 0))
4475 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4476 exp, 1, high, 1)));
4477 low = high = 0;
4478 exp = 0;
4479 }
4480
4481 *pin_p = in_p, *plow = low, *phigh = high;
4482 return exp;
4483 }
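
/* Worked examples: for EXP equal to x > 4 with signed X, the loop
   leaves IN_P == 0 and the range [-, 4], i.e. "X outside [min, 4]".
   For x <= 3 with unsigned X it yields IN_P == 1 and, after the
   unsigned adjustment in make_range_step, the range [0, 3].  */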
4484 \f
4485 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4486 type, TYPE, return an expression to test if EXP is in (or out of, depending
4487 on IN_P) the range. Return 0 if the test couldn't be created. */
4488
4489 tree
4490 build_range_check (location_t loc, tree type, tree exp, int in_p,
4491 tree low, tree high)
4492 {
4493 tree etype = TREE_TYPE (exp), value;
4494
4495 #ifdef HAVE_canonicalize_funcptr_for_compare
4496 /* Disable this optimization for function pointer expressions
4497 on targets that require function pointer canonicalization. */
4498 if (HAVE_canonicalize_funcptr_for_compare
4499 && TREE_CODE (etype) == POINTER_TYPE
4500 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4501 return NULL_TREE;
4502 #endif
4503
4504 if (! in_p)
4505 {
4506 value = build_range_check (loc, type, exp, 1, low, high);
4507 if (value != 0)
4508 return invert_truthvalue_loc (loc, value);
4509
4510 return 0;
4511 }
4512
4513 if (low == 0 && high == 0)
4514 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4515
4516 if (low == 0)
4517 return fold_build2_loc (loc, LE_EXPR, type, exp,
4518 fold_convert_loc (loc, etype, high));
4519
4520 if (high == 0)
4521 return fold_build2_loc (loc, GE_EXPR, type, exp,
4522 fold_convert_loc (loc, etype, low));
4523
4524 if (operand_equal_p (low, high, 0))
4525 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4526 fold_convert_loc (loc, etype, low));
4527
4528 if (integer_zerop (low))
4529 {
4530 if (! TYPE_UNSIGNED (etype))
4531 {
4532 etype = unsigned_type_for (etype);
4533 high = fold_convert_loc (loc, etype, high);
4534 exp = fold_convert_loc (loc, etype, exp);
4535 }
4536 return build_range_check (loc, type, exp, 1, 0, high);
4537 }
4538
4539 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4540 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4541 {
4542 int prec = TYPE_PRECISION (etype);
4543
4544 if (wi::mask (prec - 1, false, prec) == high)
4545 {
4546 if (TYPE_UNSIGNED (etype))
4547 {
4548 tree signed_etype = signed_type_for (etype);
4549 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4550 etype
4551 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4552 else
4553 etype = signed_etype;
4554 exp = fold_convert_loc (loc, etype, exp);
4555 }
4556 return fold_build2_loc (loc, GT_EXPR, type, exp,
4557 build_int_cst (etype, 0));
4558 }
4559 }
4560
4561 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4562 This requires wrap-around arithmetic for the type of the expression.
4563 First make sure that arithmetic in this type is valid, then make sure
4564 that it wraps around. */
4565 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4566 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4567 TYPE_UNSIGNED (etype));
4568
4569 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4570 {
4571 tree utype, minv, maxv;
4572
4573 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4574 for the type in question, as we rely on this here. */
4575 utype = unsigned_type_for (etype);
4576 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4577 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4578 build_int_cst (TREE_TYPE (maxv), 1), 1);
4579 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4580
4581 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4582 minv, 1, maxv, 1)))
4583 etype = utype;
4584 else
4585 return 0;
4586 }
4587
4588 high = fold_convert_loc (loc, etype, high);
4589 low = fold_convert_loc (loc, etype, low);
4590 exp = fold_convert_loc (loc, etype, exp);
4591
4592 value = const_binop (MINUS_EXPR, high, low);
4593
4595 if (POINTER_TYPE_P (etype))
4596 {
4597 if (value != 0 && !TREE_OVERFLOW (value))
4598 {
4599 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4600 return build_range_check (loc, type,
4601 fold_build_pointer_plus_loc (loc, exp, low),
4602 1, build_int_cst (etype, 0), value);
4603 }
4604 return 0;
4605 }
4606
4607 if (value != 0 && !TREE_OVERFLOW (value))
4608 return build_range_check (loc, type,
4609 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4610 1, build_int_cst (etype, 0), value);
4611
4612 return 0;
4613 }
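
/* For instance, a test that signed X is in [2, 5] cannot rely on signed
   wrap-around, so the code above switches to the unsigned type and
   builds, in effect,

       (unsigned int) x - 2 <= 3

   which is true exactly when 2 <= x && x <= 5.  */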
4614 \f
4615 /* Return the predecessor of VAL in its type, handling the infinite case. */
4616
4617 static tree
4618 range_predecessor (tree val)
4619 {
4620 tree type = TREE_TYPE (val);
4621
4622 if (INTEGRAL_TYPE_P (type)
4623 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4624 return 0;
4625 else
4626 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4627 build_int_cst (TREE_TYPE (val), 1), 0);
4628 }
4629
4630 /* Return the successor of VAL in its type, handling the infinite case. */
4631
4632 static tree
4633 range_successor (tree val)
4634 {
4635 tree type = TREE_TYPE (val);
4636
4637 if (INTEGRAL_TYPE_P (type)
4638 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4639 return 0;
4640 else
4641 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4642 build_int_cst (TREE_TYPE (val), 1), 0);
4643 }
4644
4645 /* Given two ranges, see if we can merge them into one. Return 1 if we
4646 can, 0 if we can't. Set the output range into the specified parameters. */
4647
4648 bool
4649 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4650 tree high0, int in1_p, tree low1, tree high1)
4651 {
4652 int no_overlap;
4653 int subset;
4654 int temp;
4655 tree tem;
4656 int in_p;
4657 tree low, high;
4658 int lowequal = ((low0 == 0 && low1 == 0)
4659 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4660 low0, 0, low1, 0)));
4661 int highequal = ((high0 == 0 && high1 == 0)
4662 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4663 high0, 1, high1, 1)));
4664
4665 /* Make range 0 be the range that starts first, or ends last if they
4666 start at the same value; swap them if that is not the case.  */
4667 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4668 low0, 0, low1, 0))
4669 || (lowequal
4670 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4671 high1, 1, high0, 1))))
4672 {
4673 temp = in0_p, in0_p = in1_p, in1_p = temp;
4674 tem = low0, low0 = low1, low1 = tem;
4675 tem = high0, high0 = high1, high1 = tem;
4676 }
4677
4678 /* Now flag two cases, whether the ranges are disjoint or whether the
4679 second range is totally subsumed in the first. Note that the tests
4680 below are simplified by the ones above. */
4681 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4682 high0, 1, low1, 0));
4683 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4684 high1, 1, high0, 1));
4685
4686 /* We now have four cases, depending on whether we are including or
4687 excluding the two ranges. */
4688 if (in0_p && in1_p)
4689 {
4690 /* If they don't overlap, the result is false. If the second range
4691 is a subset it is the result. Otherwise, the range is from the start
4692 of the second to the end of the first. */
4693 if (no_overlap)
4694 in_p = 0, low = high = 0;
4695 else if (subset)
4696 in_p = 1, low = low1, high = high1;
4697 else
4698 in_p = 1, low = low1, high = high0;
4699 }
4700
4701 else if (in0_p && ! in1_p)
4702 {
4703 /* If they don't overlap, the result is the first range. If they are
4704 equal, the result is false. If the second range is a subset of the
4705 first, and the ranges begin at the same place, we go from just after
4706 the end of the second range to the end of the first. If the second
4707 range is not a subset of the first, or if it is a subset and both
4708 ranges end at the same place, the range starts at the start of the
4709 first range and ends just before the second range.
4710 Otherwise, we can't describe this as a single range. */
4711 if (no_overlap)
4712 in_p = 1, low = low0, high = high0;
4713 else if (lowequal && highequal)
4714 in_p = 0, low = high = 0;
4715 else if (subset && lowequal)
4716 {
4717 low = range_successor (high1);
4718 high = high0;
4719 in_p = 1;
4720 if (low == 0)
4721 {
4722 /* We are in the weird situation where high0 > high1 but
4723 high1 has no successor. Punt. */
4724 return 0;
4725 }
4726 }
4727 else if (! subset || highequal)
4728 {
4729 low = low0;
4730 high = range_predecessor (low1);
4731 in_p = 1;
4732 if (high == 0)
4733 {
4734 /* low0 < low1 but low1 has no predecessor. Punt. */
4735 return 0;
4736 }
4737 }
4738 else
4739 return 0;
4740 }
4741
4742 else if (! in0_p && in1_p)
4743 {
4744 /* If they don't overlap, the result is the second range. If the second
4745 is a subset of the first, the result is false. Otherwise,
4746 the range starts just after the first range and ends at the
4747 end of the second. */
4748 if (no_overlap)
4749 in_p = 1, low = low1, high = high1;
4750 else if (subset || highequal)
4751 in_p = 0, low = high = 0;
4752 else
4753 {
4754 low = range_successor (high0);
4755 high = high1;
4756 in_p = 1;
4757 if (low == 0)
4758 {
4759 /* high1 > high0 but high0 has no successor. Punt. */
4760 return 0;
4761 }
4762 }
4763 }
4764
4765 else
4766 {
4767 /* The case where we are excluding both ranges. Here the complex case
4768 is if they don't overlap. In that case, the only time we have a
4769 range is if they are adjacent. If the second is a subset of the
4770 first, the result is the first. Otherwise, the range to exclude
4771 starts at the beginning of the first range and ends at the end of the
4772 second. */
4773 if (no_overlap)
4774 {
4775 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4776 range_successor (high0),
4777 1, low1, 0)))
4778 in_p = 0, low = low0, high = high1;
4779 else
4780 {
4781 /* Canonicalize - [min, x] into - [-, x]. */
4782 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4783 switch (TREE_CODE (TREE_TYPE (low0)))
4784 {
4785 case ENUMERAL_TYPE:
4786 if (TYPE_PRECISION (TREE_TYPE (low0))
4787 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4788 break;
4789 /* FALLTHROUGH */
4790 case INTEGER_TYPE:
4791 if (tree_int_cst_equal (low0,
4792 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4793 low0 = 0;
4794 break;
4795 case POINTER_TYPE:
4796 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4797 && integer_zerop (low0))
4798 low0 = 0;
4799 break;
4800 default:
4801 break;
4802 }
4803
4804 /* Canonicalize - [x, max] into - [x, -]. */
4805 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4806 switch (TREE_CODE (TREE_TYPE (high1)))
4807 {
4808 case ENUMERAL_TYPE:
4809 if (TYPE_PRECISION (TREE_TYPE (high1))
4810 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4811 break;
4812 /* FALLTHROUGH */
4813 case INTEGER_TYPE:
4814 if (tree_int_cst_equal (high1,
4815 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4816 high1 = 0;
4817 break;
4818 case POINTER_TYPE:
4819 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4820 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4821 high1, 1,
4822 build_int_cst (TREE_TYPE (high1), 1),
4823 1)))
4824 high1 = 0;
4825 break;
4826 default:
4827 break;
4828 }
4829
4830 /* The ranges might be also adjacent between the maximum and
4831 minimum values of the given type. For
4832 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4833 return + [x + 1, y - 1]. */
4834 if (low0 == 0 && high1 == 0)
4835 {
4836 low = range_successor (high0);
4837 high = range_predecessor (low1);
4838 if (low == 0 || high == 0)
4839 return 0;
4840
4841 in_p = 1;
4842 }
4843 else
4844 return 0;
4845 }
4846 }
4847 else if (subset)
4848 in_p = 0, low = low0, high = high0;
4849 else
4850 in_p = 0, low = low0, high = high1;
4851 }
4852
4853 *pin_p = in_p, *plow = low, *phigh = high;
4854 return 1;
4855 }
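
/* Two examples of the cases above: merging + [2, 5] with + [4, 9]
   (both included, overlapping, neither a subset) yields + [4, 5].
   Merging - [2, 2] with - [3, 3] hits the adjacency check in the
   excluding-both case and yields - [2, 3]; after inversion in
   fold_range_test this is what turns x == 2 || x == 3 into a single
   test of X against [2, 3].  */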
4856 \f
4857
4858 /* Subroutine of fold, looking inside expressions of the form
4859 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4860 of the COND_EXPR. This function is being used also to optimize
4861 A op B ? C : A, by reversing the comparison first.
4862
4863 Return a folded expression whose code is not a COND_EXPR
4864 anymore, or NULL_TREE if no folding opportunity is found. */
4865
4866 static tree
4867 fold_cond_expr_with_comparison (location_t loc, tree type,
4868 tree arg0, tree arg1, tree arg2)
4869 {
4870 enum tree_code comp_code = TREE_CODE (arg0);
4871 tree arg00 = TREE_OPERAND (arg0, 0);
4872 tree arg01 = TREE_OPERAND (arg0, 1);
4873 tree arg1_type = TREE_TYPE (arg1);
4874 tree tem;
4875
4876 STRIP_NOPS (arg1);
4877 STRIP_NOPS (arg2);
4878
4879 /* If we have A op 0 ? A : -A, consider applying the following
4880 transformations:
4881
4882 A == 0? A : -A same as -A
4883 A != 0? A : -A same as A
4884 A >= 0? A : -A same as abs (A)
4885 A > 0? A : -A same as abs (A)
4886 A <= 0? A : -A same as -abs (A)
4887 A < 0? A : -A same as -abs (A)
4888
4889 None of these transformations work for modes with signed
4890 zeros. If A is +/-0, the first two transformations will
4891 change the sign of the result (from +0 to -0, or vice
4892 versa). The last four will fix the sign of the result,
4893 even though the original expressions could be positive or
4894 negative, depending on the sign of A.
4895
4896 Note that all these transformations are correct if A is
4897 NaN, since the two alternatives (A and -A) are also NaNs. */
4898 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4899 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4900 ? real_zerop (arg01)
4901 : integer_zerop (arg01))
4902 && ((TREE_CODE (arg2) == NEGATE_EXPR
4903 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4904 /* In the case that A is of the form X-Y, '-A' (arg2) may
4905 have already been folded to Y-X, check for that. */
4906 || (TREE_CODE (arg1) == MINUS_EXPR
4907 && TREE_CODE (arg2) == MINUS_EXPR
4908 && operand_equal_p (TREE_OPERAND (arg1, 0),
4909 TREE_OPERAND (arg2, 1), 0)
4910 && operand_equal_p (TREE_OPERAND (arg1, 1),
4911 TREE_OPERAND (arg2, 0), 0))))
4912 switch (comp_code)
4913 {
4914 case EQ_EXPR:
4915 case UNEQ_EXPR:
4916 tem = fold_convert_loc (loc, arg1_type, arg1);
4917 return pedantic_non_lvalue_loc (loc,
4918 fold_convert_loc (loc, type,
4919 negate_expr (tem)));
4920 case NE_EXPR:
4921 case LTGT_EXPR:
4922 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4923 case UNGE_EXPR:
4924 case UNGT_EXPR:
4925 if (flag_trapping_math)
4926 break;
4927 /* Fall through. */
4928 case GE_EXPR:
4929 case GT_EXPR:
4930 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4931 arg1 = fold_convert_loc (loc, signed_type_for
4932 (TREE_TYPE (arg1)), arg1);
4933 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4934 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
4935 case UNLE_EXPR:
4936 case UNLT_EXPR:
4937 if (flag_trapping_math)
4938 break;
/* Fall through.  */
4939 case LE_EXPR:
4940 case LT_EXPR:
4941 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
4942 arg1 = fold_convert_loc (loc, signed_type_for
4943 (TREE_TYPE (arg1)), arg1);
4944 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
4945 return negate_expr (fold_convert_loc (loc, type, tem));
4946 default:
4947 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
4948 break;
4949 }
4950
4951 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
4952 A == 0 ? A : 0 is always 0 unless A is -0. Note that
4953 both transformations are correct when A is NaN: A != 0
4954 is then true, and A == 0 is false. */
4955
4956 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4957 && integer_zerop (arg01) && integer_zerop (arg2))
4958 {
4959 if (comp_code == NE_EXPR)
4960 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
4961 else if (comp_code == EQ_EXPR)
4962 return build_zero_cst (type);
4963 }
4964
4965 /* Try some transformations of A op B ? A : B.
4966
4967 A == B? A : B same as B
4968 A != B? A : B same as A
4969 A >= B? A : B same as max (A, B)
4970 A > B? A : B same as max (B, A)
4971 A <= B? A : B same as min (A, B)
4972 A < B? A : B same as min (B, A)
4973
4974 As above, these transformations don't work in the presence
4975 of signed zeros. For example, if A and B are zeros of
4976 opposite sign, the first two transformations will change
4977 the sign of the result. In the last four, the original
4978 expressions give different results for (A=+0, B=-0) and
4979 (A=-0, B=+0), but the transformed expressions do not.
4980
4981 The first two transformations are correct if either A or B
4982 is a NaN. In the first transformation, the condition will
4983 be false, and B will indeed be chosen. In the case of the
4984 second transformation, the condition A != B will be true,
4985 and A will be chosen.
4986
4987 The conversions to max() and min() are not correct if B is
4988 a number and A is not. The conditions in the original
4989 expressions will be false, so all four give B. The min()
4990 and max() versions would give a NaN instead. */
4991 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4992 && operand_equal_for_comparison_p (arg01, arg2, arg00)
4993 /* Avoid these transformations if the COND_EXPR may be used
4994 as an lvalue in the C++ front-end. PR c++/19199. */
4995 && (in_gimple_form
4996 || VECTOR_TYPE_P (type)
4997 || (! lang_GNU_CXX ()
4998 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
4999 || ! maybe_lvalue_p (arg1)
5000 || ! maybe_lvalue_p (arg2)))
5001 {
5002 tree comp_op0 = arg00;
5003 tree comp_op1 = arg01;
5004 tree comp_type = TREE_TYPE (comp_op0);
5005
5006 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5007 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5008 {
5009 comp_type = type;
5010 comp_op0 = arg1;
5011 comp_op1 = arg2;
5012 }
5013
5014 switch (comp_code)
5015 {
5016 case EQ_EXPR:
5017 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5018 case NE_EXPR:
5019 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5020 case LE_EXPR:
5021 case LT_EXPR:
5022 case UNLE_EXPR:
5023 case UNLT_EXPR:
5024 /* In C++ a ?: expression can be an lvalue, so put the
5025 operand which will be used if they are equal first
5026 so that we can convert this back to the
5027 corresponding COND_EXPR. */
5028 if (!HONOR_NANS (arg1))
5029 {
5030 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5031 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5032 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5033 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5034 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5035 comp_op1, comp_op0);
5036 return pedantic_non_lvalue_loc (loc,
5037 fold_convert_loc (loc, type, tem));
5038 }
5039 break;
5040 case GE_EXPR:
5041 case GT_EXPR:
5042 case UNGE_EXPR:
5043 case UNGT_EXPR:
5044 if (!HONOR_NANS (arg1))
5045 {
5046 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5047 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5048 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5049 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5050 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5051 comp_op1, comp_op0);
5052 return pedantic_non_lvalue_loc (loc,
5053 fold_convert_loc (loc, type, tem));
5054 }
5055 break;
5056 case UNEQ_EXPR:
5057 if (!HONOR_NANS (arg1))
5058 return pedantic_non_lvalue_loc (loc,
5059 fold_convert_loc (loc, type, arg2));
5060 break;
5061 case LTGT_EXPR:
5062 if (!HONOR_NANS (arg1))
5063 return pedantic_non_lvalue_loc (loc,
5064 fold_convert_loc (loc, type, arg1));
5065 break;
5066 default:
5067 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5068 break;
5069 }
5070 }
5071
5072 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5073 we might still be able to simplify this. For example,
5074 if C1 is one less or one more than C2, this might have started
5075 out as a MIN or MAX and been transformed by this function.
5076 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5077
5078 if (INTEGRAL_TYPE_P (type)
5079 && TREE_CODE (arg01) == INTEGER_CST
5080 && TREE_CODE (arg2) == INTEGER_CST)
5081 switch (comp_code)
5082 {
5083 case EQ_EXPR:
5084 if (TREE_CODE (arg1) == INTEGER_CST)
5085 break;
5086 /* We can replace A with C1 in this case. */
5087 arg1 = fold_convert_loc (loc, type, arg01);
5088 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5089
5090 case LT_EXPR:
5091 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5092 MIN_EXPR, to preserve the signedness of the comparison. */
5093 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5094 OEP_ONLY_CONST)
5095 && operand_equal_p (arg01,
5096 const_binop (PLUS_EXPR, arg2,
5097 build_int_cst (type, 1)),
5098 OEP_ONLY_CONST))
5099 {
5100 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5101 fold_convert_loc (loc, TREE_TYPE (arg00),
5102 arg2));
5103 return pedantic_non_lvalue_loc (loc,
5104 fold_convert_loc (loc, type, tem));
5105 }
5106 break;
5107
5108 case LE_EXPR:
5109 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5110 as above. */
5111 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5112 OEP_ONLY_CONST)
5113 && operand_equal_p (arg01,
5114 const_binop (MINUS_EXPR, arg2,
5115 build_int_cst (type, 1)),
5116 OEP_ONLY_CONST))
5117 {
5118 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5119 fold_convert_loc (loc, TREE_TYPE (arg00),
5120 arg2));
5121 return pedantic_non_lvalue_loc (loc,
5122 fold_convert_loc (loc, type, tem));
5123 }
5124 break;
5125
5126 case GT_EXPR:
5127 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5128 MAX_EXPR, to preserve the signedness of the comparison. */
5129 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5130 OEP_ONLY_CONST)
5131 && operand_equal_p (arg01,
5132 const_binop (MINUS_EXPR, arg2,
5133 build_int_cst (type, 1)),
5134 OEP_ONLY_CONST))
5135 {
5136 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5137 fold_convert_loc (loc, TREE_TYPE (arg00),
5138 arg2));
5139 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5140 }
5141 break;
5142
5143 case GE_EXPR:
5144 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5145 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5146 OEP_ONLY_CONST)
5147 && operand_equal_p (arg01,
5148 const_binop (PLUS_EXPR, arg2,
5149 build_int_cst (type, 1)),
5150 OEP_ONLY_CONST))
5151 {
5152 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5153 fold_convert_loc (loc, TREE_TYPE (arg00),
5154 arg2));
5155 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5156 }
5157 break;
5158 case NE_EXPR:
5159 break;
5160 default:
5161 gcc_unreachable ();
5162 }
5163
5164 return NULL_TREE;
5165 }
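
/* An instance of the constant case above: x < 5 ? x : 4 has C1 == C2 + 1
   and so is recognized as MIN (x, 4), while x > 5 ? x : 6 has
   C1 == C2 - 1 and becomes MAX (x, 6).  */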
5166
5167
5168 \f
5169 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5170 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5171 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5172 false) >= 2)
5173 #endif
5174
5175 /* EXP is some logical combination of boolean tests. See if we can
5176 merge it into some range test. Return the new tree if so. */
5177
5178 static tree
5179 fold_range_test (location_t loc, enum tree_code code, tree type,
5180 tree op0, tree op1)
5181 {
5182 int or_op = (code == TRUTH_ORIF_EXPR
5183 || code == TRUTH_OR_EXPR);
5184 int in0_p, in1_p, in_p;
5185 tree low0, low1, low, high0, high1, high;
5186 bool strict_overflow_p = false;
5187 tree tem, lhs, rhs;
5188 const char * const warnmsg = G_("assuming signed overflow does not occur "
5189 "when simplifying range test");
5190
5191 if (!INTEGRAL_TYPE_P (type))
5192 return 0;
5193
5194 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5195 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5196
5197 /* If this is an OR operation, invert both sides; we will invert
5198 again at the end. */
5199 if (or_op)
5200 in0_p = ! in0_p, in1_p = ! in1_p;
5201
5202 /* If both expressions are the same, if we can merge the ranges, and we
5203 can build the range test, return it or it inverted. If one of the
5204 ranges is always true or always false, consider it to be the same
5205 expression as the other. */
5206 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5207 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5208 in1_p, low1, high1)
5209 && 0 != (tem = (build_range_check (loc, type,
5210 lhs != 0 ? lhs
5211 : rhs != 0 ? rhs : integer_zero_node,
5212 in_p, low, high))))
5213 {
5214 if (strict_overflow_p)
5215 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5216 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5217 }
5218
5219 /* On machines where branches are expensive, if this is a
5220 short-circuited branch and the underlying object on both sides
5221 is the same, make a non-short-circuit operation. */
5222 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5223 && lhs != 0 && rhs != 0
5224 && (code == TRUTH_ANDIF_EXPR
5225 || code == TRUTH_ORIF_EXPR)
5226 && operand_equal_p (lhs, rhs, 0))
5227 {
5228 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5229 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5230 which case we can't do this.  */
5231 if (simple_operand_p (lhs))
5232 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5233 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5234 type, op0, op1);
5235
5236 else if (!lang_hooks.decls.global_bindings_p ()
5237 && !CONTAINS_PLACEHOLDER_P (lhs))
5238 {
5239 tree common = save_expr (lhs);
5240
5241 if (0 != (lhs = build_range_check (loc, type, common,
5242 or_op ? ! in0_p : in0_p,
5243 low0, high0))
5244 && (0 != (rhs = build_range_check (loc, type, common,
5245 or_op ? ! in1_p : in1_p,
5246 low1, high1))))
5247 {
5248 if (strict_overflow_p)
5249 fold_overflow_warning (warnmsg,
5250 WARN_STRICT_OVERFLOW_COMPARISON);
5251 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5252 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5253 type, lhs, rhs);
5254 }
5255 }
5256 }
5257
5258 return 0;
5259 }
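
/* To make the above concrete: for ch >= '0' && ch <= '9' the two ranges
   merge to + ['0', '9'], and build_range_check then produces, in effect,

       (unsigned) (ch - '0') <= 9

   replacing two branches with one.  */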
5260 \f
5261 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5262 bit value. Arrange things so the extra bits will be set to zero if and
5263 only if C is sign-extended to its full width.  If MASK is nonzero,
5264 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5265
5266 static tree
5267 unextend (tree c, int p, int unsignedp, tree mask)
5268 {
5269 tree type = TREE_TYPE (c);
5270 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5271 tree temp;
5272
5273 if (p == modesize || unsignedp)
5274 return c;
5275
5276 /* We work by getting just the sign bit into the low-order bit, then
5277 into the high-order bit, then sign-extend. We then XOR that value
5278 with C. */
5279 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5280
5281 /* We must use a signed type in order to get an arithmetic right shift.
5282 However, we must also avoid introducing accidental overflows, so that
5283 a subsequent call to integer_zerop will work. Hence we must
5284 do the type conversion here. At this point, the constant is either
5285 zero or one, and the conversion to a signed type can never overflow.
5286 We could get an overflow if this conversion is done anywhere else. */
5287 if (TYPE_UNSIGNED (type))
5288 temp = fold_convert (signed_type_for (type), temp);
5289
5290 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5291 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5292 if (mask != 0)
5293 temp = const_binop (BIT_AND_EXPR, temp,
5294 fold_convert (TREE_TYPE (c), mask));
5295 /* If necessary, convert the type back to match the type of C. */
5296 if (TYPE_UNSIGNED (type))
5297 temp = fold_convert (type, temp);
5298
5299 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5300 }
5301 \f
5302 /* For an expression that has the form
5303 (A && B) || ~B
5304 or
5305 (A || B) && ~B,
5306 we can drop one of the inner expressions and simplify to
5307 A || ~B
5308 or
5309 A && ~B
5310 LOC is the location of the resulting expression. OP is the inner
5311 logical operation (the left-hand side in the examples above), while CMPOP
5312 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5313 removing a condition that guards another, as in
5314 (A != NULL && A->...) || A == NULL
5315 which we must not transform. If RHS_ONLY is true, only eliminate the
5316 right-most operand of the inner logical operation. */
5317
5318 static tree
5319 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5320 bool rhs_only)
5321 {
5322 tree type = TREE_TYPE (cmpop);
5323 enum tree_code code = TREE_CODE (cmpop);
5324 enum tree_code truthop_code = TREE_CODE (op);
5325 tree lhs = TREE_OPERAND (op, 0);
5326 tree rhs = TREE_OPERAND (op, 1);
5327 tree orig_lhs = lhs, orig_rhs = rhs;
5328 enum tree_code rhs_code = TREE_CODE (rhs);
5329 enum tree_code lhs_code = TREE_CODE (lhs);
5330 enum tree_code inv_code;
5331
5332 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5333 return NULL_TREE;
5334
5335 if (TREE_CODE_CLASS (code) != tcc_comparison)
5336 return NULL_TREE;
5337
5338 if (rhs_code == truthop_code)
5339 {
5340 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5341 if (newrhs != NULL_TREE)
5342 {
5343 rhs = newrhs;
5344 rhs_code = TREE_CODE (rhs);
5345 }
5346 }
5347 if (lhs_code == truthop_code && !rhs_only)
5348 {
5349 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5350 if (newlhs != NULL_TREE)
5351 {
5352 lhs = newlhs;
5353 lhs_code = TREE_CODE (lhs);
5354 }
5355 }
5356
5357 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5358 if (inv_code == rhs_code
5359 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5360 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5361 return lhs;
5362 if (!rhs_only && inv_code == lhs_code
5363 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5364 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5365 return rhs;
5366 if (rhs != orig_rhs || lhs != orig_lhs)
5367 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5368 lhs, rhs);
5369 return NULL_TREE;
5370 }
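
/* Brute-force check of the boolean identities the function above relies
   on (standalone illustration; here !B plays the role of the inverted
   comparison ~B in the comment):
     (A && B) || !B  ==  A || !B
     (A || B) && !B  ==  A && !B  */

static int
merge_truthop_identities_hold (void)
{
  for (int a = 0; a <= 1; a++)
    for (int b = 0; b <= 1; b++)
      if ((((a && b) || !b) != (a || !b))
	  || (((a || b) && !b) != (a && !b)))
	return 0;
  return 1;
}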
5371
5372 /* Find ways of folding logical expressions of LHS and RHS:
5373 Try to merge two comparisons to the same innermost item.
5374 Look for range tests like "ch >= '0' && ch <= '9'".
5375 Look for combinations of simple terms on machines with expensive branches
5376 and evaluate the RHS unconditionally.
5377
5378 For example, if we have p->a == 2 && p->b == 4 and we can make an
5379 object large enough to span both A and B, we can do this with a comparison
5380 against the object ANDed with the a mask.
5381
5382 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5383 operations to do this with one comparison.
5384
5385    We check for both normal comparisons and the BIT_AND_EXPRs made by
5386    this function and the one above.
5387
5388 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5389 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5390
5391 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5392 two operands.
5393
5394 We return the simplified tree or 0 if no optimization is possible. */
5395
5396 static tree
5397 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5398 tree lhs, tree rhs)
5399 {
5400 /* If this is the "or" of two comparisons, we can do something if
5401 the comparisons are NE_EXPR. If this is the "and", we can do something
5402 if the comparisons are EQ_EXPR. I.e.,
5403 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5404
5405 WANTED_CODE is this operation code. For single bit fields, we can
5406 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5407 comparison for one-bit fields. */
5408
5409 enum tree_code wanted_code;
5410 enum tree_code lcode, rcode;
5411 tree ll_arg, lr_arg, rl_arg, rr_arg;
5412 tree ll_inner, lr_inner, rl_inner, rr_inner;
5413 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5414 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5415 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5416 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5417 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5418 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5419 machine_mode lnmode, rnmode;
5420 tree ll_mask, lr_mask, rl_mask, rr_mask;
5421 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5422 tree l_const, r_const;
5423 tree lntype, rntype, result;
5424 HOST_WIDE_INT first_bit, end_bit;
5425 int volatilep;
5426
5427 /* Start by getting the comparison codes. Fail if anything is volatile.
5428 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5429 it were surrounded with a NE_EXPR. */
5430
5431 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5432 return 0;
5433
5434 lcode = TREE_CODE (lhs);
5435 rcode = TREE_CODE (rhs);
5436
5437 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5438 {
5439 lhs = build2 (NE_EXPR, truth_type, lhs,
5440 build_int_cst (TREE_TYPE (lhs), 0));
5441 lcode = NE_EXPR;
5442 }
5443
5444 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5445 {
5446 rhs = build2 (NE_EXPR, truth_type, rhs,
5447 build_int_cst (TREE_TYPE (rhs), 0));
5448 rcode = NE_EXPR;
5449 }
5450
5451 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5452 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5453 return 0;
5454
5455 ll_arg = TREE_OPERAND (lhs, 0);
5456 lr_arg = TREE_OPERAND (lhs, 1);
5457 rl_arg = TREE_OPERAND (rhs, 0);
5458 rr_arg = TREE_OPERAND (rhs, 1);
5459
5460 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5461 if (simple_operand_p (ll_arg)
5462 && simple_operand_p (lr_arg))
5463 {
5464 if (operand_equal_p (ll_arg, rl_arg, 0)
5465 && operand_equal_p (lr_arg, rr_arg, 0))
5466 {
5467 result = combine_comparisons (loc, code, lcode, rcode,
5468 truth_type, ll_arg, lr_arg);
5469 if (result)
5470 return result;
5471 }
5472 else if (operand_equal_p (ll_arg, rr_arg, 0)
5473 && operand_equal_p (lr_arg, rl_arg, 0))
5474 {
5475 result = combine_comparisons (loc, code, lcode,
5476 swap_tree_comparison (rcode),
5477 truth_type, ll_arg, lr_arg);
5478 if (result)
5479 return result;
5480 }
5481 }
5482
5483 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5484 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5485
5486 /* If the RHS can be evaluated unconditionally and its operands are
5487 simple, it wins to evaluate the RHS unconditionally on machines
5488 with expensive branches. In this case, this isn't a comparison
5489 that can be merged. */
5490
5491 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5492 false) >= 2
5493 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5494 && simple_operand_p (rl_arg)
5495 && simple_operand_p (rr_arg))
5496 {
5497 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5498 if (code == TRUTH_OR_EXPR
5499 && lcode == NE_EXPR && integer_zerop (lr_arg)
5500 && rcode == NE_EXPR && integer_zerop (rr_arg)
5501 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5502 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5503 return build2_loc (loc, NE_EXPR, truth_type,
5504 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5505 ll_arg, rl_arg),
5506 build_int_cst (TREE_TYPE (ll_arg), 0));
5507
5508 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5509 if (code == TRUTH_AND_EXPR
5510 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5511 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5512 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5513 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5514 return build2_loc (loc, EQ_EXPR, truth_type,
5515 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5516 ll_arg, rl_arg),
5517 build_int_cst (TREE_TYPE (ll_arg), 0));
5518 }
5519
5520 /* See if the comparisons can be merged. Then get all the parameters for
5521 each side. */
5522
5523 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5524 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5525 return 0;
5526
5527 volatilep = 0;
5528 ll_inner = decode_field_reference (loc, ll_arg,
5529 &ll_bitsize, &ll_bitpos, &ll_mode,
5530 &ll_unsignedp, &volatilep, &ll_mask,
5531 &ll_and_mask);
5532 lr_inner = decode_field_reference (loc, lr_arg,
5533 &lr_bitsize, &lr_bitpos, &lr_mode,
5534 &lr_unsignedp, &volatilep, &lr_mask,
5535 &lr_and_mask);
5536 rl_inner = decode_field_reference (loc, rl_arg,
5537 &rl_bitsize, &rl_bitpos, &rl_mode,
5538 &rl_unsignedp, &volatilep, &rl_mask,
5539 &rl_and_mask);
5540 rr_inner = decode_field_reference (loc, rr_arg,
5541 &rr_bitsize, &rr_bitpos, &rr_mode,
5542 &rr_unsignedp, &volatilep, &rr_mask,
5543 &rr_and_mask);
5544
5545   /* The inner operation on the lhs of each comparison must be the
5546      same if we are to be able to do anything.
5547 Then see if we have constants. If not, the same must be true for
5548 the rhs's. */
5549 if (volatilep || ll_inner == 0 || rl_inner == 0
5550 || ! operand_equal_p (ll_inner, rl_inner, 0))
5551 return 0;
5552
5553 if (TREE_CODE (lr_arg) == INTEGER_CST
5554 && TREE_CODE (rr_arg) == INTEGER_CST)
5555 l_const = lr_arg, r_const = rr_arg;
5556 else if (lr_inner == 0 || rr_inner == 0
5557 || ! operand_equal_p (lr_inner, rr_inner, 0))
5558 return 0;
5559 else
5560 l_const = r_const = 0;
5561
5562 /* If either comparison code is not correct for our logical operation,
5563 fail. However, we can convert a one-bit comparison against zero into
5564 the opposite comparison against that bit being set in the field. */
5565
5566 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5567 if (lcode != wanted_code)
5568 {
5569 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5570 {
5571 /* Make the left operand unsigned, since we are only interested
5572 in the value of one bit. Otherwise we are doing the wrong
5573 thing below. */
5574 ll_unsignedp = 1;
5575 l_const = ll_mask;
5576 }
5577 else
5578 return 0;
5579 }
5580
5581 /* This is analogous to the code for l_const above. */
5582 if (rcode != wanted_code)
5583 {
5584 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5585 {
5586 rl_unsignedp = 1;
5587 r_const = rl_mask;
5588 }
5589 else
5590 return 0;
5591 }
5592
5593 /* See if we can find a mode that contains both fields being compared on
5594 the left. If we can't, fail. Otherwise, update all constants and masks
5595 to be relative to a field of that size. */
5596 first_bit = MIN (ll_bitpos, rl_bitpos);
5597 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5598 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5599 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5600 volatilep);
5601 if (lnmode == VOIDmode)
5602 return 0;
5603
5604 lnbitsize = GET_MODE_BITSIZE (lnmode);
5605 lnbitpos = first_bit & ~ (lnbitsize - 1);
5606 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5607 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5608
5609 if (BYTES_BIG_ENDIAN)
5610 {
5611 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5612 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5613 }
5614
5615 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5616 size_int (xll_bitpos));
5617 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5618 size_int (xrl_bitpos));
5619
5620 if (l_const)
5621 {
5622 l_const = fold_convert_loc (loc, lntype, l_const);
5623 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5624 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5625 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5626 fold_build1_loc (loc, BIT_NOT_EXPR,
5627 lntype, ll_mask))))
5628 {
5629 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5630
5631 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5632 }
5633 }
5634 if (r_const)
5635 {
5636 r_const = fold_convert_loc (loc, lntype, r_const);
5637 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5638 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5639 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5640 fold_build1_loc (loc, BIT_NOT_EXPR,
5641 lntype, rl_mask))))
5642 {
5643 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5644
5645 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5646 }
5647 }
5648
5649   /* If the right sides are not constant, do the same for them.  Also,
5650 disallow this optimization if a size or signedness mismatch occurs
5651 between the left and right sides. */
5652 if (l_const == 0)
5653 {
5654 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5655 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5656 /* Make sure the two fields on the right
5657 correspond to the left without being swapped. */
5658 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5659 return 0;
5660
5661 first_bit = MIN (lr_bitpos, rr_bitpos);
5662 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5663 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5664 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5665 volatilep);
5666 if (rnmode == VOIDmode)
5667 return 0;
5668
5669 rnbitsize = GET_MODE_BITSIZE (rnmode);
5670 rnbitpos = first_bit & ~ (rnbitsize - 1);
5671 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5672 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5673
5674 if (BYTES_BIG_ENDIAN)
5675 {
5676 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5677 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5678 }
5679
5680 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5681 rntype, lr_mask),
5682 size_int (xlr_bitpos));
5683 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5684 rntype, rr_mask),
5685 size_int (xrr_bitpos));
5686
5687 /* Make a mask that corresponds to both fields being compared.
5688 Do this for both items being compared. If the operands are the
5689 same size and the bits being compared are in the same position
5690 then we can do this by masking both and comparing the masked
5691 results. */
5692 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5693 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5694 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5695 {
5696 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5697 ll_unsignedp || rl_unsignedp);
5698 if (! all_ones_mask_p (ll_mask, lnbitsize))
5699 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5700
5701 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5702 lr_unsignedp || rr_unsignedp);
5703 if (! all_ones_mask_p (lr_mask, rnbitsize))
5704 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5705
5706 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5707 }
5708
5709 /* There is still another way we can do something: If both pairs of
5710 fields being compared are adjacent, we may be able to make a wider
5711 field containing them both.
5712
5713 Note that we still must mask the lhs/rhs expressions. Furthermore,
5714 the mask must be shifted to account for the shift done by
5715 make_bit_field_ref. */
5716 if ((ll_bitsize + ll_bitpos == rl_bitpos
5717 && lr_bitsize + lr_bitpos == rr_bitpos)
5718 || (ll_bitpos == rl_bitpos + rl_bitsize
5719 && lr_bitpos == rr_bitpos + rr_bitsize))
5720 {
5721 tree type;
5722
5723 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5724 ll_bitsize + rl_bitsize,
5725 MIN (ll_bitpos, rl_bitpos), ll_unsignedp);
5726 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5727 lr_bitsize + rr_bitsize,
5728 MIN (lr_bitpos, rr_bitpos), lr_unsignedp);
5729
5730 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5731 size_int (MIN (xll_bitpos, xrl_bitpos)));
5732 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5733 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5734
5735 /* Convert to the smaller type before masking out unwanted bits. */
5736 type = lntype;
5737 if (lntype != rntype)
5738 {
5739 if (lnbitsize > rnbitsize)
5740 {
5741 lhs = fold_convert_loc (loc, rntype, lhs);
5742 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5743 type = rntype;
5744 }
5745 else if (lnbitsize < rnbitsize)
5746 {
5747 rhs = fold_convert_loc (loc, lntype, rhs);
5748 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5749 type = lntype;
5750 }
5751 }
5752
5753 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5754 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5755
5756 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5757 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5758
5759 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5760 }
5761
5762 return 0;
5763 }
5764
5765 /* Handle the case of comparisons with constants. If there is something in
5766 common between the masks, those bits of the constants must be the same.
5767 If not, the condition is always false. Test for this to avoid generating
5768 incorrect code below. */
5769 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5770 if (! integer_zerop (result)
5771 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5772 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5773 {
5774 if (wanted_code == NE_EXPR)
5775 {
5776 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5777 return constant_boolean_node (true, truth_type);
5778 }
5779 else
5780 {
5781 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5782 return constant_boolean_node (false, truth_type);
5783 }
5784 }
5785
5786 /* Construct the expression we will return. First get the component
5787 reference we will make. Unless the mask is all ones the width of
5788 that field, perform the mask operation. Then compare with the
5789 merged constant. */
5790 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5791 ll_unsignedp || rl_unsignedp);
5792
5793 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5794 if (! all_ones_mask_p (ll_mask, lnbitsize))
5795 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5796
5797 return build2_loc (loc, wanted_code, truth_type, result,
5798 const_binop (BIT_IOR_EXPR, l_const, r_const));
5799 }
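
/* Source-level sketch of the field merge performed above (hypothetical
   struct; assumes a little-endian target so the merged constant is
   0x0402): two byte comparisons become one halfword load and a single
   comparison against the merged constant.  */

struct andor_sketch { unsigned char a, b; };

static int
andor_merge_sketch (const struct andor_sketch *p)
{
  /* Equivalent to p->a == 2 && p->b == 4.  */
  unsigned short both;
  __builtin_memcpy (&both, p, sizeof both);
  return both == 0x0402;
}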
5800 \f
5801 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5802 constant. */
5803
5804 static tree
5805 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5806 tree op0, tree op1)
5807 {
5808 tree arg0 = op0;
5809 enum tree_code op_code;
5810 tree comp_const;
5811 tree minmax_const;
5812 int consts_equal, consts_lt;
5813 tree inner;
5814
5815 STRIP_SIGN_NOPS (arg0);
5816
5817 op_code = TREE_CODE (arg0);
5818 minmax_const = TREE_OPERAND (arg0, 1);
5819 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5820 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5821 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5822 inner = TREE_OPERAND (arg0, 0);
5823
5824   /* If something does not permit us to optimize, return NULL_TREE.  */
5825 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5826 || TREE_CODE (comp_const) != INTEGER_CST
5827 || TREE_OVERFLOW (comp_const)
5828 || TREE_CODE (minmax_const) != INTEGER_CST
5829 || TREE_OVERFLOW (minmax_const))
5830 return NULL_TREE;
5831
5832 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5833 and GT_EXPR, doing the rest with recursive calls using logical
5834 simplifications. */
5835 switch (code)
5836 {
5837 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5838 {
5839 tree tem
5840 = optimize_minmax_comparison (loc,
5841 invert_tree_comparison (code, false),
5842 type, op0, op1);
5843 if (tem)
5844 return invert_truthvalue_loc (loc, tem);
5845 return NULL_TREE;
5846 }
5847
5848 case GE_EXPR:
5849 return
5850 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5851 optimize_minmax_comparison
5852 (loc, EQ_EXPR, type, arg0, comp_const),
5853 optimize_minmax_comparison
5854 (loc, GT_EXPR, type, arg0, comp_const));
5855
5856 case EQ_EXPR:
5857 if (op_code == MAX_EXPR && consts_equal)
5858 /* MAX (X, 0) == 0 -> X <= 0 */
5859 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5860
5861 else if (op_code == MAX_EXPR && consts_lt)
5862 /* MAX (X, 0) == 5 -> X == 5 */
5863 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5864
5865 else if (op_code == MAX_EXPR)
5866 /* MAX (X, 0) == -1 -> false */
5867 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5868
5869 else if (consts_equal)
5870 /* MIN (X, 0) == 0 -> X >= 0 */
5871 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5872
5873 else if (consts_lt)
5874 /* MIN (X, 0) == 5 -> false */
5875 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5876
5877 else
5878 /* MIN (X, 0) == -1 -> X == -1 */
5879 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5880
5881 case GT_EXPR:
5882 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5883 /* MAX (X, 0) > 0 -> X > 0
5884 MAX (X, 0) > 5 -> X > 5 */
5885 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5886
5887 else if (op_code == MAX_EXPR)
5888 /* MAX (X, 0) > -1 -> true */
5889 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5890
5891 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5892 /* MIN (X, 0) > 0 -> false
5893 MIN (X, 0) > 5 -> false */
5894 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5895
5896 else
5897 /* MIN (X, 0) > -1 -> X > -1 */
5898 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5899
5900 default:
5901 return NULL_TREE;
5902 }
5903 }
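
/* Worked instances of the EQ_EXPR and GT_EXPR cases above (standalone
   illustration): because MAX (x, 0) clamps negative values to the
   constant, MAX (x, 0) == 0 is exactly x <= 0 and MAX (x, 0) > 0 is
   exactly x > 0.  */

static int
minmax_compare_sketch (int x)
{
  /* (x > 0 ? x : 0) == 0 holds precisely when x <= 0.  */
  return x <= 0;
}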
5904 \f
5905 /* T is an integer expression that is being multiplied or divided by, or
5906    reduced modulo, a constant C (CODE says which operation and what kind
5907    of divide or modulus).  See if we can eliminate that operation by folding it with
5908 other operations already in T. WIDE_TYPE, if non-null, is a type that
5909 should be used for the computation if wider than our type.
5910
5911 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
5912 (X * 2) + (Y * 4). We must, however, be assured that either the original
5913 expression would not overflow or that overflow is undefined for the type
5914 in the language in question.
5915
5916 If we return a non-null expression, it is an equivalent form of the
5917 original computation, but need not be in the original type.
5918
5919    We set *STRICT_OVERFLOW_P to true if the return value depends on
5920 signed overflow being undefined. Otherwise we do not change
5921 *STRICT_OVERFLOW_P. */
5922
5923 static tree
5924 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
5925 bool *strict_overflow_p)
5926 {
5927 /* To avoid exponential search depth, refuse to allow recursion past
5928 three levels. Beyond that (1) it's highly unlikely that we'll find
5929 something interesting and (2) we've probably processed it before
5930 when we built the inner expression. */
5931
5932 static int depth;
5933 tree ret;
5934
5935 if (depth > 3)
5936 return NULL;
5937
5938 depth++;
5939 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
5940 depth--;
5941
5942 return ret;
5943 }
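
/* Sketch of the motivating example from the comment above extract_muldiv
   (standalone, hypothetical): with signed overflow undefined, dividing
   (x * 8 + y * 16) by 4 folds to x * 2 + y * 4.  */

static int
extract_muldiv_sketch (int x, int y)
{
  /* Valid only because the compiler may assume x * 8 + y * 16 does not
     overflow; under -fwrapv the fold would not be performed.  */
  return x * 2 + y * 4;
}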
5944
5945 static tree
5946 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
5947 bool *strict_overflow_p)
5948 {
5949 tree type = TREE_TYPE (t);
5950 enum tree_code tcode = TREE_CODE (t);
5951 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
5952 > GET_MODE_SIZE (TYPE_MODE (type)))
5953 ? wide_type : type);
5954 tree t1, t2;
5955 int same_p = tcode == code;
5956 tree op0 = NULL_TREE, op1 = NULL_TREE;
5957 bool sub_strict_overflow_p;
5958
5959 /* Don't deal with constants of zero here; they confuse the code below. */
5960 if (integer_zerop (c))
5961 return NULL_TREE;
5962
5963 if (TREE_CODE_CLASS (tcode) == tcc_unary)
5964 op0 = TREE_OPERAND (t, 0);
5965
5966 if (TREE_CODE_CLASS (tcode) == tcc_binary)
5967 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
5968
5969 /* Note that we need not handle conditional operations here since fold
5970 already handles those cases. So just do arithmetic here. */
5971 switch (tcode)
5972 {
5973 case INTEGER_CST:
5974 /* For a constant, we can always simplify if we are a multiply
5975 or (for divide and modulus) if it is a multiple of our constant. */
5976 if (code == MULT_EXPR
5977 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
5978 return const_binop (code, fold_convert (ctype, t),
5979 fold_convert (ctype, c));
5980 break;
5981
5982 CASE_CONVERT: case NON_LVALUE_EXPR:
5983 /* If op0 is an expression ... */
5984 if ((COMPARISON_CLASS_P (op0)
5985 || UNARY_CLASS_P (op0)
5986 || BINARY_CLASS_P (op0)
5987 || VL_EXP_CLASS_P (op0)
5988 || EXPRESSION_CLASS_P (op0))
5989 /* ... and has wrapping overflow, and its type is smaller
5990 than ctype, then we cannot pass through as widening. */
5991 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
5992 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
5993 && (TYPE_PRECISION (ctype)
5994 > TYPE_PRECISION (TREE_TYPE (op0))))
5995 /* ... or this is a truncation (t is narrower than op0),
5996 then we cannot pass through this narrowing. */
5997 || (TYPE_PRECISION (type)
5998 < TYPE_PRECISION (TREE_TYPE (op0)))
5999 /* ... or signedness changes for division or modulus,
6000 then we cannot pass through this conversion. */
6001 || (code != MULT_EXPR
6002 && (TYPE_UNSIGNED (ctype)
6003 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6004 /* ... or has undefined overflow while the converted to
6005 type has not, we cannot do the operation in the inner type
6006 as that would introduce undefined overflow. */
6007 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6008 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6009 && !TYPE_OVERFLOW_UNDEFINED (type))))
6010 break;
6011
6012 /* Pass the constant down and see if we can make a simplification. If
6013 we can, replace this expression with the inner simplification for
6014 possible later conversion to our or some other type. */
6015 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6016 && TREE_CODE (t2) == INTEGER_CST
6017 && !TREE_OVERFLOW (t2)
6018 && (0 != (t1 = extract_muldiv (op0, t2, code,
6019 code == MULT_EXPR
6020 ? ctype : NULL_TREE,
6021 strict_overflow_p))))
6022 return t1;
6023 break;
6024
6025 case ABS_EXPR:
6026 /* If widening the type changes it from signed to unsigned, then we
6027 must avoid building ABS_EXPR itself as unsigned. */
6028 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6029 {
6030 tree cstype = (*signed_type_for) (ctype);
6031 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6032 != 0)
6033 {
6034 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6035 return fold_convert (ctype, t1);
6036 }
6037 break;
6038 }
6039 /* If the constant is negative, we cannot simplify this. */
6040 if (tree_int_cst_sgn (c) == -1)
6041 break;
6042 /* FALLTHROUGH */
6043 case NEGATE_EXPR:
6044 /* For division and modulus, type can't be unsigned, as e.g.
6045 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6046 For signed types, even with wrapping overflow, this is fine. */
6047 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6048 break;
6049 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6050 != 0)
6051 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6052 break;
6053
6054 case MIN_EXPR: case MAX_EXPR:
6055 /* If widening the type changes the signedness, then we can't perform
6056 this optimization as that changes the result. */
6057 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6058 break;
6059
6060 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6061 sub_strict_overflow_p = false;
6062 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6063 &sub_strict_overflow_p)) != 0
6064 && (t2 = extract_muldiv (op1, c, code, wide_type,
6065 &sub_strict_overflow_p)) != 0)
6066 {
6067 if (tree_int_cst_sgn (c) < 0)
6068 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6069 if (sub_strict_overflow_p)
6070 *strict_overflow_p = true;
6071 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6072 fold_convert (ctype, t2));
6073 }
6074 break;
6075
6076 case LSHIFT_EXPR: case RSHIFT_EXPR:
6077 /* If the second operand is constant, this is a multiplication
6078      or floor division by a power of two, so we can treat it that
6079 way unless the multiplier or divisor overflows. Signed
6080 left-shift overflow is implementation-defined rather than
6081 undefined in C90, so do not convert signed left shift into
6082 multiplication. */
6083 if (TREE_CODE (op1) == INTEGER_CST
6084 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6085 /* const_binop may not detect overflow correctly,
6086 so check for it explicitly here. */
6087 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6088 && 0 != (t1 = fold_convert (ctype,
6089 const_binop (LSHIFT_EXPR,
6090 size_one_node,
6091 op1)))
6092 && !TREE_OVERFLOW (t1))
6093 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6094 ? MULT_EXPR : FLOOR_DIV_EXPR,
6095 ctype,
6096 fold_convert (ctype, op0),
6097 t1),
6098 c, code, wide_type, strict_overflow_p);
6099 break;
6100
6101 case PLUS_EXPR: case MINUS_EXPR:
6102 /* See if we can eliminate the operation on both sides. If we can, we
6103 can return a new PLUS or MINUS. If we can't, the only remaining
6104 cases where we can do anything are if the second operand is a
6105 constant. */
6106 sub_strict_overflow_p = false;
6107 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6108 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6109 if (t1 != 0 && t2 != 0
6110 && (code == MULT_EXPR
6111 /* If not multiplication, we can only do this if both operands
6112 are divisible by c. */
6113 || (multiple_of_p (ctype, op0, c)
6114 && multiple_of_p (ctype, op1, c))))
6115 {
6116 if (sub_strict_overflow_p)
6117 *strict_overflow_p = true;
6118 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6119 fold_convert (ctype, t2));
6120 }
6121
6122 /* If this was a subtraction, negate OP1 and set it to be an addition.
6123 This simplifies the logic below. */
6124 if (tcode == MINUS_EXPR)
6125 {
6126 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6127 /* If OP1 was not easily negatable, the constant may be OP0. */
6128 if (TREE_CODE (op0) == INTEGER_CST)
6129 {
6130 std::swap (op0, op1);
6131 std::swap (t1, t2);
6132 }
6133 }
6134
6135 if (TREE_CODE (op1) != INTEGER_CST)
6136 break;
6137
6138      /* If either OP1 or C is negative, this optimization is not safe for
6139 some of the division and remainder types while for others we need
6140 to change the code. */
6141 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6142 {
6143 if (code == CEIL_DIV_EXPR)
6144 code = FLOOR_DIV_EXPR;
6145 else if (code == FLOOR_DIV_EXPR)
6146 code = CEIL_DIV_EXPR;
6147 else if (code != MULT_EXPR
6148 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6149 break;
6150 }
6151
6152 /* If it's a multiply or a division/modulus operation of a multiple
6153 of our constant, do the operation and verify it doesn't overflow. */
6154 if (code == MULT_EXPR
6155 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6156 {
6157 op1 = const_binop (code, fold_convert (ctype, op1),
6158 fold_convert (ctype, c));
6159 /* We allow the constant to overflow with wrapping semantics. */
6160 if (op1 == 0
6161 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6162 break;
6163 }
6164 else
6165 break;
6166
6167 /* If we have an unsigned type, we cannot widen the operation since it
6168 will change the result if the original computation overflowed. */
6169 if (TYPE_UNSIGNED (ctype) && ctype != type)
6170 break;
6171
6172 /* If we were able to eliminate our operation from the first side,
6173 apply our operation to the second side and reform the PLUS. */
6174 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6175 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6176
6177 /* The last case is if we are a multiply. In that case, we can
6178 apply the distributive law to commute the multiply and addition
6179 if the multiplication of the constants doesn't overflow
6180 and overflow is defined. With undefined overflow
6181 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6182 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6183 return fold_build2 (tcode, ctype,
6184 fold_build2 (code, ctype,
6185 fold_convert (ctype, op0),
6186 fold_convert (ctype, c)),
6187 op1);
6188
6189 break;
6190
6191 case MULT_EXPR:
6192 /* We have a special case here if we are doing something like
6193 (C * 8) % 4 since we know that's zero. */
6194 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6195 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6196 /* If the multiplication can overflow we cannot optimize this. */
6197 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6198 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6199 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6200 {
6201 *strict_overflow_p = true;
6202 return omit_one_operand (type, integer_zero_node, op0);
6203 }
6204
6205 /* ... fall through ... */
6206
6207 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6208 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6209 /* If we can extract our operation from the LHS, do so and return a
6210 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6211 do something only if the second operand is a constant. */
6212 if (same_p
6213 && (t1 = extract_muldiv (op0, c, code, wide_type,
6214 strict_overflow_p)) != 0)
6215 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6216 fold_convert (ctype, op1));
6217 else if (tcode == MULT_EXPR && code == MULT_EXPR
6218 && (t1 = extract_muldiv (op1, c, code, wide_type,
6219 strict_overflow_p)) != 0)
6220 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6221 fold_convert (ctype, t1));
6222 else if (TREE_CODE (op1) != INTEGER_CST)
6223 return 0;
6224
6225 /* If these are the same operation types, we can associate them
6226 assuming no overflow. */
6227 if (tcode == code)
6228 {
6229 bool overflow_p = false;
6230 bool overflow_mul_p;
6231 signop sign = TYPE_SIGN (ctype);
6232 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6233 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6234 if (overflow_mul_p
6235 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6236 overflow_p = true;
6237 if (!overflow_p)
6238 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6239 wide_int_to_tree (ctype, mul));
6240 }
6241
6242 /* If these operations "cancel" each other, we have the main
6243 optimizations of this pass, which occur when either constant is a
6244 multiple of the other, in which case we replace this with either an
6245      operation of CODE or TCODE.
6246
6247 If we have an unsigned type, we cannot do this since it will change
6248 the result if the original computation overflowed. */
6249 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6250 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6251 || (tcode == MULT_EXPR
6252 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6253 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6254 && code != MULT_EXPR)))
6255 {
6256 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6257 {
6258 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6259 *strict_overflow_p = true;
6260 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6261 fold_convert (ctype,
6262 const_binop (TRUNC_DIV_EXPR,
6263 op1, c)));
6264 }
6265 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6266 {
6267 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6268 *strict_overflow_p = true;
6269 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6270 fold_convert (ctype,
6271 const_binop (TRUNC_DIV_EXPR,
6272 c, op1)));
6273 }
6274 }
6275 break;
6276
6277 default:
6278 break;
6279 }
6280
6281 return 0;
6282 }
6283 \f
6284 /* Return a node which has the indicated constant VALUE (either 0 or
6285 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6286 and is of the indicated TYPE. */
6287
6288 tree
6289 constant_boolean_node (bool value, tree type)
6290 {
6291 if (type == integer_type_node)
6292 return value ? integer_one_node : integer_zero_node;
6293 else if (type == boolean_type_node)
6294 return value ? boolean_true_node : boolean_false_node;
6295 else if (TREE_CODE (type) == VECTOR_TYPE)
6296 return build_vector_from_val (type,
6297 build_int_cst (TREE_TYPE (type),
6298 value ? -1 : 0));
6299 else
6300 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6301 }
6302
6303
6304 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6305 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6306 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6307 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6308 COND is the first argument to CODE; otherwise (as in the example
6309 given here), it is the second argument. TYPE is the type of the
6310 original expression. Return NULL_TREE if no simplification is
6311 possible. */
6312
6313 static tree
6314 fold_binary_op_with_conditional_arg (location_t loc,
6315 enum tree_code code,
6316 tree type, tree op0, tree op1,
6317 tree cond, tree arg, int cond_first_p)
6318 {
6319 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6320 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6321 tree test, true_value, false_value;
6322 tree lhs = NULL_TREE;
6323 tree rhs = NULL_TREE;
6324 enum tree_code cond_code = COND_EXPR;
6325
6326 if (TREE_CODE (cond) == COND_EXPR
6327 || TREE_CODE (cond) == VEC_COND_EXPR)
6328 {
6329 test = TREE_OPERAND (cond, 0);
6330 true_value = TREE_OPERAND (cond, 1);
6331 false_value = TREE_OPERAND (cond, 2);
6332       /* If this arm of the conditional has void type (e.g. it is a
6333          throw expression), then it does not make sense to try to
6334          perform a logical or arithmetic operation involving it.  */
6335 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6336 lhs = true_value;
6337 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6338 rhs = false_value;
6339 }
6340 else
6341 {
6342 tree testtype = TREE_TYPE (cond);
6343 test = cond;
6344 true_value = constant_boolean_node (true, testtype);
6345 false_value = constant_boolean_node (false, testtype);
6346 }
6347
6348 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6349 cond_code = VEC_COND_EXPR;
6350
6351 /* This transformation is only worthwhile if we don't have to wrap ARG
6352 in a SAVE_EXPR and the operation can be simplified without recursing
6353      on at least one of the branches once it is pushed inside the COND_EXPR.  */
6354 if (!TREE_CONSTANT (arg)
6355 && (TREE_SIDE_EFFECTS (arg)
6356 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6357 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6358 return NULL_TREE;
6359
6360 arg = fold_convert_loc (loc, arg_type, arg);
6361 if (lhs == 0)
6362 {
6363 true_value = fold_convert_loc (loc, cond_type, true_value);
6364 if (cond_first_p)
6365 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6366 else
6367 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6368 }
6369 if (rhs == 0)
6370 {
6371 false_value = fold_convert_loc (loc, cond_type, false_value);
6372 if (cond_first_p)
6373 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6374 else
6375 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6376 }
6377
6378 /* Check that we have simplified at least one of the branches. */
6379 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6380 return NULL_TREE;
6381
6382 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6383 }
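
/* Source-level sketch of the transformation above (hypothetical
   example): a + (b ? x : 0) becomes b ? (a + x) : a, and the fold is
   only kept because one branch simplified (a + 0 collapsed to a).  */

static int
cond_arg_sketch (int a, int b, int x)
{
  return b ? (a + x) : a;
}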
6384
6385 \f
6386 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6387
6388 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6389 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6390 ADDEND is the same as X.
6391
6392 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6393 and finite. The problematic cases are when X is zero, and its mode
6394 has signed zeros. In the case of rounding towards -infinity,
6395 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6396 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6397
6398 bool
6399 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6400 {
6401 if (!real_zerop (addend))
6402 return false;
6403
6404 /* Don't allow the fold with -fsignaling-nans. */
6405 if (HONOR_SNANS (element_mode (type)))
6406 return false;
6407
6408 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6409 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6410 return true;
6411
6412 /* In a vector or complex, we would need to check the sign of all zeros. */
6413 if (TREE_CODE (addend) != REAL_CST)
6414 return false;
6415
6416 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6417 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6418 negate = !negate;
6419
6420 /* The mode has signed zeros, and we have to honor their sign.
6421 In this situation, there is only one case we can return true for.
6422 X - 0 is the same as X unless rounding towards -infinity is
6423 supported. */
6424 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6425 }
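
/* Why X + 0.0 cannot be folded to X when signed zeros are honored
   (standalone illustration): under IEEE round-to-nearest,
   -0.0 + 0.0 is +0.0, so the fold would flip an observable sign bit.  */

static int
signed_zero_sketch (void)
{
  double x = -0.0;
  /* Returns 1: the addition lost the sign of the negative zero.  */
  return __builtin_signbit (x) != __builtin_signbit (x + 0.0);
}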
6426
6427 /* Subroutine of fold() that checks comparisons of built-in math
6428 functions against real constants.
6429
6430 FCODE is the DECL_FUNCTION_CODE of the built-in, CODE is the comparison
6431 operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR, GE_EXPR or LE_EXPR. TYPE
6432 is the type of the result and ARG0 and ARG1 are the operands of the
6433 comparison. ARG1 must be a TREE_REAL_CST.
6434
6435 The function returns the constant folded tree if a simplification
6436 can be made, and NULL_TREE otherwise. */
6437
6438 static tree
6439 fold_mathfn_compare (location_t loc,
6440 enum built_in_function fcode, enum tree_code code,
6441 tree type, tree arg0, tree arg1)
6442 {
6443 REAL_VALUE_TYPE c;
6444
6445 if (BUILTIN_SQRT_P (fcode))
6446 {
6447 tree arg = CALL_EXPR_ARG (arg0, 0);
6448 machine_mode mode = TYPE_MODE (TREE_TYPE (arg0));
6449
6450 c = TREE_REAL_CST (arg1);
6451 if (REAL_VALUE_NEGATIVE (c))
6452 {
6453	  /* sqrt(x) == y, < y or <= y is always false, if y is negative.  */
6454 if (code == EQ_EXPR || code == LT_EXPR || code == LE_EXPR)
6455 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6456
6457 /* sqrt(x) > y is always true, if y is negative and we
6458 don't care about NaNs, i.e. negative values of x. */
6459 if (code == NE_EXPR || !HONOR_NANS (mode))
6460 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6461
6462 /* sqrt(x) > y is the same as x >= 0, if y is negative. */
6463 return fold_build2_loc (loc, GE_EXPR, type, arg,
6464 build_real (TREE_TYPE (arg), dconst0));
6465 }
6466 else if (code == GT_EXPR || code == GE_EXPR)
6467 {
6468 REAL_VALUE_TYPE c2;
6469
6470 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6471 real_convert (&c2, mode, &c2);
6472
6473 if (REAL_VALUE_ISINF (c2))
6474 {
6475 /* sqrt(x) > y is x == +Inf, when y is very large. */
6476 if (HONOR_INFINITIES (mode))
6477 return fold_build2_loc (loc, EQ_EXPR, type, arg,
6478 build_real (TREE_TYPE (arg), c2));
6479
6480 /* sqrt(x) > y is always false, when y is very large
6481 and we don't care about infinities. */
6482 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
6483 }
6484
6485 /* sqrt(x) > c is the same as x > c*c. */
6486 return fold_build2_loc (loc, code, type, arg,
6487 build_real (TREE_TYPE (arg), c2));
6488 }
6489 else if (code == LT_EXPR || code == LE_EXPR)
6490 {
6491 REAL_VALUE_TYPE c2;
6492
6493 REAL_ARITHMETIC (c2, MULT_EXPR, c, c);
6494 real_convert (&c2, mode, &c2);
6495
6496 if (REAL_VALUE_ISINF (c2))
6497 {
6498 /* sqrt(x) < y is always true, when y is a very large
6499 value and we don't care about NaNs or Infinities. */
6500 if (! HONOR_NANS (mode) && ! HONOR_INFINITIES (mode))
6501 return omit_one_operand_loc (loc, type, integer_one_node, arg);
6502
6503 /* sqrt(x) < y is x != +Inf when y is very large and we
6504 don't care about NaNs. */
6505 if (! HONOR_NANS (mode))
6506 return fold_build2_loc (loc, NE_EXPR, type, arg,
6507 build_real (TREE_TYPE (arg), c2));
6508
6509 /* sqrt(x) < y is x >= 0 when y is very large and we
6510 don't care about Infinities. */
6511 if (! HONOR_INFINITIES (mode))
6512 return fold_build2_loc (loc, GE_EXPR, type, arg,
6513 build_real (TREE_TYPE (arg), dconst0));
6514
6515 /* sqrt(x) < y is x >= 0 && x != +Inf, when y is large. */
6516 arg = save_expr (arg);
6517 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6518 fold_build2_loc (loc, GE_EXPR, type, arg,
6519 build_real (TREE_TYPE (arg),
6520 dconst0)),
6521 fold_build2_loc (loc, NE_EXPR, type, arg,
6522 build_real (TREE_TYPE (arg),
6523 c2)));
6524 }
6525
6526 /* sqrt(x) < c is the same as x < c*c, if we ignore NaNs. */
6527 if (! HONOR_NANS (mode))
6528 return fold_build2_loc (loc, code, type, arg,
6529 build_real (TREE_TYPE (arg), c2));
6530
6531 /* sqrt(x) < c is the same as x >= 0 && x < c*c. */
6532 arg = save_expr (arg);
6533 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
6534 fold_build2_loc (loc, GE_EXPR, type, arg,
6535 build_real (TREE_TYPE (arg),
6536 dconst0)),
6537 fold_build2_loc (loc, code, type, arg,
6538 build_real (TREE_TYPE (arg),
6539 c2)));
6540 }
6541 }
6542
6543 return NULL_TREE;
6544 }
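
/* Source-level sketch of the simplest case above (hypothetical example,
   assuming c >= 0 and flags that let the fold fire): sqrt (x) > c
   becomes x > c * c, trading the library call for one multiply.  */

static int
sqrt_compare_sketch (double x, double c)
{
  return x > c * c;
}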
6545
6546 /* Subroutine of fold() that optimizes comparisons against Infinities,
6547 either +Inf or -Inf.
6548
6549 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6550 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6551 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6552
6553 The function returns the constant folded tree if a simplification
6554 can be made, and NULL_TREE otherwise. */
6555
6556 static tree
6557 fold_inf_compare (location_t loc, enum tree_code code, tree type,
6558 tree arg0, tree arg1)
6559 {
6560 machine_mode mode;
6561 REAL_VALUE_TYPE max;
6562 tree temp;
6563 bool neg;
6564
6565 mode = TYPE_MODE (TREE_TYPE (arg0));
6566
6567 /* For negative infinity swap the sense of the comparison. */
6568 neg = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1));
6569 if (neg)
6570 code = swap_tree_comparison (code);
6571
6572 switch (code)
6573 {
6574 case GT_EXPR:
6575      /* x > +Inf is always false, if we ignore sNaNs.  */
6576 if (HONOR_SNANS (mode))
6577 return NULL_TREE;
6578 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
6579
6580 case LE_EXPR:
6581      /* x <= +Inf is always true, if we don't care about NaNs.  */
6582 if (! HONOR_NANS (mode))
6583 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
6584
6585      /* x <= +Inf is the same as x == x, i.e. !isnan(x).  */
6586 arg0 = save_expr (arg0);
6587 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg0);
6588
6589 case EQ_EXPR:
6590 case GE_EXPR:
6591 /* x == +Inf and x >= +Inf are always equal to x > DBL_MAX. */
6592 real_maxval (&max, neg, mode);
6593 return fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6594 arg0, build_real (TREE_TYPE (arg0), max));
6595
6596 case LT_EXPR:
6597 /* x < +Inf is always equal to x <= DBL_MAX. */
6598 real_maxval (&max, neg, mode);
6599 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6600 arg0, build_real (TREE_TYPE (arg0), max));
6601
6602 case NE_EXPR:
6603 /* x != +Inf is always equal to !(x > DBL_MAX). */
6604 real_maxval (&max, neg, mode);
6605 if (! HONOR_NANS (mode))
6606 return fold_build2_loc (loc, neg ? GE_EXPR : LE_EXPR, type,
6607 arg0, build_real (TREE_TYPE (arg0), max));
6608
6609 temp = fold_build2_loc (loc, neg ? LT_EXPR : GT_EXPR, type,
6610 arg0, build_real (TREE_TYPE (arg0), max));
6611 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type, temp);
6612
6613 default:
6614 break;
6615 }
6616
6617 return NULL_TREE;
6618 }
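
/* Worked instance of the LT_EXPR case above (standalone sketch using
   GCC's predefined __DBL_MAX__): x < +Inf is the same as x <= DBL_MAX,
   since only +Inf exceeds DBL_MAX and a NaN compares false either way.  */

static int
inf_compare_sketch (double x)
{
  return x <= __DBL_MAX__;
}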
6619
6620 /* Subroutine of fold() that optimizes comparisons of a division by
6621 a nonzero integer constant against an integer constant, i.e.
6622 X/C1 op C2.
6623
6624 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6625 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6626    are the operands of the comparison.  ARG1 must be an INTEGER_CST.
6627
6628 The function returns the constant folded tree if a simplification
6629 can be made, and NULL_TREE otherwise. */
6630
6631 static tree
6632 fold_div_compare (location_t loc,
6633 enum tree_code code, tree type, tree arg0, tree arg1)
6634 {
6635 tree prod, tmp, hi, lo;
6636 tree arg00 = TREE_OPERAND (arg0, 0);
6637 tree arg01 = TREE_OPERAND (arg0, 1);
6638 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6639 bool neg_overflow = false;
6640 bool overflow;
6641
6642 /* We have to do this the hard way to detect unsigned overflow.
6643 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6644 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6645 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6646 neg_overflow = false;
6647
6648 if (sign == UNSIGNED)
6649 {
6650 tmp = int_const_binop (MINUS_EXPR, arg01,
6651 build_int_cst (TREE_TYPE (arg01), 1));
6652 lo = prod;
6653
6654 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6655 val = wi::add (prod, tmp, sign, &overflow);
6656 hi = force_fit_type (TREE_TYPE (arg00), val,
6657 -1, overflow | TREE_OVERFLOW (prod));
6658 }
6659 else if (tree_int_cst_sgn (arg01) >= 0)
6660 {
6661 tmp = int_const_binop (MINUS_EXPR, arg01,
6662 build_int_cst (TREE_TYPE (arg01), 1));
6663 switch (tree_int_cst_sgn (arg1))
6664 {
6665 case -1:
6666 neg_overflow = true;
6667 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6668 hi = prod;
6669 break;
6670
6671 case 0:
6672 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6673 hi = tmp;
6674 break;
6675
6676 case 1:
6677 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6678 lo = prod;
6679 break;
6680
6681 default:
6682 gcc_unreachable ();
6683 }
6684 }
6685 else
6686 {
6687 /* A negative divisor reverses the relational operators. */
6688 code = swap_tree_comparison (code);
6689
6690 tmp = int_const_binop (PLUS_EXPR, arg01,
6691 build_int_cst (TREE_TYPE (arg01), 1));
6692 switch (tree_int_cst_sgn (arg1))
6693 {
6694 case -1:
6695 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6696 lo = prod;
6697 break;
6698
6699 case 0:
6700 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6701 lo = tmp;
6702 break;
6703
6704 case 1:
6705 neg_overflow = true;
6706 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6707 hi = prod;
6708 break;
6709
6710 default:
6711 gcc_unreachable ();
6712 }
6713 }
6714
6715 switch (code)
6716 {
6717 case EQ_EXPR:
6718 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6719 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6720 if (TREE_OVERFLOW (hi))
6721 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6722 if (TREE_OVERFLOW (lo))
6723 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6724 return build_range_check (loc, type, arg00, 1, lo, hi);
6725
6726 case NE_EXPR:
6727 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6728 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6729 if (TREE_OVERFLOW (hi))
6730 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6731 if (TREE_OVERFLOW (lo))
6732 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6733 return build_range_check (loc, type, arg00, 0, lo, hi);
6734
6735 case LT_EXPR:
6736 if (TREE_OVERFLOW (lo))
6737 {
6738 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6739 return omit_one_operand_loc (loc, type, tmp, arg00);
6740 }
6741 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6742
6743 case LE_EXPR:
6744 if (TREE_OVERFLOW (hi))
6745 {
6746 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6747 return omit_one_operand_loc (loc, type, tmp, arg00);
6748 }
6749 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6750
6751 case GT_EXPR:
6752 if (TREE_OVERFLOW (hi))
6753 {
6754 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6755 return omit_one_operand_loc (loc, type, tmp, arg00);
6756 }
6757 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6758
6759 case GE_EXPR:
6760 if (TREE_OVERFLOW (lo))
6761 {
6762 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6763 return omit_one_operand_loc (loc, type, tmp, arg00);
6764 }
6765 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6766
6767 default:
6768 break;
6769 }
6770
6771 return NULL_TREE;
6772 }
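
/* Worked instance of the fold above (standalone sketch, unsigned):
   x / 4 == 3 holds exactly for x in [12, 15], i.e. lo = 3 * 4 = 12 and
   hi = lo + (4 - 1) = 15, so the division disappears into a range
   check.  */

static int
div_compare_sketch (unsigned int x)
{
  /* build_range_check would emit this as (x - 12) <= 3 in unsigned
     arithmetic; written out directly here.  */
  return x >= 12 && x <= 15;
}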
6773
6774
6775 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6776 equality/inequality test, then return a simplified form of the test
6777    using a sign test.  Otherwise return NULL.  RESULT_TYPE is the desired
6778 result type. */
6779
6780 static tree
6781 fold_single_bit_test_into_sign_test (location_t loc,
6782 enum tree_code code, tree arg0, tree arg1,
6783 tree result_type)
6784 {
6785 /* If this is testing a single bit, we can optimize the test. */
6786 if ((code == NE_EXPR || code == EQ_EXPR)
6787 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6788 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6789 {
6790 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6791 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6792 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6793
6794 if (arg00 != NULL_TREE
6795 /* This is only a win if casting to a signed type is cheap,
6796 i.e. when arg00's type is not a partial mode. */
6797 && TYPE_PRECISION (TREE_TYPE (arg00))
6798 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6799 {
6800 tree stype = signed_type_for (TREE_TYPE (arg00));
6801 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6802 result_type,
6803 fold_convert_loc (loc, stype, arg00),
6804 build_int_cst (stype, 0));
6805 }
6806 }
6807
6808 return NULL_TREE;
6809 }
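
/* Source-level sketch of the sign-test fold (hypothetical example,
   32-bit two's-complement int): when C is the sign bit of A,
   (A & C) != 0 is just A < 0 and (A & C) == 0 is A >= 0.  */

static int
sign_test_sketch (int a)
{
  /* Equivalent to (a & 0x80000000) != 0: testing the sign bit is
     testing negativity.  */
  return a < 0;
}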
6810
6811 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6812 equality/inequality test, then return a simplified form of
6813 the test using shifts and logical operations. Otherwise return
6814    NULL.  RESULT_TYPE is the desired result type.  */
6815
6816 tree
6817 fold_single_bit_test (location_t loc, enum tree_code code,
6818 tree arg0, tree arg1, tree result_type)
6819 {
6820 /* If this is testing a single bit, we can optimize the test. */
6821 if ((code == NE_EXPR || code == EQ_EXPR)
6822 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6823 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6824 {
6825 tree inner = TREE_OPERAND (arg0, 0);
6826 tree type = TREE_TYPE (arg0);
6827 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6828 machine_mode operand_mode = TYPE_MODE (type);
6829 int ops_unsigned;
6830 tree signed_type, unsigned_type, intermediate_type;
6831 tree tem, one;
6832
6833 /* First, see if we can fold the single bit test into a sign-bit
6834 test. */
6835 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6836 result_type);
6837 if (tem)
6838 return tem;
6839
6840 /* Otherwise we have (A & C) != 0 where C is a single bit,
6841	 convert that into ((A >> C2) & 1), where C2 = log2(C).
6842 Similarly for (A & C) == 0. */
6843
6844 /* If INNER is a right shift of a constant and it plus BITNUM does
6845 not overflow, adjust BITNUM and INNER. */
6846 if (TREE_CODE (inner) == RSHIFT_EXPR
6847 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6848 && bitnum < TYPE_PRECISION (type)
6849 && wi::ltu_p (TREE_OPERAND (inner, 1),
6850 TYPE_PRECISION (type) - bitnum))
6851 {
6852 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6853 inner = TREE_OPERAND (inner, 0);
6854 }
6855
6856 /* If we are going to be able to omit the AND below, we must do our
6857 operations as unsigned. If we must use the AND, we have a choice.
6858 Normally unsigned is faster, but for some machines signed is. */
6859 #ifdef LOAD_EXTEND_OP
6860 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6861 && !flag_syntax_only) ? 0 : 1;
6862 #else
6863 ops_unsigned = 1;
6864 #endif
6865
6866 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6867 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6868 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6869 inner = fold_convert_loc (loc, intermediate_type, inner);
6870
6871 if (bitnum != 0)
6872 inner = build2 (RSHIFT_EXPR, intermediate_type,
6873 inner, size_int (bitnum));
6874
6875 one = build_int_cst (intermediate_type, 1);
6876
6877 if (code == EQ_EXPR)
6878 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6879
6880 /* Put the AND last so it can combine with more things. */
6881 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6882
6883 /* Make sure to return the proper type. */
6884 inner = fold_convert_loc (loc, result_type, inner);
6885
6886 return inner;
6887 }
6888 return NULL_TREE;
6889 }
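
/* Source-level sketch of the shift form above (hypothetical example):
   (a & 8) != 0 becomes (a >> 3) & 1; the == 0 flavor additionally XORs
   the result with 1, as in the code above.  */

static unsigned int
single_bit_test_sketch (unsigned int a)
{
  return (a >> 3) & 1;
}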
6890
6891 /* Check whether we are allowed to reorder operands arg0 and arg1,
6892 such that the evaluation of arg1 occurs before arg0. */
6893
6894 static bool
6895 reorder_operands_p (const_tree arg0, const_tree arg1)
6896 {
6897 if (! flag_evaluation_order)
6898 return true;
6899 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6900 return true;
6901 return ! TREE_SIDE_EFFECTS (arg0)
6902 && ! TREE_SIDE_EFFECTS (arg1);
6903 }
6904
6905 /* Test whether it is preferable to swap two operands, ARG0 and
6906 ARG1, for example because ARG0 is an integer constant and ARG1
6907 isn't. If REORDER is true, only recommend swapping if we can
6908 evaluate the operands in reverse order. */
6909
6910 bool
6911 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6912 {
6913 if (CONSTANT_CLASS_P (arg1))
6914 return 0;
6915 if (CONSTANT_CLASS_P (arg0))
6916 return 1;
6917
6918 STRIP_NOPS (arg0);
6919 STRIP_NOPS (arg1);
6920
6921 if (TREE_CONSTANT (arg1))
6922 return 0;
6923 if (TREE_CONSTANT (arg0))
6924 return 1;
6925
6926 if (reorder && flag_evaluation_order
6927 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6928 return 0;
6929
6930 /* It is preferable to swap two SSA_NAMEs to ensure a canonical form
6931 for commutative and comparison operators. Ensuring a canonical
6932 form allows the optimizers to find additional redundancies without
6933 having to explicitly check for both orderings. */
6934 if (TREE_CODE (arg0) == SSA_NAME
6935 && TREE_CODE (arg1) == SSA_NAME
6936 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6937 return 1;
6938
6939 /* Put SSA_NAMEs last. */
6940 if (TREE_CODE (arg1) == SSA_NAME)
6941 return 0;
6942 if (TREE_CODE (arg0) == SSA_NAME)
6943 return 1;
6944
6945 /* Put variables last. */
6946 if (DECL_P (arg1))
6947 return 0;
6948 if (DECL_P (arg0))
6949 return 1;
6950
6951 return 0;
6952 }
6953
6954 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where
6955 ARG0 is extended to a wider type. */
6956
6957 static tree
6958 fold_widened_comparison (location_t loc, enum tree_code code,
6959 tree type, tree arg0, tree arg1)
6960 {
6961 tree arg0_unw = get_unwidened (arg0, NULL_TREE);
6962 tree arg1_unw;
6963 tree shorter_type, outer_type;
6964 tree min, max;
6965 bool above, below;
6966
6967 if (arg0_unw == arg0)
6968 return NULL_TREE;
6969 shorter_type = TREE_TYPE (arg0_unw);
6970
6971 #ifdef HAVE_canonicalize_funcptr_for_compare
6972 /* Disable this optimization if we're casting a function pointer
6973 type on targets that require function pointer canonicalization. */
6974 if (HAVE_canonicalize_funcptr_for_compare
6975 && TREE_CODE (shorter_type) == POINTER_TYPE
6976 && TREE_CODE (TREE_TYPE (shorter_type)) == FUNCTION_TYPE)
6977 return NULL_TREE;
6978 #endif
6979
6980 if (TYPE_PRECISION (TREE_TYPE (arg0)) <= TYPE_PRECISION (shorter_type))
6981 return NULL_TREE;
6982
6983 arg1_unw = get_unwidened (arg1, NULL_TREE);
6984
6985 /* If possible, express the comparison in the shorter mode. */
6986 if ((code == EQ_EXPR || code == NE_EXPR
6987 || TYPE_UNSIGNED (TREE_TYPE (arg0)) == TYPE_UNSIGNED (shorter_type))
6988 && (TREE_TYPE (arg1_unw) == shorter_type
6989 || ((TYPE_PRECISION (shorter_type)
6990 >= TYPE_PRECISION (TREE_TYPE (arg1_unw)))
6991 && (TYPE_UNSIGNED (shorter_type)
6992 == TYPE_UNSIGNED (TREE_TYPE (arg1_unw))))
6993 || (TREE_CODE (arg1_unw) == INTEGER_CST
6994 && (TREE_CODE (shorter_type) == INTEGER_TYPE
6995 || TREE_CODE (shorter_type) == BOOLEAN_TYPE)
6996 && int_fits_type_p (arg1_unw, shorter_type))))
6997 return fold_build2_loc (loc, code, type, arg0_unw,
6998 fold_convert_loc (loc, shorter_type, arg1_unw));
6999
7000 if (TREE_CODE (arg1_unw) != INTEGER_CST
7001 || TREE_CODE (shorter_type) != INTEGER_TYPE
7002 || !int_fits_type_p (arg1_unw, shorter_type))
7003 return NULL_TREE;
7004
7005 /* If we are comparing with an integer that does not fit into the range
7006 of the shorter type, the result is known. */
7007 outer_type = TREE_TYPE (arg1_unw);
7008 min = lower_bound_in_type (outer_type, shorter_type);
7009 max = upper_bound_in_type (outer_type, shorter_type);
7010
7011 above = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7012 max, arg1_unw));
7013 below = integer_nonzerop (fold_relational_const (LT_EXPR, type,
7014 arg1_unw, min));
7015
7016 switch (code)
7017 {
7018 case EQ_EXPR:
7019 if (above || below)
7020 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7021 break;
7022
7023 case NE_EXPR:
7024 if (above || below)
7025 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7026 break;
7027
7028 case LT_EXPR:
7029 case LE_EXPR:
7030 if (above)
7031 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7032 else if (below)
7033 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7034
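/* Fall through. */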
7035 case GT_EXPR:
7036 case GE_EXPR:
7037 if (above)
7038 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
7039 else if (below)
7040 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
7041
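/* Fall through. */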
7042 default:
7043 break;
7044 }
7045
7046 return NULL_TREE;
7047 }
7048
7049 /* Fold comparison ARG0 CODE ARG1 (with result in TYPE), where for
7050 ARG0 just the signedness is changed. */
7051
7052 static tree
7053 fold_sign_changed_comparison (location_t loc, enum tree_code code, tree type,
7054 tree arg0, tree arg1)
7055 {
7056 tree arg0_inner;
7057 tree inner_type, outer_type;
7058
7059 if (!CONVERT_EXPR_P (arg0))
7060 return NULL_TREE;
7061
7062 outer_type = TREE_TYPE (arg0);
7063 arg0_inner = TREE_OPERAND (arg0, 0);
7064 inner_type = TREE_TYPE (arg0_inner);
7065
7066 #ifdef HAVE_canonicalize_funcptr_for_compare
7067 /* Disable this optimization if we're casting a function pointer
7068 type on targets that require function pointer canonicalization. */
7069 if (HAVE_canonicalize_funcptr_for_compare
7070 && TREE_CODE (inner_type) == POINTER_TYPE
7071 && TREE_CODE (TREE_TYPE (inner_type)) == FUNCTION_TYPE)
7072 return NULL_TREE;
7073 #endif
7074
7075 if (TYPE_PRECISION (inner_type) != TYPE_PRECISION (outer_type))
7076 return NULL_TREE;
7077
7078 if (TREE_CODE (arg1) != INTEGER_CST
7079 && !(CONVERT_EXPR_P (arg1)
7080 && TREE_TYPE (TREE_OPERAND (arg1, 0)) == inner_type))
7081 return NULL_TREE;
7082
7083 if (TYPE_UNSIGNED (inner_type) != TYPE_UNSIGNED (outer_type)
7084 && code != NE_EXPR
7085 && code != EQ_EXPR)
7086 return NULL_TREE;
7087
7088 if (POINTER_TYPE_P (inner_type) != POINTER_TYPE_P (outer_type))
7089 return NULL_TREE;
7090
7091 if (TREE_CODE (arg1) == INTEGER_CST)
7092 arg1 = force_fit_type (inner_type, wi::to_widest (arg1), 0,
7093 TREE_OVERFLOW (arg1));
7094 else
7095 arg1 = fold_convert_loc (loc, inner_type, arg1);
7096
7097 return fold_build2_loc (loc, code, type, arg0_inner, arg1);
7098 }
7099
7100
7101 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
7102 means A >= Y && A != MAX, but in this case we know that
7103 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
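/* For example, given BOUND = a < n and INEQ = a + 1 > i, this builds
the tree for a >= i. */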
7104
7105 static tree
7106 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
7107 {
7108 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
7109
7110 if (TREE_CODE (bound) == LT_EXPR)
7111 a = TREE_OPERAND (bound, 0);
7112 else if (TREE_CODE (bound) == GT_EXPR)
7113 a = TREE_OPERAND (bound, 1);
7114 else
7115 return NULL_TREE;
7116
7117 typea = TREE_TYPE (a);
7118 if (!INTEGRAL_TYPE_P (typea)
7119 && !POINTER_TYPE_P (typea))
7120 return NULL_TREE;
7121
7122 if (TREE_CODE (ineq) == LT_EXPR)
7123 {
7124 a1 = TREE_OPERAND (ineq, 1);
7125 y = TREE_OPERAND (ineq, 0);
7126 }
7127 else if (TREE_CODE (ineq) == GT_EXPR)
7128 {
7129 a1 = TREE_OPERAND (ineq, 0);
7130 y = TREE_OPERAND (ineq, 1);
7131 }
7132 else
7133 return NULL_TREE;
7134
7135 if (TREE_TYPE (a1) != typea)
7136 return NULL_TREE;
7137
7138 if (POINTER_TYPE_P (typea))
7139 {
7140 /* Convert the pointers to integers before taking the difference. */
7141 tree ta = fold_convert_loc (loc, ssizetype, a);
7142 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
7143 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
7144 }
7145 else
7146 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
7147
7148 if (!diff || !integer_onep (diff))
7149 return NULL_TREE;
7150
7151 return fold_build2_loc (loc, GE_EXPR, type, a, y);
7152 }
7153
7154 /* Fold a sum or difference of at least one multiplication.
7155 Returns the folded tree or NULL if no simplification could be made. */
7156
7157 static tree
7158 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
7159 tree arg0, tree arg1)
7160 {
7161 tree arg00, arg01, arg10, arg11;
7162 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
7163
7164 /* (A * C) +- (B * C) -> (A+-B) * C.
7165 (A * C) +- A -> A * (C+-1).
7166 We are most concerned about the case where C is a constant,
7167 but other combinations show up during loop reduction. Since
7168 it is not difficult, try all four possibilities. */
7169
7170 if (TREE_CODE (arg0) == MULT_EXPR)
7171 {
7172 arg00 = TREE_OPERAND (arg0, 0);
7173 arg01 = TREE_OPERAND (arg0, 1);
7174 }
7175 else if (TREE_CODE (arg0) == INTEGER_CST)
7176 {
7177 arg00 = build_one_cst (type);
7178 arg01 = arg0;
7179 }
7180 else
7181 {
7182 /* We cannot generate constant 1 for fract. */
7183 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7184 return NULL_TREE;
7185 arg00 = arg0;
7186 arg01 = build_one_cst (type);
7187 }
7188 if (TREE_CODE (arg1) == MULT_EXPR)
7189 {
7190 arg10 = TREE_OPERAND (arg1, 0);
7191 arg11 = TREE_OPERAND (arg1, 1);
7192 }
7193 else if (TREE_CODE (arg1) == INTEGER_CST)
7194 {
7195 arg10 = build_one_cst (type);
7196 /* As we canonicalize A - 2 to A + -2, get rid of that sign for
7197 the purpose of this canonicalization. */
7198 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7199 && negate_expr_p (arg1)
7200 && code == PLUS_EXPR)
7201 {
7202 arg11 = negate_expr (arg1);
7203 code = MINUS_EXPR;
7204 }
7205 else
7206 arg11 = arg1;
7207 }
7208 else
7209 {
7210 /* We cannot generate constant 1 for fract. */
7211 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7212 return NULL_TREE;
7213 arg10 = arg1;
7214 arg11 = build_one_cst (type);
7215 }
7216 same = NULL_TREE;
7217
7218 if (operand_equal_p (arg01, arg11, 0))
7219 same = arg01, alt0 = arg00, alt1 = arg10;
7220 else if (operand_equal_p (arg00, arg10, 0))
7221 same = arg00, alt0 = arg01, alt1 = arg11;
7222 else if (operand_equal_p (arg00, arg11, 0))
7223 same = arg00, alt0 = arg01, alt1 = arg10;
7224 else if (operand_equal_p (arg01, arg10, 0))
7225 same = arg01, alt0 = arg00, alt1 = arg11;
7226
7227 /* No identical multiplicands; see if we can find a common
7228 power-of-two factor in non-power-of-two multiplies. This
7229 can help in multi-dimensional array access. */
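/* For example, i * 4 + j * 2 has the common power-of-two factor 2
and can be rewritten as (i * 2 + j) * 2. */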
7230 else if (tree_fits_shwi_p (arg01)
7231 && tree_fits_shwi_p (arg11))
7232 {
7233 HOST_WIDE_INT int01, int11, tmp;
7234 bool swap = false;
7235 tree maybe_same;
7236 int01 = tree_to_shwi (arg01);
7237 int11 = tree_to_shwi (arg11);
7238
7239 /* Move min of absolute values to int11. */
7240 if (absu_hwi (int01) < absu_hwi (int11))
7241 {
7242 tmp = int01, int01 = int11, int11 = tmp;
7243 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7244 maybe_same = arg01;
7245 swap = true;
7246 }
7247 else
7248 maybe_same = arg11;
7249
7250 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7251 /* The remainder should not be a constant, otherwise we
7252 end up folding i * 4 + 2 to (i * 2 + 1) * 2, which would
7253 increase the number of multiplications necessary. */
7254 && TREE_CODE (arg10) != INTEGER_CST)
7255 {
7256 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7257 build_int_cst (TREE_TYPE (arg00),
7258 int01 / int11));
7259 alt1 = arg10;
7260 same = maybe_same;
7261 if (swap)
7262 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7263 }
7264 }
7265
7266 if (same)
7267 return fold_build2_loc (loc, MULT_EXPR, type,
7268 fold_build2_loc (loc, code, type,
7269 fold_convert_loc (loc, type, alt0),
7270 fold_convert_loc (loc, type, alt1)),
7271 fold_convert_loc (loc, type, same));
7272
7273 return NULL_TREE;
7274 }
7275
7276 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7277 specified by EXPR into the buffer PTR of length LEN bytes.
7278 Return the number of bytes placed in the buffer, or zero
7279 upon failure. */
7280
7281 static int
7282 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7283 {
7284 tree type = TREE_TYPE (expr);
7285 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7286 int byte, offset, word, words;
7287 unsigned char value;
7288
7289 if ((off == -1 && total_bytes > len)
7290 || off >= total_bytes)
7291 return 0;
7292 if (off == -1)
7293 off = 0;
7294 words = total_bytes / UNITS_PER_WORD;
7295
7296 for (byte = 0; byte < total_bytes; byte++)
7297 {
7298 int bitpos = byte * BITS_PER_UNIT;
7299 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7300 number of bytes. */
7301 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7302
7303 if (total_bytes > UNITS_PER_WORD)
7304 {
7305 word = byte / UNITS_PER_WORD;
7306 if (WORDS_BIG_ENDIAN)
7307 word = (words - 1) - word;
7308 offset = word * UNITS_PER_WORD;
7309 if (BYTES_BIG_ENDIAN)
7310 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7311 else
7312 offset += byte % UNITS_PER_WORD;
7313 }
7314 else
7315 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7316 if (offset >= off
7317 && offset - off < len)
7318 ptr[offset - off] = value;
7319 }
7320 return MIN (len, total_bytes - off);
7321 }
7322
7323
7324 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7325 specified by EXPR into the buffer PTR of length LEN bytes.
7326 Return the number of bytes placed in the buffer, or zero
7327 upon failure. */
7328
7329 static int
7330 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7331 {
7332 tree type = TREE_TYPE (expr);
7333 machine_mode mode = TYPE_MODE (type);
7334 int total_bytes = GET_MODE_SIZE (mode);
7335 FIXED_VALUE_TYPE value;
7336 tree i_value, i_type;
7337
7338 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7339 return 0;
7340
7341 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7342
7343 if (NULL_TREE == i_type
7344 || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7345 return 0;
7346
7347 value = TREE_FIXED_CST (expr);
7348 i_value = double_int_to_tree (i_type, value.data);
7349
7350 return native_encode_int (i_value, ptr, len, off);
7351 }
7352
7353
7354 /* Subroutine of native_encode_expr. Encode the REAL_CST
7355 specified by EXPR into the buffer PTR of length LEN bytes.
7356 Return the number of bytes placed in the buffer, or zero
7357 upon failure. */
7358
7359 static int
7360 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7361 {
7362 tree type = TREE_TYPE (expr);
7363 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7364 int byte, offset, word, words, bitpos;
7365 unsigned char value;
7366
7367 /* There are always 32 bits in each long, no matter the size of
7368 the host's long. We handle floating point representations with
7369 up to 192 bits. */
7370 long tmp[6];
7371
7372 if ((off == -1 && total_bytes > len)
7373 || off >= total_bytes)
7374 return 0;
7375 if (off == -1)
7376 off = 0;
7377 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7378
7379 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7380
7381 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7382 bitpos += BITS_PER_UNIT)
7383 {
7384 byte = (bitpos / BITS_PER_UNIT) & 3;
7385 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7386
7387 if (UNITS_PER_WORD < 4)
7388 {
7389 word = byte / UNITS_PER_WORD;
7390 if (WORDS_BIG_ENDIAN)
7391 word = (words - 1) - word;
7392 offset = word * UNITS_PER_WORD;
7393 if (BYTES_BIG_ENDIAN)
7394 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7395 else
7396 offset += byte % UNITS_PER_WORD;
7397 }
7398 else
7399 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7400 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7401 if (offset >= off
7402 && offset - off < len)
7403 ptr[offset - off] = value;
7404 }
7405 return MIN (len, total_bytes - off);
7406 }
7407
7408 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7409 specified by EXPR into the buffer PTR of length LEN bytes.
7410 Return the number of bytes placed in the buffer, or zero
7411 upon failure. */
7412
7413 static int
7414 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7415 {
7416 int rsize, isize;
7417 tree part;
7418
7419 part = TREE_REALPART (expr);
7420 rsize = native_encode_expr (part, ptr, len, off);
7421 if (off == -1
7422 && rsize == 0)
7423 return 0;
7424 part = TREE_IMAGPART (expr);
7425 if (off != -1)
7426 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7427 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7428 if (off == -1
7429 && isize != rsize)
7430 return 0;
7431 return rsize + isize;
7432 }
7433
7434
7435 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7436 specified by EXPR into the buffer PTR of length LEN bytes.
7437 Return the number of bytes placed in the buffer, or zero
7438 upon failure. */
7439
7440 static int
7441 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7442 {
7443 unsigned i, count;
7444 int size, offset;
7445 tree itype, elem;
7446
7447 offset = 0;
7448 count = VECTOR_CST_NELTS (expr);
7449 itype = TREE_TYPE (TREE_TYPE (expr));
7450 size = GET_MODE_SIZE (TYPE_MODE (itype));
7451 for (i = 0; i < count; i++)
7452 {
7453 if (off >= size)
7454 {
7455 off -= size;
7456 continue;
7457 }
7458 elem = VECTOR_CST_ELT (expr, i);
7459 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7460 if ((off == -1 && res != size)
7461 || res == 0)
7462 return 0;
7463 offset += res;
7464 if (offset >= len)
7465 return offset;
7466 if (off != -1)
7467 off = 0;
7468 }
7469 return offset;
7470 }
7471
7472
7473 /* Subroutine of native_encode_expr. Encode the STRING_CST
7474 specified by EXPR into the buffer PTR of length LEN bytes.
7475 Return the number of bytes placed in the buffer, or zero
7476 upon failure. */
7477
7478 static int
7479 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7480 {
7481 tree type = TREE_TYPE (expr);
7482 HOST_WIDE_INT total_bytes;
7483
7484 if (TREE_CODE (type) != ARRAY_TYPE
7485 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7486 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7487 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7488 return 0;
7489 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7490 if ((off == -1 && total_bytes > len)
7491 || off >= total_bytes)
7492 return 0;
7493 if (off == -1)
7494 off = 0;
7495 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7496 {
7497 int written = 0;
7498 if (off < TREE_STRING_LENGTH (expr))
7499 {
7500 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7501 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7502 }
7503 memset (ptr + written, 0,
7504 MIN (total_bytes - written, len - written));
7505 }
7506 else
7507 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7508 return MIN (total_bytes - off, len);
7509 }
7510
7511
7512 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7513 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7514 buffer PTR of length LEN bytes. If OFF is not -1 then start
7515 the encoding at byte offset OFF and encode at most LEN bytes.
7516 Return the number of bytes placed in the buffer, or zero upon failure. */
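/* For example, on a 32-bit little-endian target, encoding the
INTEGER_CST 258 of type int stores the bytes { 0x02, 0x01, 0x00, 0x00 }
in PTR and returns 4. */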
7517
7518 int
7519 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7520 {
7521 switch (TREE_CODE (expr))
7522 {
7523 case INTEGER_CST:
7524 return native_encode_int (expr, ptr, len, off);
7525
7526 case REAL_CST:
7527 return native_encode_real (expr, ptr, len, off);
7528
7529 case FIXED_CST:
7530 return native_encode_fixed (expr, ptr, len, off);
7531
7532 case COMPLEX_CST:
7533 return native_encode_complex (expr, ptr, len, off);
7534
7535 case VECTOR_CST:
7536 return native_encode_vector (expr, ptr, len, off);
7537
7538 case STRING_CST:
7539 return native_encode_string (expr, ptr, len, off);
7540
7541 default:
7542 return 0;
7543 }
7544 }
7545
7546
7547 /* Subroutine of native_interpret_expr. Interpret the contents of
7548 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7549 If the buffer cannot be interpreted, return NULL_TREE. */
7550
7551 static tree
7552 native_interpret_int (tree type, const unsigned char *ptr, int len)
7553 {
7554 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7555
7556 if (total_bytes > len
7557 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7558 return NULL_TREE;
7559
7560 wide_int result = wi::from_buffer (ptr, total_bytes);
7561
7562 return wide_int_to_tree (type, result);
7563 }
7564
7565
7566 /* Subroutine of native_interpret_expr. Interpret the contents of
7567 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7568 If the buffer cannot be interpreted, return NULL_TREE. */
7569
7570 static tree
7571 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7572 {
7573 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7574 double_int result;
7575 FIXED_VALUE_TYPE fixed_value;
7576
7577 if (total_bytes > len
7578 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7579 return NULL_TREE;
7580
7581 result = double_int::from_buffer (ptr, total_bytes);
7582 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7583
7584 return build_fixed (type, fixed_value);
7585 }
7586
7587
7588 /* Subroutine of native_interpret_expr. Interpret the contents of
7589 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7590 If the buffer cannot be interpreted, return NULL_TREE. */
7591
7592 static tree
7593 native_interpret_real (tree type, const unsigned char *ptr, int len)
7594 {
7595 machine_mode mode = TYPE_MODE (type);
7596 int total_bytes = GET_MODE_SIZE (mode);
7597 int byte, offset, word, words, bitpos;
7598 unsigned char value;
7599 /* There are always 32 bits in each long, no matter the size of
7600 the host's long. We handle floating point representations with
7601 up to 192 bits. */
7602 REAL_VALUE_TYPE r;
7603 long tmp[6];
7604
7606 if (total_bytes > len || total_bytes > 24)
7607 return NULL_TREE;
7608 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7609
7610 memset (tmp, 0, sizeof (tmp));
7611 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7612 bitpos += BITS_PER_UNIT)
7613 {
7614 byte = (bitpos / BITS_PER_UNIT) & 3;
7615 if (UNITS_PER_WORD < 4)
7616 {
7617 word = byte / UNITS_PER_WORD;
7618 if (WORDS_BIG_ENDIAN)
7619 word = (words - 1) - word;
7620 offset = word * UNITS_PER_WORD;
7621 if (BYTES_BIG_ENDIAN)
7622 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7623 else
7624 offset += byte % UNITS_PER_WORD;
7625 }
7626 else
7627 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7628 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7629
7630 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7631 }
7632
7633 real_from_target (&r, tmp, mode);
7634 return build_real (type, r);
7635 }
7636
7637
7638 /* Subroutine of native_interpret_expr. Interpret the contents of
7639 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7640 If the buffer cannot be interpreted, return NULL_TREE. */
7641
7642 static tree
7643 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7644 {
7645 tree etype, rpart, ipart;
7646 int size;
7647
7648 etype = TREE_TYPE (type);
7649 size = GET_MODE_SIZE (TYPE_MODE (etype));
7650 if (size * 2 > len)
7651 return NULL_TREE;
7652 rpart = native_interpret_expr (etype, ptr, size);
7653 if (!rpart)
7654 return NULL_TREE;
7655 ipart = native_interpret_expr (etype, ptr+size, size);
7656 if (!ipart)
7657 return NULL_TREE;
7658 return build_complex (type, rpart, ipart);
7659 }
7660
7661
7662 /* Subroutine of native_interpret_expr. Interpret the contents of
7663 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7664 If the buffer cannot be interpreted, return NULL_TREE. */
7665
7666 static tree
7667 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7668 {
7669 tree etype, elem;
7670 int i, size, count;
7671 tree *elements;
7672
7673 etype = TREE_TYPE (type);
7674 size = GET_MODE_SIZE (TYPE_MODE (etype));
7675 count = TYPE_VECTOR_SUBPARTS (type);
7676 if (size * count > len)
7677 return NULL_TREE;
7678
7679 elements = XALLOCAVEC (tree, count);
7680 for (i = count - 1; i >= 0; i--)
7681 {
7682 elem = native_interpret_expr (etype, ptr+(i*size), size);
7683 if (!elem)
7684 return NULL_TREE;
7685 elements[i] = elem;
7686 }
7687 return build_vector (type, elements);
7688 }
7689
7690
7691 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7692 the buffer PTR of length LEN as a constant of type TYPE. For
7693 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7694 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7695 return NULL_TREE. */
7696
7697 tree
7698 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7699 {
7700 switch (TREE_CODE (type))
7701 {
7702 case INTEGER_TYPE:
7703 case ENUMERAL_TYPE:
7704 case BOOLEAN_TYPE:
7705 case POINTER_TYPE:
7706 case REFERENCE_TYPE:
7707 return native_interpret_int (type, ptr, len);
7708
7709 case REAL_TYPE:
7710 return native_interpret_real (type, ptr, len);
7711
7712 case FIXED_POINT_TYPE:
7713 return native_interpret_fixed (type, ptr, len);
7714
7715 case COMPLEX_TYPE:
7716 return native_interpret_complex (type, ptr, len);
7717
7718 case VECTOR_TYPE:
7719 return native_interpret_vector (type, ptr, len);
7720
7721 default:
7722 return NULL_TREE;
7723 }
7724 }
7725
7726 /* Returns true if we can interpret the contents of a native encoding
7727 as TYPE. */
7728
7729 static bool
7730 can_native_interpret_type_p (tree type)
7731 {
7732 switch (TREE_CODE (type))
7733 {
7734 case INTEGER_TYPE:
7735 case ENUMERAL_TYPE:
7736 case BOOLEAN_TYPE:
7737 case POINTER_TYPE:
7738 case REFERENCE_TYPE:
7739 case FIXED_POINT_TYPE:
7740 case REAL_TYPE:
7741 case COMPLEX_TYPE:
7742 case VECTOR_TYPE:
7743 return true;
7744 default:
7745 return false;
7746 }
7747 }
7748
7749 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7750 TYPE at compile-time. If we're unable to perform the conversion
7751 return NULL_TREE. */
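/* For example, VIEW_CONVERT_EXPR<int>(1.0f) folds to the INTEGER_CST
1065353216 (0x3f800000) on a target using IEEE single precision. */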
7752
7753 static tree
7754 fold_view_convert_expr (tree type, tree expr)
7755 {
7756 /* We support up to 512-bit values (for V8DFmode). */
7757 unsigned char buffer[64];
7758 int len;
7759
7760 /* Check that the host and target are sane. */
7761 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7762 return NULL_TREE;
7763
7764 len = native_encode_expr (expr, buffer, sizeof (buffer));
7765 if (len == 0)
7766 return NULL_TREE;
7767
7768 return native_interpret_expr (type, buffer, len);
7769 }
7770
7771 /* Build an expression for the address of T. Folds away INDIRECT_REF
7772 to avoid confusing the gimplify process. */
7773
7774 tree
7775 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7776 {
7777 /* The size of the object is not relevant when talking about its address. */
7778 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7779 t = TREE_OPERAND (t, 0);
7780
7781 if (TREE_CODE (t) == INDIRECT_REF)
7782 {
7783 t = TREE_OPERAND (t, 0);
7784
7785 if (TREE_TYPE (t) != ptrtype)
7786 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7787 }
7788 else if (TREE_CODE (t) == MEM_REF
7789 && integer_zerop (TREE_OPERAND (t, 1)))
7790 return TREE_OPERAND (t, 0);
7791 else if (TREE_CODE (t) == MEM_REF
7792 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7793 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7794 TREE_OPERAND (t, 0),
7795 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7796 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7797 {
7798 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7799
7800 if (TREE_TYPE (t) != ptrtype)
7801 t = fold_convert_loc (loc, ptrtype, t);
7802 }
7803 else
7804 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7805
7806 return t;
7807 }
7808
7809 /* Build an expression for the address of T. */
7810
7811 tree
7812 build_fold_addr_expr_loc (location_t loc, tree t)
7813 {
7814 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7815
7816 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7817 }
7818
7819 /* Fold a unary expression of code CODE and type TYPE with operand
7820 OP0. Return the folded expression if folding is successful.
7821 Otherwise, return NULL_TREE. */
7822
7823 tree
7824 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7825 {
7826 tree tem;
7827 tree arg0;
7828 enum tree_code_class kind = TREE_CODE_CLASS (code);
7829
7830 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7831 && TREE_CODE_LENGTH (code) == 1);
7832
7833 arg0 = op0;
7834 if (arg0)
7835 {
7836 if (CONVERT_EXPR_CODE_P (code)
7837 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7838 {
7839 /* Don't use STRIP_NOPS, because signedness of argument type
7840 matters. */
7841 STRIP_SIGN_NOPS (arg0);
7842 }
7843 else
7844 {
7845 /* Strip any conversions that don't change the mode. This
7846 is safe for every expression, except for a comparison
7847 expression because its signedness is derived from its
7848 operands.
7849
7850 Note that this is done as an internal manipulation within
7851 the constant folder, in order to find the simplest
7852 representation of the arguments so that their form can be
7853 studied. In any case, the appropriate type conversions
7854 should be put back in the tree that is returned from the
7855 constant folder. */
7856 STRIP_NOPS (arg0);
7857 }
7858
7859 if (CONSTANT_CLASS_P (arg0))
7860 {
7861 tree tem = const_unop (code, type, arg0);
7862 if (tem)
7863 {
7864 if (TREE_TYPE (tem) != type)
7865 tem = fold_convert_loc (loc, type, tem);
7866 return tem;
7867 }
7868 }
7869 }
7870
7871 tem = generic_simplify (loc, code, type, op0);
7872 if (tem)
7873 return tem;
7874
7875 if (TREE_CODE_CLASS (code) == tcc_unary)
7876 {
7877 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7878 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7879 fold_build1_loc (loc, code, type,
7880 fold_convert_loc (loc, TREE_TYPE (op0),
7881 TREE_OPERAND (arg0, 1))));
7882 else if (TREE_CODE (arg0) == COND_EXPR)
7883 {
7884 tree arg01 = TREE_OPERAND (arg0, 1);
7885 tree arg02 = TREE_OPERAND (arg0, 2);
7886 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7887 arg01 = fold_build1_loc (loc, code, type,
7888 fold_convert_loc (loc,
7889 TREE_TYPE (op0), arg01));
7890 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7891 arg02 = fold_build1_loc (loc, code, type,
7892 fold_convert_loc (loc,
7893 TREE_TYPE (op0), arg02));
7894 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7895 arg01, arg02);
7896
7897 /* If this was a conversion, and all we did was to move it
7898 inside the COND_EXPR, bring it back out. But leave it if
7899 it is a conversion from integer to integer and the
7900 result precision is no wider than a word since such a
7901 conversion is cheap and may be optimized away by combine,
7902 while it couldn't if it were outside the COND_EXPR. Then return
7903 so we don't get into an infinite recursion loop taking the
7904 conversion out and then back in. */
7905
7906 if ((CONVERT_EXPR_CODE_P (code)
7907 || code == NON_LVALUE_EXPR)
7908 && TREE_CODE (tem) == COND_EXPR
7909 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7910 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7911 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7912 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7913 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7914 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7915 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7916 && (INTEGRAL_TYPE_P
7917 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7918 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7919 || flag_syntax_only))
7920 tem = build1_loc (loc, code, type,
7921 build3 (COND_EXPR,
7922 TREE_TYPE (TREE_OPERAND
7923 (TREE_OPERAND (tem, 1), 0)),
7924 TREE_OPERAND (tem, 0),
7925 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7926 TREE_OPERAND (TREE_OPERAND (tem, 2),
7927 0)));
7928 return tem;
7929 }
7930 }
7931
7932 switch (code)
7933 {
7934 case NON_LVALUE_EXPR:
7935 if (!maybe_lvalue_p (op0))
7936 return fold_convert_loc (loc, type, op0);
7937 return NULL_TREE;
7938
7939 CASE_CONVERT:
7940 case FLOAT_EXPR:
7941 case FIX_TRUNC_EXPR:
7942 if (COMPARISON_CLASS_P (op0))
7943 {
7944 /* If we have (type) (a CMP b) and type is an integral type, return
7945 new expression involving the new type. Canonicalize
7946 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7947 non-integral type.
7948 Do not fold the result as that would not simplify further; also,
7949 folding again would result in recursion. */
7950 if (TREE_CODE (type) == BOOLEAN_TYPE)
7951 return build2_loc (loc, TREE_CODE (op0), type,
7952 TREE_OPERAND (op0, 0),
7953 TREE_OPERAND (op0, 1));
7954 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7955 && TREE_CODE (type) != VECTOR_TYPE)
7956 return build3_loc (loc, COND_EXPR, type, op0,
7957 constant_boolean_node (true, type),
7958 constant_boolean_node (false, type));
7959 }
7960
7961 /* Handle (T *)&A.B.C for A being of type T and B and C
7962 living at offset zero. This occurs frequently in
7963 C++ upcasting and then accessing the base. */
7964 if (TREE_CODE (op0) == ADDR_EXPR
7965 && POINTER_TYPE_P (type)
7966 && handled_component_p (TREE_OPERAND (op0, 0)))
7967 {
7968 HOST_WIDE_INT bitsize, bitpos;
7969 tree offset;
7970 machine_mode mode;
7971 int unsignedp, volatilep;
7972 tree base = TREE_OPERAND (op0, 0);
7973 base = get_inner_reference (base, &bitsize, &bitpos, &offset,
7974 &mode, &unsignedp, &volatilep, false);
7975 /* If the reference was to a (constant) zero offset, we can use
7976 the address of the base if it has the same base type
7977 as the result type and the pointer type is unqualified. */
7978 if (! offset && bitpos == 0
7979 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7980 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7981 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7982 return fold_convert_loc (loc, type,
7983 build_fold_addr_expr_loc (loc, base));
7984 }
7985
7986 if (TREE_CODE (op0) == MODIFY_EXPR
7987 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7988 /* Detect assigning a bitfield. */
7989 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7990 && DECL_BIT_FIELD
7991 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7992 {
7993 /* Don't leave an assignment inside a conversion
7994 unless assigning a bitfield. */
7995 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7996 /* First do the assignment, then return converted constant. */
7997 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7998 TREE_NO_WARNING (tem) = 1;
7999 TREE_USED (tem) = 1;
8000 return tem;
8001 }
8002
8003 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
8004 constant (if x has signed type, the sign bit cannot be set
8005 in c). This folds extension into the BIT_AND_EXPR.
8006 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
8007 very likely don't have maximal range for their precision and this
8008 transformation effectively doesn't preserve non-maximal ranges. */
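/* For example, (unsigned int) (x & 0x7f) becomes
(unsigned int) x & 0x7f when x is a signed char. */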
8009 if (TREE_CODE (type) == INTEGER_TYPE
8010 && TREE_CODE (op0) == BIT_AND_EXPR
8011 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
8012 {
8013 tree and_expr = op0;
8014 tree and0 = TREE_OPERAND (and_expr, 0);
8015 tree and1 = TREE_OPERAND (and_expr, 1);
8016 int change = 0;
8017
8018 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
8019 || (TYPE_PRECISION (type)
8020 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
8021 change = 1;
8022 else if (TYPE_PRECISION (TREE_TYPE (and1))
8023 <= HOST_BITS_PER_WIDE_INT
8024 && tree_fits_uhwi_p (and1))
8025 {
8026 unsigned HOST_WIDE_INT cst;
8027
8028 cst = tree_to_uhwi (and1);
8029 cst &= HOST_WIDE_INT_M1U
8030 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
8031 change = (cst == 0);
8032 #ifdef LOAD_EXTEND_OP
8033 if (change
8034 && !flag_syntax_only
8035 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
8036 == ZERO_EXTEND))
8037 {
8038 tree uns = unsigned_type_for (TREE_TYPE (and0));
8039 and0 = fold_convert_loc (loc, uns, and0);
8040 and1 = fold_convert_loc (loc, uns, and1);
8041 }
8042 #endif
8043 }
8044 if (change)
8045 {
8046 tem = force_fit_type (type, wi::to_widest (and1), 0,
8047 TREE_OVERFLOW (and1));
8048 return fold_build2_loc (loc, BIT_AND_EXPR, type,
8049 fold_convert_loc (loc, type, and0), tem);
8050 }
8051 }
8052
8053 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
8054 when one of the new casts will fold away. Conservatively we assume
8055 that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
8056 if (POINTER_TYPE_P (type)
8057 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
8058 && (!TYPE_RESTRICT (type) || TYPE_RESTRICT (TREE_TYPE (arg0)))
8059 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8060 || TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
8061 || TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
8062 {
8063 tree arg00 = TREE_OPERAND (arg0, 0);
8064 tree arg01 = TREE_OPERAND (arg0, 1);
8065
8066 return fold_build_pointer_plus_loc
8067 (loc, fold_convert_loc (loc, type, arg00), arg01);
8068 }
8069
8070 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
8071 of the same precision, and X has an integer type not narrower than
8072 T1 or T2, i.e. the cast (T2)X isn't an extension. */
8073 if (INTEGRAL_TYPE_P (type)
8074 && TREE_CODE (op0) == BIT_NOT_EXPR
8075 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8076 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
8077 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
8078 {
8079 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
8080 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
8081 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
8082 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
8083 fold_convert_loc (loc, type, tem));
8084 }
8085
8086 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
8087 type of X and Y (integer types only). */
8088 if (INTEGRAL_TYPE_P (type)
8089 && TREE_CODE (op0) == MULT_EXPR
8090 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
8091 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
8092 {
8093 /* Be careful not to introduce new overflows. */
8094 tree mult_type;
8095 if (TYPE_OVERFLOW_WRAPS (type))
8096 mult_type = type;
8097 else
8098 mult_type = unsigned_type_for (type);
8099
8100 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
8101 {
8102 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
8103 fold_convert_loc (loc, mult_type,
8104 TREE_OPERAND (op0, 0)),
8105 fold_convert_loc (loc, mult_type,
8106 TREE_OPERAND (op0, 1)));
8107 return fold_convert_loc (loc, type, tem);
8108 }
8109 }
8110
8111 return NULL_TREE;
8112
8113 case VIEW_CONVERT_EXPR:
8114 if (TREE_CODE (op0) == MEM_REF)
8115 return fold_build2_loc (loc, MEM_REF, type,
8116 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
8117
8118 return NULL_TREE;
8119
8120 case NEGATE_EXPR:
8121 tem = fold_negate_expr (loc, arg0);
8122 if (tem)
8123 return fold_convert_loc (loc, type, tem);
8124 return NULL_TREE;
8125
8126 case ABS_EXPR:
8127 /* Convert fabs((double)float) into (double)fabsf(float). */
8128 if (TREE_CODE (arg0) == NOP_EXPR
8129 && TREE_CODE (type) == REAL_TYPE)
8130 {
8131 tree targ0 = strip_float_extensions (arg0);
8132 if (targ0 != arg0)
8133 return fold_convert_loc (loc, type,
8134 fold_build1_loc (loc, ABS_EXPR,
8135 TREE_TYPE (targ0),
8136 targ0));
8137 }
8138 /* ABS_EXPR<ABS_EXPR<x>> = ABS_EXPR<x> even if flag_wrapv is on. */
8139 else if (TREE_CODE (arg0) == ABS_EXPR)
8140 return arg0;
8141
8142 /* Strip sign ops from argument. */
8143 if (TREE_CODE (type) == REAL_TYPE)
8144 {
8145 tem = fold_strip_sign_ops (arg0);
8146 if (tem)
8147 return fold_build1_loc (loc, ABS_EXPR, type,
8148 fold_convert_loc (loc, type, tem));
8149 }
8150 return NULL_TREE;
8151
8152 case CONJ_EXPR:
8153 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8154 return fold_convert_loc (loc, type, arg0);
8155 if (TREE_CODE (arg0) == COMPLEX_EXPR)
8156 {
8157 tree itype = TREE_TYPE (type);
8158 tree rpart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 0));
8159 tree ipart = fold_convert_loc (loc, itype, TREE_OPERAND (arg0, 1));
8160 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart,
8161 negate_expr (ipart));
8162 }
8163 if (TREE_CODE (arg0) == CONJ_EXPR)
8164 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
8165 return NULL_TREE;
8166
8167 case BIT_NOT_EXPR:
8168 /* Convert ~ (-A) to A - 1. */
8169 if (INTEGRAL_TYPE_P (type) && TREE_CODE (arg0) == NEGATE_EXPR)
8170 return fold_build2_loc (loc, MINUS_EXPR, type,
8171 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0)),
8172 build_int_cst (type, 1));
8173 /* Convert ~ (A - 1) or ~ (A + -1) to -A. */
8174 else if (INTEGRAL_TYPE_P (type)
8175 && ((TREE_CODE (arg0) == MINUS_EXPR
8176 && integer_onep (TREE_OPERAND (arg0, 1)))
8177 || (TREE_CODE (arg0) == PLUS_EXPR
8178 && integer_all_onesp (TREE_OPERAND (arg0, 1)))))
8179 {
8180 /* Perform the negation in ARG0's type and only then convert
8181 to TYPE so as to avoid introducing undefined behavior. */
8182 tree t = fold_build1_loc (loc, NEGATE_EXPR,
8183 TREE_TYPE (TREE_OPERAND (arg0, 0)),
8184 TREE_OPERAND (arg0, 0));
8185 return fold_convert_loc (loc, type, t);
8186 }
8187 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
8188 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8189 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8190 fold_convert_loc (loc, type,
8191 TREE_OPERAND (arg0, 0)))))
8192 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
8193 fold_convert_loc (loc, type,
8194 TREE_OPERAND (arg0, 1)));
8195 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
8196 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
8197 fold_convert_loc (loc, type,
8198 TREE_OPERAND (arg0, 1)))))
8199 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8200 fold_convert_loc (loc, type,
8201 TREE_OPERAND (arg0, 0)), tem);
8202
8203 return NULL_TREE;
8204
8205 case TRUTH_NOT_EXPR:
8206 /* Note that the operand of this must be an int
8207 and its values must be 0 or 1.
8208 ("true" is a fixed value perhaps depending on the language,
8209 but we don't handle values other than 1 correctly yet.) */
8210 tem = fold_truth_not_expr (loc, arg0);
8211 if (!tem)
8212 return NULL_TREE;
8213 return fold_convert_loc (loc, type, tem);
8214
8215 case REALPART_EXPR:
8216 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8217 return fold_convert_loc (loc, type, arg0);
8218 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8219 {
8220 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8221 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8222 fold_build1_loc (loc, REALPART_EXPR, itype,
8223 TREE_OPERAND (arg0, 0)),
8224 fold_build1_loc (loc, REALPART_EXPR, itype,
8225 TREE_OPERAND (arg0, 1)));
8226 return fold_convert_loc (loc, type, tem);
8227 }
8228 if (TREE_CODE (arg0) == CONJ_EXPR)
8229 {
8230 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8231 tem = fold_build1_loc (loc, REALPART_EXPR, itype,
8232 TREE_OPERAND (arg0, 0));
8233 return fold_convert_loc (loc, type, tem);
8234 }
8235 if (TREE_CODE (arg0) == CALL_EXPR)
8236 {
8237 tree fn = get_callee_fndecl (arg0);
8238 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8239 switch (DECL_FUNCTION_CODE (fn))
8240 {
8241 CASE_FLT_FN (BUILT_IN_CEXPI):
8242 fn = mathfn_built_in (type, BUILT_IN_COS);
8243 if (fn)
8244 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8245 break;
8246
8247 default:
8248 break;
8249 }
8250 }
8251 return NULL_TREE;
8252
8253 case IMAGPART_EXPR:
8254 if (TREE_CODE (TREE_TYPE (arg0)) != COMPLEX_TYPE)
8255 return build_zero_cst (type);
8256 if (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8257 {
8258 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8259 tem = fold_build2_loc (loc, TREE_CODE (arg0), itype,
8260 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8261 TREE_OPERAND (arg0, 0)),
8262 fold_build1_loc (loc, IMAGPART_EXPR, itype,
8263 TREE_OPERAND (arg0, 1)));
8264 return fold_convert_loc (loc, type, tem);
8265 }
8266 if (TREE_CODE (arg0) == CONJ_EXPR)
8267 {
8268 tree itype = TREE_TYPE (TREE_TYPE (arg0));
8269 tem = fold_build1_loc (loc, IMAGPART_EXPR, itype, TREE_OPERAND (arg0, 0));
8270 return fold_convert_loc (loc, type, negate_expr (tem));
8271 }
8272 if (TREE_CODE (arg0) == CALL_EXPR)
8273 {
8274 tree fn = get_callee_fndecl (arg0);
8275 if (fn && DECL_BUILT_IN_CLASS (fn) == BUILT_IN_NORMAL)
8276 switch (DECL_FUNCTION_CODE (fn))
8277 {
8278 CASE_FLT_FN (BUILT_IN_CEXPI):
8279 fn = mathfn_built_in (type, BUILT_IN_SIN);
8280 if (fn)
8281 return build_call_expr_loc (loc, fn, 1, CALL_EXPR_ARG (arg0, 0));
8282 break;
8283
8284 default:
8285 break;
8286 }
8287 }
8288 return NULL_TREE;
8289
8290 case INDIRECT_REF:
8291 /* Fold *&X to X if X is an lvalue. */
8292 if (TREE_CODE (op0) == ADDR_EXPR)
8293 {
8294 tree op00 = TREE_OPERAND (op0, 0);
8295 if ((TREE_CODE (op00) == VAR_DECL
8296 || TREE_CODE (op00) == PARM_DECL
8297 || TREE_CODE (op00) == RESULT_DECL)
8298 && !TREE_READONLY (op00))
8299 return op00;
8300 }
8301 return NULL_TREE;
8302
8303 default:
8304 return NULL_TREE;
8305 } /* switch (code) */
8306 }
8307
8308
8309 /* If the operation was a conversion do _not_ mark a resulting constant
8310 with TREE_OVERFLOW if the original constant was not. These conversions
8311 have implementation defined behavior and retaining the TREE_OVERFLOW
8312 flag here would confuse later passes such as VRP. */
8313 tree
8314 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8315 tree type, tree op0)
8316 {
8317 tree res = fold_unary_loc (loc, code, type, op0);
8318 if (res
8319 && TREE_CODE (res) == INTEGER_CST
8320 && TREE_CODE (op0) == INTEGER_CST
8321 && CONVERT_EXPR_CODE_P (code))
8322 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8323
8324 return res;
8325 }
8326
8327 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8328 operands OP0 and OP1. LOC is the location of the resulting expression.
8329 ARG0 and ARG1 are the NOP-stripped forms of OP0 and OP1.
8330 Return the folded expression if folding is successful. Otherwise,
8331 return NULL_TREE. */
8332 static tree
8333 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8334 tree arg0, tree arg1, tree op0, tree op1)
8335 {
8336 tree tem;
8337
8338 /* We only do these simplifications if we are optimizing. */
8339 if (!optimize)
8340 return NULL_TREE;
8341
8342 /* Check for things like (A || B) && (A || C). We can convert this
8343 to A || (B && C). Note that either operator can be any of the four
8344 truth and/or operations and the transformation will still be
8345 valid. Also note that we only care about order for the
8346 ANDIF and ORIF operators. If B contains side effects, this
8347 might change the truth-value of A. */
8348 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8349 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8350 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8351 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8352 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8353 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8354 {
8355 tree a00 = TREE_OPERAND (arg0, 0);
8356 tree a01 = TREE_OPERAND (arg0, 1);
8357 tree a10 = TREE_OPERAND (arg1, 0);
8358 tree a11 = TREE_OPERAND (arg1, 1);
8359 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8360 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8361 && (code == TRUTH_AND_EXPR
8362 || code == TRUTH_OR_EXPR));
8363
8364 if (operand_equal_p (a00, a10, 0))
8365 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8366 fold_build2_loc (loc, code, type, a01, a11));
8367 else if (commutative && operand_equal_p (a00, a11, 0))
8368 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8369 fold_build2_loc (loc, code, type, a01, a10));
8370 else if (commutative && operand_equal_p (a01, a10, 0))
8371 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8372 fold_build2_loc (loc, code, type, a00, a11));
8373
8374 /* This case is tricky because we must either have commutative
8375 operators or else A10 must not have side-effects. */
8376
8377 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8378 && operand_equal_p (a01, a11, 0))
8379 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8380 fold_build2_loc (loc, code, type, a00, a10),
8381 a01);
8382 }
8383
8384 /* See if we can build a range comparison. */
8385 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8386 return tem;
8387
8388 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8389 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8390 {
8391 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8392 if (tem)
8393 return fold_build2_loc (loc, code, type, tem, arg1);
8394 }
8395
8396 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8397 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8398 {
8399 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8400 if (tem)
8401 return fold_build2_loc (loc, code, type, arg0, tem);
8402 }
8403
8404 /* Check for the possibility of merging component references. If our
8405 lhs is another similar operation, try to merge its rhs with our
8406 rhs. Then try to merge our lhs and rhs. */
8407 if (TREE_CODE (arg0) == code
8408 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8409 TREE_OPERAND (arg0, 1), arg1)))
8410 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8411
8412 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8413 return tem;
8414
8415 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8416 && (code == TRUTH_AND_EXPR
8417 || code == TRUTH_ANDIF_EXPR
8418 || code == TRUTH_OR_EXPR
8419 || code == TRUTH_ORIF_EXPR))
8420 {
8421 enum tree_code ncode, icode;
8422
8423 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8424 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8425 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8426
8427 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8428 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
8429 We don't want to pack more than two leaves into a non-IF AND/OR
8430 expression.
8431 If the tree code of the left-hand operand isn't an AND/OR-IF code
8432 and isn't equal to IF-CODE, then we don't want to add the
8433 right-hand operand. If the inner right-hand side of the
8434 left-hand operand has side-effects, or isn't simple, then we
8435 can't add to it, as otherwise we might destroy the if-sequence. */
8436 if (TREE_CODE (arg0) == icode
8437 && simple_operand_p_2 (arg1)
8438 /* Needed for sequence points to handle traps and
8439 side-effects. */
8440 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8441 {
8442 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8443 arg1);
8444 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8445 tem);
8446 }
8447 /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8448 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C). */
8449 else if (TREE_CODE (arg1) == icode
8450 && simple_operand_p_2 (arg0)
8451 /* Needed for sequence points to handle traps and
8452 side-effects. */
8453 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8454 {
8455 tem = fold_build2_loc (loc, ncode, type,
8456 arg0, TREE_OPERAND (arg1, 0));
8457 return fold_build2_loc (loc, icode, type, tem,
8458 TREE_OPERAND (arg1, 1));
8459 }
8460 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8461 into (A OR B).
8462 For sequence point consistency, we need to check for traps
8463 and side-effects. */
8464 else if (code == icode && simple_operand_p_2 (arg0)
8465 && simple_operand_p_2 (arg1))
8466 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8467 }
8468
8469 return NULL_TREE;
8470 }
8471
8472 /* Fold a binary expression of code CODE and type TYPE with operands
8473 OP0 and OP1, containing either a MIN-MAX or a MAX-MIN combination.
8474 Return the folded expression if folding is successful. Otherwise,
8475 return NULL_TREE. */
8476
8477 static tree
8478 fold_minmax (location_t loc, enum tree_code code, tree type, tree op0, tree op1)
8479 {
8480 enum tree_code compl_code;
8481
8482 if (code == MIN_EXPR)
8483 compl_code = MAX_EXPR;
8484 else if (code == MAX_EXPR)
8485 compl_code = MIN_EXPR;
8486 else
8487 gcc_unreachable ();
8488
8489 /* MIN (MAX (a, b), b) == b. */
8490 if (TREE_CODE (op0) == compl_code
8491 && operand_equal_p (TREE_OPERAND (op0, 1), op1, 0))
8492 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 0));
8493
8494 /* MIN (MAX (b, a), b) == b. */
8495 if (TREE_CODE (op0) == compl_code
8496 && operand_equal_p (TREE_OPERAND (op0, 0), op1, 0)
8497 && reorder_operands_p (TREE_OPERAND (op0, 1), op1))
8498 return omit_one_operand_loc (loc, type, op1, TREE_OPERAND (op0, 1));
8499
8500 /* MIN (a, MAX (a, b)) == a. */
8501 if (TREE_CODE (op1) == compl_code
8502 && operand_equal_p (op0, TREE_OPERAND (op1, 0), 0)
8503 && reorder_operands_p (op0, TREE_OPERAND (op1, 1)))
8504 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 1));
8505
8506 /* MIN (a, MAX (b, a)) == a. */
8507 if (TREE_CODE (op1) == compl_code
8508 && operand_equal_p (op0, TREE_OPERAND (op1, 1), 0)
8509 && reorder_operands_p (op0, TREE_OPERAND (op1, 0)))
8510 return omit_one_operand_loc (loc, type, op0, TREE_OPERAND (op1, 0));
8511
8512 return NULL_TREE;
8513 }
8514
8515 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8516 by changing CODE to reduce the magnitude of constants involved in
8517 ARG0 of the comparison.
8518 Returns a canonicalized comparison tree if a simplification was
8519 possible, otherwise returns NULL_TREE.
8520 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8521 valid if signed overflow is undefined. */
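/* For example, X + 20 <= Y is canonicalized to X + 19 < Y, which is
valid only when the signed addition X + 20 cannot wrap. */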
8522
8523 static tree
8524 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8525 tree arg0, tree arg1,
8526 bool *strict_overflow_p)
8527 {
8528 enum tree_code code0 = TREE_CODE (arg0);
8529 tree t, cst0 = NULL_TREE;
8530 int sgn0;
8531 bool swap = false;
8532
8533 /* Match A +- CST code arg1 and CST code arg1. We can change the
8534 first form only if overflow is undefined. */
8535 if (!(((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8536 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8537 /* In principle pointers also have undefined overflow behavior,
8538 but that causes problems elsewhere. */
8539 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8540 && (code0 == MINUS_EXPR
8541 || code0 == PLUS_EXPR)
8542 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8543 || code0 == INTEGER_CST))
8544 return NULL_TREE;
8545
8546 /* Identify the constant in arg0 and its sign. */
8547 if (code0 == INTEGER_CST)
8548 cst0 = arg0;
8549 else
8550 cst0 = TREE_OPERAND (arg0, 1);
8551 sgn0 = tree_int_cst_sgn (cst0);
8552
8553 /* Overflowed constants and zero will cause problems. */
8554 if (integer_zerop (cst0)
8555 || TREE_OVERFLOW (cst0))
8556 return NULL_TREE;
8557
8558 /* See if we can reduce the magnitude of the constant in
8559 arg0 by changing the comparison code. */
8560 if (code0 == INTEGER_CST)
8561 {
8562 /* CST <= arg1 -> CST-1 < arg1. */
8563 if (code == LE_EXPR && sgn0 == 1)
8564 code = LT_EXPR;
8565 /* -CST < arg1 -> -CST-1 <= arg1. */
8566 else if (code == LT_EXPR && sgn0 == -1)
8567 code = LE_EXPR;
8568 /* CST > arg1 -> CST-1 >= arg1. */
8569 else if (code == GT_EXPR && sgn0 == 1)
8570 code = GE_EXPR;
8571 /* -CST >= arg1 -> -CST-1 > arg1. */
8572 else if (code == GE_EXPR && sgn0 == -1)
8573 code = GT_EXPR;
8574 else
8575 return NULL_TREE;
8576 /* arg1 code' CST' might be more canonical. */
8577 swap = true;
8578 }
8579 else
8580 {
8581 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8582 if (code == LT_EXPR
8583 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8584 code = LE_EXPR;
8585 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8586 else if (code == GT_EXPR
8587 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8588 code = GE_EXPR;
8589 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8590 else if (code == LE_EXPR
8591 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8592 code = LT_EXPR;
8593 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8594 else if (code == GE_EXPR
8595 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8596 code = GT_EXPR;
8597 else
8598 return NULL_TREE;
8599 *strict_overflow_p = true;
8600 }
8601
8602 /* Now build the constant reduced in magnitude. But not if that
8603 would produce one outside of its type's range. */
8604 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8605 && ((sgn0 == 1
8606 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8607 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8608 || (sgn0 == -1
8609 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8610 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8611 /* We cannot swap the comparison here as that would cause us to
8612 endlessly recurse. */
8613 return NULL_TREE;
8614
8615 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8616 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8617 if (code0 != INTEGER_CST)
8618 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8619 t = fold_convert (TREE_TYPE (arg1), t);
8620
8621 /* If swapping might yield a more canonical form, do so. */
8622 if (swap)
8623 return fold_build2_loc (loc, swap_tree_comparison (code), type, arg1, t);
8624 else
8625 return fold_build2_loc (loc, code, type, t, arg1);
8626 }
8627
8628 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8629 overflow further. Try to decrease the magnitude of constants involved
8630 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8631 and put sole constants at the second argument position.
8632 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8633
8634 static tree
8635 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8636 tree arg0, tree arg1)
8637 {
8638 tree t;
8639 bool strict_overflow_p;
8640 const char * const warnmsg = G_("assuming signed overflow does not occur "
8641 "when reducing constant in comparison");
8642
8643 /* Try canonicalization by simplifying arg0. */
8644 strict_overflow_p = false;
8645 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8646 &strict_overflow_p);
8647 if (t)
8648 {
8649 if (strict_overflow_p)
8650 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8651 return t;
8652 }
8653
8654 /* Try canonicalization by simplifying arg1 using the swapped
8655 comparison. */
8656 code = swap_tree_comparison (code);
8657 strict_overflow_p = false;
8658 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8659 &strict_overflow_p);
8660 if (t && strict_overflow_p)
8661 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8662 return t;
8663 }
8664
8665 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8666 space. This is used to avoid issuing overflow warnings for
8667 expressions like &p->x which cannot wrap. */
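/* The access is treated as non-wrapping only when the combined constant
byte offset is known, nonnegative, and no larger than the (known)
size of the pointed-to object. */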
8668
8669 static bool
8670 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8671 {
8672 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8673 return true;
8674
8675 if (bitpos < 0)
8676 return true;
8677
8678 wide_int wi_offset;
8679 int precision = TYPE_PRECISION (TREE_TYPE (base));
8680 if (offset == NULL_TREE)
8681 wi_offset = wi::zero (precision);
8682 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8683 return true;
8684 else
8685 wi_offset = offset;
8686
8687 bool overflow;
8688 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8689 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8690 if (overflow)
8691 return true;
8692
8693 if (!wi::fits_uhwi_p (total))
8694 return true;
8695
8696 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8697 if (size <= 0)
8698 return true;
8699
8700 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8701 array. */
8702 if (TREE_CODE (base) == ADDR_EXPR)
8703 {
8704 HOST_WIDE_INT base_size;
8705
8706 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8707 if (base_size > 0 && size < base_size)
8708 size = base_size;
8709 }
8710
8711 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8712 }
8713
8714 /* Return the HOST_WIDE_INT least significant bits of T, an INTEGER_CST
8715 of sizetype kind. This makes sure to properly sign-extend the
8716 constant. */
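/* For example, with 16-bit precision an all-ones bit pattern is
returned as -1 rather than 65535. */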
8717
8718 static HOST_WIDE_INT
8719 size_low_cst (const_tree t)
8720 {
8721 HOST_WIDE_INT w = TREE_INT_CST_ELT (t, 0);
8722 int prec = TYPE_PRECISION (TREE_TYPE (t));
8723 if (prec < HOST_BITS_PER_WIDE_INT)
8724 return sext_hwi (w, prec);
8725 return w;
8726 }
8727
8728 /* Subroutine of fold_binary. This routine performs all of the
8729 transformations that are common to the equality/inequality
8730 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8731 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8732 fold_binary itself should call fold_binary instead. Fold a comparison with
8733 tree code CODE and type TYPE with operands OP0 and OP1. Return
8734 the folded comparison or NULL_TREE. */
8735
8736 static tree
8737 fold_comparison (location_t loc, enum tree_code code, tree type,
8738 tree op0, tree op1)
8739 {
8740 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8741 tree arg0, arg1, tem;
8742
8743 arg0 = op0;
8744 arg1 = op1;
8745
8746 STRIP_SIGN_NOPS (arg0);
8747 STRIP_SIGN_NOPS (arg1);
8748
8749 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
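/* For example, X + 10 < 20 becomes X < 10, and X - 10 == 0 becomes
X == 10; the non-equality codes require undefined signed overflow. */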
8750 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8751 && (equality_code
8752 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8753 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8754 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8755 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8756 && TREE_CODE (arg1) == INTEGER_CST
8757 && !TREE_OVERFLOW (arg1))
8758 {
8759 const enum tree_code
8760 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8761 tree const1 = TREE_OPERAND (arg0, 1);
8762 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8763 tree variable = TREE_OPERAND (arg0, 0);
8764 tree new_const = int_const_binop (reverse_op, const2, const1);
8765
8766 /* If the constant operation overflowed this can be
8767 simplified as a comparison against INT_MAX/INT_MIN. */
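/* For instance, X + 10 < INT_MIN + 5 overflows when computing
(INT_MIN + 5) - 10; the canonicalized form X + 1 < INT_MIN can never
hold, so the whole comparison folds to false. */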
8768 if (TREE_OVERFLOW (new_const)
8769 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8770 {
8771 int const1_sgn = tree_int_cst_sgn (const1);
8772 enum tree_code code2 = code;
8773
8774 /* Get the sign of the constant on the lhs if the
8775 operation were VARIABLE + CONST1. */
8776 if (TREE_CODE (arg0) == MINUS_EXPR)
8777 const1_sgn = -const1_sgn;
8778
8779 /* The sign of the constant determines if we overflowed
8780 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8781 Canonicalize to the INT_MIN overflow by swapping the comparison
8782 if necessary. */
8783 if (const1_sgn == -1)
8784 code2 = swap_tree_comparison (code);
8785
8786 /* We now can look at the canonicalized case
8787 VARIABLE + 1 CODE2 INT_MIN
8788 and decide on the result. */
8789 switch (code2)
8790 {
8791 case EQ_EXPR:
8792 case LT_EXPR:
8793 case LE_EXPR:
8794 return
8795 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8796
8797 case NE_EXPR:
8798 case GE_EXPR:
8799 case GT_EXPR:
8800 return
8801 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8802
8803 default:
8804 gcc_unreachable ();
8805 }
8806 }
8807 else
8808 {
8809 if (!equality_code)
8810 fold_overflow_warning ("assuming signed overflow does not occur "
8811 "when changing X +- C1 cmp C2 to "
8812 "X cmp C2 -+ C1",
8813 WARN_STRICT_OVERFLOW_COMPARISON);
8814 return fold_build2_loc (loc, code, type, variable, new_const);
8815 }
8816 }
8817
8818 /* Transform comparisons of the form X - Y CMP 0 to X CMP Y. */
8819 if (TREE_CODE (arg0) == MINUS_EXPR
8820 && equality_code
8821 && integer_zerop (arg1))
8822 {
8823 /* ??? The transformation is valid for the other operators if overflow
8824 is undefined for the type, but performing it here badly interacts
8825 with the transformation in fold_cond_expr_with_comparison which
8826 attempts to synthesize ABS_EXPR. */
8827 if (!equality_code)
8828 fold_overflow_warning ("assuming signed overflow does not occur "
8829 "when changing X - Y cmp 0 to X cmp Y",
8830 WARN_STRICT_OVERFLOW_COMPARISON);
8831 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
8832 TREE_OPERAND (arg0, 1));
8833 }
8834
8835 /* For comparisons of pointers we can decompose them into a compile-time
8836 comparison of the base objects and the offsets into the object.
8837 This requires at least one operand being an ADDR_EXPR or a
8838 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8839 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8840 && (TREE_CODE (arg0) == ADDR_EXPR
8841 || TREE_CODE (arg1) == ADDR_EXPR
8842 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8843 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8844 {
8845 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8846 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8847 machine_mode mode;
8848 int volatilep, unsignedp;
8849 bool indirect_base0 = false, indirect_base1 = false;
8850
8851 /* Get base and offset for the access. Strip ADDR_EXPR for
8852 get_inner_reference, but put it back by stripping INDIRECT_REF
8853 off the base object if possible. indirect_baseN will be true
8854 if baseN is not an address but refers to the object itself. */
8855 base0 = arg0;
8856 if (TREE_CODE (arg0) == ADDR_EXPR)
8857 {
8858 base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
8859 &bitsize, &bitpos0, &offset0, &mode,
8860 &unsignedp, &volatilep, false);
8861 if (TREE_CODE (base0) == INDIRECT_REF)
8862 base0 = TREE_OPERAND (base0, 0);
8863 else
8864 indirect_base0 = true;
8865 }
8866 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8867 {
8868 base0 = TREE_OPERAND (arg0, 0);
8869 STRIP_SIGN_NOPS (base0);
8870 if (TREE_CODE (base0) == ADDR_EXPR)
8871 {
8872 base0 = TREE_OPERAND (base0, 0);
8873 indirect_base0 = true;
8874 }
8875 offset0 = TREE_OPERAND (arg0, 1);
8876 if (tree_fits_shwi_p (offset0))
8877 {
8878 HOST_WIDE_INT off = size_low_cst (offset0);
8879 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8880 * BITS_PER_UNIT)
8881 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8882 {
8883 bitpos0 = off * BITS_PER_UNIT;
8884 offset0 = NULL_TREE;
8885 }
8886 }
8887 }
8888
8889 base1 = arg1;
8890 if (TREE_CODE (arg1) == ADDR_EXPR)
8891 {
8892 base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
8893 &bitsize, &bitpos1, &offset1, &mode,
8894 &unsignedp, &volatilep, false);
8895 if (TREE_CODE (base1) == INDIRECT_REF)
8896 base1 = TREE_OPERAND (base1, 0);
8897 else
8898 indirect_base1 = true;
8899 }
8900 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8901 {
8902 base1 = TREE_OPERAND (arg1, 0);
8903 STRIP_SIGN_NOPS (base1);
8904 if (TREE_CODE (base1) == ADDR_EXPR)
8905 {
8906 base1 = TREE_OPERAND (base1, 0);
8907 indirect_base1 = true;
8908 }
8909 offset1 = TREE_OPERAND (arg1, 1);
8910 if (tree_fits_shwi_p (offset1))
8911 {
8912 HOST_WIDE_INT off = size_low_cst (offset1);
8913 if ((HOST_WIDE_INT) (((unsigned HOST_WIDE_INT) off)
8914 * BITS_PER_UNIT)
8915 / BITS_PER_UNIT == (HOST_WIDE_INT) off)
8916 {
8917 bitpos1 = off * BITS_PER_UNIT;
8918 offset1 = NULL_TREE;
8919 }
8920 }
8921 }
8922
8923 /* A local variable can never be pointed to by
8924 the default SSA name of an incoming parameter. */
8925 if ((TREE_CODE (arg0) == ADDR_EXPR
8926 && indirect_base0
8927 && TREE_CODE (base0) == VAR_DECL
8928 && auto_var_in_fn_p (base0, current_function_decl)
8929 && !indirect_base1
8930 && TREE_CODE (base1) == SSA_NAME
8931 && SSA_NAME_IS_DEFAULT_DEF (base1)
8932 && TREE_CODE (SSA_NAME_VAR (base1)) == PARM_DECL)
8933 || (TREE_CODE (arg1) == ADDR_EXPR
8934 && indirect_base1
8935 && TREE_CODE (base1) == VAR_DECL
8936 && auto_var_in_fn_p (base1, current_function_decl)
8937 && !indirect_base0
8938 && TREE_CODE (base0) == SSA_NAME
8939 && SSA_NAME_IS_DEFAULT_DEF (base0)
8940 && TREE_CODE (SSA_NAME_VAR (base0)) == PARM_DECL))
8941 {
8942 if (code == NE_EXPR)
8943 return constant_boolean_node (1, type);
8944 else if (code == EQ_EXPR)
8945 return constant_boolean_node (0, type);
8946 }
8947 /* If we have equivalent bases we might be able to simplify. */
8948 else if (indirect_base0 == indirect_base1
8949 && operand_equal_p (base0, base1, 0))
8950 {
8951 /* We can fold this expression to a constant if the non-constant
8952 offset parts are equal. */
8953 if ((offset0 == offset1
8954 || (offset0 && offset1
8955 && operand_equal_p (offset0, offset1, 0)))
8956 && (code == EQ_EXPR
8957 || code == NE_EXPR
8958 || (indirect_base0 && DECL_P (base0))
8959 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8960
8961 {
8962 if (!equality_code
8963 && bitpos0 != bitpos1
8964 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8965 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8966 fold_overflow_warning (("assuming pointer wraparound does not "
8967 "occur when comparing P +- C1 with "
8968 "P +- C2"),
8969 WARN_STRICT_OVERFLOW_CONDITIONAL);
8970
8971 switch (code)
8972 {
8973 case EQ_EXPR:
8974 return constant_boolean_node (bitpos0 == bitpos1, type);
8975 case NE_EXPR:
8976 return constant_boolean_node (bitpos0 != bitpos1, type);
8977 case LT_EXPR:
8978 return constant_boolean_node (bitpos0 < bitpos1, type);
8979 case LE_EXPR:
8980 return constant_boolean_node (bitpos0 <= bitpos1, type);
8981 case GE_EXPR:
8982 return constant_boolean_node (bitpos0 >= bitpos1, type);
8983 case GT_EXPR:
8984 return constant_boolean_node (bitpos0 > bitpos1, type);
8985 default:;
8986 }
8987 }
8988 /* We can simplify the comparison to a comparison of the variable
8989 offset parts if the constant offset parts are equal.
8990 Be careful to use signed sizetype here because otherwise we
8991 mess with array offsets in the wrong way. This is possible
8992 because pointer arithmetic is restricted to remain within an
8993 object and overflow on pointer differences is undefined as of
8994 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
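/* For example, comparing &a[i] with &a[j] for the same array a reduces
to comparing the two variable byte offsets in ssizetype. */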
8995 else if (bitpos0 == bitpos1
8996 && (equality_code
8997 || (indirect_base0 && DECL_P (base0))
8998 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8999 {
9000 /* By converting to signed sizetype we cover middle-end pointer
9001 arithmetic which operates on unsigned pointer types of size
9002 type size and ARRAY_REF offsets which are properly sign or
9003 zero extended from their type in case it is narrower than
9004 sizetype. */
9005 if (offset0 == NULL_TREE)
9006 offset0 = build_int_cst (ssizetype, 0);
9007 else
9008 offset0 = fold_convert_loc (loc, ssizetype, offset0);
9009 if (offset1 == NULL_TREE)
9010 offset1 = build_int_cst (ssizetype, 0);
9011 else
9012 offset1 = fold_convert_loc (loc, ssizetype, offset1);
9013
9014 if (!equality_code
9015 && (pointer_may_wrap_p (base0, offset0, bitpos0)
9016 || pointer_may_wrap_p (base1, offset1, bitpos1)))
9017 fold_overflow_warning (("assuming pointer wraparound does not "
9018 "occur when comparing P +- C1 with "
9019 "P +- C2"),
9020 WARN_STRICT_OVERFLOW_COMPARISON);
9021
9022 return fold_build2_loc (loc, code, type, offset0, offset1);
9023 }
9024 }
9025 /* For non-equal bases we can simplify if they are addresses of
9026 declarations with different addresses. */
9027 else if (indirect_base0 && indirect_base1
9028 /* We know that !operand_equal_p (base0, base1, 0)
9029 because the if condition was false. But make
9030 sure two decls are not the same. */
9031 && base0 != base1
9032 && TREE_CODE (arg0) == ADDR_EXPR
9033 && TREE_CODE (arg1) == ADDR_EXPR
9034 && DECL_P (base0)
9035 && DECL_P (base1)
9036 /* Watch for aliases. */
9037 && (!decl_in_symtab_p (base0)
9038 || !decl_in_symtab_p (base1)
9039 || !symtab_node::get_create (base0)->equal_address_to
9040 (symtab_node::get_create (base1))))
9041 {
9042 if (code == EQ_EXPR)
9043 return omit_two_operands_loc (loc, type, boolean_false_node,
9044 arg0, arg1);
9045 else if (code == NE_EXPR)
9046 return omit_two_operands_loc (loc, type, boolean_true_node,
9047 arg0, arg1);
9048 }
9049 /* For equal offsets we can simplify to a comparison of the
9050 base addresses. */
9051 else if (bitpos0 == bitpos1
9052 && (indirect_base0
9053 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
9054 && (indirect_base1
9055 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
9056 && ((offset0 == offset1)
9057 || (offset0 && offset1
9058 && operand_equal_p (offset0, offset1, 0))))
9059 {
9060 if (indirect_base0)
9061 base0 = build_fold_addr_expr_loc (loc, base0);
9062 if (indirect_base1)
9063 base1 = build_fold_addr_expr_loc (loc, base1);
9064 return fold_build2_loc (loc, code, type, base0, base1);
9065 }
9066 }
9067
9068 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
9069 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
9070 the resulting offset is smaller in absolute value than the
9071 original one and has the same sign. */
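/* For example, X + 2 < Y + 5 becomes X < Y + 3: the combined constant
3 is smaller in magnitude than 5 and keeps its sign. */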
9072 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9073 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9074 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
9075 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9076 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9077 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
9078 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
9079 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
9080 {
9081 tree const1 = TREE_OPERAND (arg0, 1);
9082 tree const2 = TREE_OPERAND (arg1, 1);
9083 tree variable1 = TREE_OPERAND (arg0, 0);
9084 tree variable2 = TREE_OPERAND (arg1, 0);
9085 tree cst;
9086 const char * const warnmsg = G_("assuming signed overflow does not "
9087 "occur when combining constants around "
9088 "a comparison");
9089
9090 /* Put the constant on the side where it doesn't overflow and is
9091 of lower absolute value and of the same sign as before. */
9092 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9093 ? MINUS_EXPR : PLUS_EXPR,
9094 const2, const1);
9095 if (!TREE_OVERFLOW (cst)
9096 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
9097 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
9098 {
9099 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9100 return fold_build2_loc (loc, code, type,
9101 variable1,
9102 fold_build2_loc (loc, TREE_CODE (arg1),
9103 TREE_TYPE (arg1),
9104 variable2, cst));
9105 }
9106
9107 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
9108 ? MINUS_EXPR : PLUS_EXPR,
9109 const1, const2);
9110 if (!TREE_OVERFLOW (cst)
9111 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
9112 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
9113 {
9114 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
9115 return fold_build2_loc (loc, code, type,
9116 fold_build2_loc (loc, TREE_CODE (arg0),
9117 TREE_TYPE (arg0),
9118 variable1, cst),
9119 variable2);
9120 }
9121 }
9122
9123 /* Transform comparisons of the form X * C1 CMP 0 to X CMP 0 in the
9124 signed arithmetic case. That form is created by the compiler
9125 often enough for folding it to be of value. One example is in
9126 computing loop trip counts after Operator Strength Reduction. */
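/* For example, X * 4 > 0 becomes X > 0, while X * -4 > 0 becomes
X < 0 because the negative factor swaps the comparison. */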
9127 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
9129 && TREE_CODE (arg0) == MULT_EXPR
9130 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9131 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
9132 && integer_zerop (arg1))
9133 {
9134 tree const1 = TREE_OPERAND (arg0, 1);
9135 tree const2 = arg1; /* zero */
9136 tree variable1 = TREE_OPERAND (arg0, 0);
9137 enum tree_code cmp_code = code;
9138
9139 /* Handle unfolded multiplication by zero. */
9140 if (integer_zerop (const1))
9141 return fold_build2_loc (loc, cmp_code, type, const1, const2);
9142
9143 fold_overflow_warning (("assuming signed overflow does not occur when "
9144 "eliminating multiplication in comparison "
9145 "with zero"),
9146 WARN_STRICT_OVERFLOW_COMPARISON);
9147
9148 /* If const1 is negative we swap the sense of the comparison. */
9149 if (tree_int_cst_sgn (const1) < 0)
9150 cmp_code = swap_tree_comparison (cmp_code);
9151
9152 return fold_build2_loc (loc, cmp_code, type, variable1, const2);
9153 }
9154
9155 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
9156 if (tem)
9157 return tem;
9158
9159 if (FLOAT_TYPE_P (TREE_TYPE (arg0)))
9160 {
9161 tree targ0 = strip_float_extensions (arg0);
9162 tree targ1 = strip_float_extensions (arg1);
9163 tree newtype = TREE_TYPE (targ0);
9164
9165 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
9166 newtype = TREE_TYPE (targ1);
9167
9168 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
9169 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
9170 return fold_build2_loc (loc, code, type,
9171 fold_convert_loc (loc, newtype, targ0),
9172 fold_convert_loc (loc, newtype, targ1));
9173
9174 if (TREE_CODE (arg1) == REAL_CST)
9175 {
9176 REAL_VALUE_TYPE cst;
9177 cst = TREE_REAL_CST (arg1);
9178
9179 /* IEEE doesn't distinguish +0 and -0 in comparisons. */
9180 /* a CMP (-0) -> a CMP 0 */
9181 if (REAL_VALUE_MINUS_ZERO (cst))
9182 return fold_build2_loc (loc, code, type, arg0,
9183 build_real (TREE_TYPE (arg1), dconst0));
9184
9185 /* x != NaN is always true, other ops are always false. */
9186 if (REAL_VALUE_ISNAN (cst)
9187 && ! HONOR_SNANS (arg1))
9188 {
9189 tem = (code == NE_EXPR) ? integer_one_node : integer_zero_node;
9190 return omit_one_operand_loc (loc, type, tem, arg0);
9191 }
9192
9193 /* Fold comparisons against infinity. */
9194 if (REAL_VALUE_ISINF (cst)
9195 && MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1))))
9196 {
9197 tem = fold_inf_compare (loc, code, type, arg0, arg1);
9198 if (tem != NULL_TREE)
9199 return tem;
9200 }
9201 }
9202
9203 /* If this is a comparison of a real constant with a PLUS_EXPR
9204 or a MINUS_EXPR of a real constant, we can convert it into a
9205 comparison with a revised real constant as long as no overflow
9206 occurs when unsafe_math_optimizations are enabled. */
9207 if (flag_unsafe_math_optimizations
9208 && TREE_CODE (arg1) == REAL_CST
9209 && (TREE_CODE (arg0) == PLUS_EXPR
9210 || TREE_CODE (arg0) == MINUS_EXPR)
9211 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
9212 && 0 != (tem = const_binop (TREE_CODE (arg0) == PLUS_EXPR
9213 ? MINUS_EXPR : PLUS_EXPR,
9214 arg1, TREE_OPERAND (arg0, 1)))
9215 && !TREE_OVERFLOW (tem))
9216 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
9217
9218 /* Likewise, we can simplify a comparison of a real constant with
9219 a MINUS_EXPR whose first operand is also a real constant, i.e.
9220 (c1 - x) < c2 becomes x > c1-c2. Reordering is allowed on
9221 floating-point types only if -fassociative-math is set. */
9222 if (flag_associative_math
9223 && TREE_CODE (arg1) == REAL_CST
9224 && TREE_CODE (arg0) == MINUS_EXPR
9225 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST
9226 && 0 != (tem = const_binop (MINUS_EXPR, TREE_OPERAND (arg0, 0),
9227 arg1))
9228 && !TREE_OVERFLOW (tem))
9229 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9230 TREE_OPERAND (arg0, 1), tem);
9231
9232 /* Fold comparisons against built-in math functions. */
9233 if (TREE_CODE (arg1) == REAL_CST
9234 && flag_unsafe_math_optimizations
9235 && ! flag_errno_math)
9236 {
9237 enum built_in_function fcode = builtin_mathfn_code (arg0);
9238
9239 if (fcode != END_BUILTINS)
9240 {
9241 tem = fold_mathfn_compare (loc, fcode, code, type, arg0, arg1);
9242 if (tem != NULL_TREE)
9243 return tem;
9244 }
9245 }
9246 }
9247
9248 if (TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE
9249 && CONVERT_EXPR_P (arg0))
9250 {
9251 /* If we are widening one operand of an integer comparison,
9252 see if the other operand is similarly being widened. Perhaps we
9253 can do the comparison in the narrower type. */
9254 tem = fold_widened_comparison (loc, code, type, arg0, arg1);
9255 if (tem)
9256 return tem;
9257
9258 /* Or if we are changing signedness. */
9259 tem = fold_sign_changed_comparison (loc, code, type, arg0, arg1);
9260 if (tem)
9261 return tem;
9262 }
9263
9264 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
9265 constant, we can simplify it. */
9266 if (TREE_CODE (arg1) == INTEGER_CST
9267 && (TREE_CODE (arg0) == MIN_EXPR
9268 || TREE_CODE (arg0) == MAX_EXPR)
9269 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9270 {
9271 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
9272 if (tem)
9273 return tem;
9274 }
9275
9276 /* Simplify comparison of something with itself. (For IEEE
9277 floating-point, we can only do some of these simplifications.) */
9278 if (operand_equal_p (arg0, arg1, 0))
9279 {
9280 switch (code)
9281 {
9282 case EQ_EXPR:
9283 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9284 || ! HONOR_NANS (arg0))
9285 return constant_boolean_node (1, type);
9286 break;
9287
9288 case GE_EXPR:
9289 case LE_EXPR:
9290 if (! FLOAT_TYPE_P (TREE_TYPE (arg0))
9291 || ! HONOR_NANS (arg0))
9292 return constant_boolean_node (1, type);
9293 return fold_build2_loc (loc, EQ_EXPR, type, arg0, arg1);
9294
9295 case NE_EXPR:
9296 /* For NE, we can only do this simplification if the operands are
9297 integers or we don't honor IEEE floating-point NaNs. */
9298 if (FLOAT_TYPE_P (TREE_TYPE (arg0))
9299 && HONOR_NANS (arg0))
9300 break;
9301 /* ... fall through ... */
9302 case GT_EXPR:
9303 case LT_EXPR:
9304 return constant_boolean_node (0, type);
9305 default:
9306 gcc_unreachable ();
9307 }
9308 }
9309
9310 /* If we are comparing an expression that just has comparisons
9311 of two integer values, arithmetic expressions of those comparisons,
9312 and constants, we can simplify it. There are only three cases
9313 to check: the two values can either be equal, the first can be
9314 greater, or the second can be greater. Fold the expression for
9315 those three values. Since each value must be 0 or 1, we have
9316 eight possibilities, each of which corresponds to the constant 0
9317 or 1 or one of the six possible comparisons.
9318
9319 This handles common cases like (a > b) == 0 but also handles
9320 expressions like ((x > y) - (y > x)) > 0, which supposedly
9321 occur in macroized code. */
9322
9323 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
9324 {
9325 tree cval1 = 0, cval2 = 0;
9326 int save_p = 0;
9327
9328 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
9329 /* Don't handle degenerate cases here; they should already
9330 have been handled anyway. */
9331 && cval1 != 0 && cval2 != 0
9332 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
9333 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
9334 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
9335 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
9336 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
9337 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
9338 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
9339 {
9340 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
9341 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
9342
9343 /* We can't just pass T to eval_subst in case cval1 or cval2
9344 was the same as ARG1. */
9345
9346 tree high_result
9347 = fold_build2_loc (loc, code, type,
9348 eval_subst (loc, arg0, cval1, maxval,
9349 cval2, minval),
9350 arg1);
9351 tree equal_result
9352 = fold_build2_loc (loc, code, type,
9353 eval_subst (loc, arg0, cval1, maxval,
9354 cval2, maxval),
9355 arg1);
9356 tree low_result
9357 = fold_build2_loc (loc, code, type,
9358 eval_subst (loc, arg0, cval1, minval,
9359 cval2, maxval),
9360 arg1);
9361
9362 /* All three of these results should be 0 or 1. Confirm they are.
9363 Then use those values to select the proper code to use. */
9364
9365 if (TREE_CODE (high_result) == INTEGER_CST
9366 && TREE_CODE (equal_result) == INTEGER_CST
9367 && TREE_CODE (low_result) == INTEGER_CST)
9368 {
9369 /* Make a 3-bit mask with the high-order bit being the
9370 value for `>', the next for `=', and the low for `<'. */
9371 switch ((integer_onep (high_result) * 4)
9372 + (integer_onep (equal_result) * 2)
9373 + integer_onep (low_result))
9374 {
9375 case 0:
9376 /* Always false. */
9377 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
9378 case 1:
9379 code = LT_EXPR;
9380 break;
9381 case 2:
9382 code = EQ_EXPR;
9383 break;
9384 case 3:
9385 code = LE_EXPR;
9386 break;
9387 case 4:
9388 code = GT_EXPR;
9389 break;
9390 case 5:
9391 code = NE_EXPR;
9392 break;
9393 case 6:
9394 code = GE_EXPR;
9395 break;
9396 case 7:
9397 /* Always true. */
9398 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
9399 }
9400
9401 if (save_p)
9402 {
9403 tem = save_expr (build2 (code, type, cval1, cval2));
9404 SET_EXPR_LOCATION (tem, loc);
9405 return tem;
9406 }
9407 return fold_build2_loc (loc, code, type, cval1, cval2);
9408 }
9409 }
9410 }
9411
9412 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
9413 into a single range test. */
9414 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
9415 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
9416 && TREE_CODE (arg1) == INTEGER_CST
9417 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
9418 && !integer_zerop (TREE_OPERAND (arg0, 1))
9419 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
9420 && !TREE_OVERFLOW (arg1))
9421 {
9422 tem = fold_div_compare (loc, code, type, arg0, arg1);
9423 if (tem != NULL_TREE)
9424 return tem;
9425 }
9426
9427 /* Fold ~X op ~Y as Y op X. */
9428 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9429 && TREE_CODE (arg1) == BIT_NOT_EXPR)
9430 {
9431 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9432 return fold_build2_loc (loc, code, type,
9433 fold_convert_loc (loc, cmp_type,
9434 TREE_OPERAND (arg1, 0)),
9435 TREE_OPERAND (arg0, 0));
9436 }
9437
9438 /* Fold ~X op C as X op' ~C, where op' is the swapped comparison. */
9439 if (TREE_CODE (arg0) == BIT_NOT_EXPR
9440 && (TREE_CODE (arg1) == INTEGER_CST || TREE_CODE (arg1) == VECTOR_CST))
9441 {
9442 tree cmp_type = TREE_TYPE (TREE_OPERAND (arg0, 0));
9443 return fold_build2_loc (loc, swap_tree_comparison (code), type,
9444 TREE_OPERAND (arg0, 0),
9445 fold_build1_loc (loc, BIT_NOT_EXPR, cmp_type,
9446 fold_convert_loc (loc, cmp_type, arg1)));
9447 }
9448
9449 return NULL_TREE;
9450 }
9451
9452
9453 /* Subroutine of fold_binary. Optimize complex multiplications of the
9454 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
9455 argument EXPR represents the expression "z" of type TYPE. */
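/* That is, (a + b*i) * (a - b*i) = a*a + b*b with a zero imaginary
part, which the code below builds directly from the real and
imaginary parts of EXPR. */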
9456
9457 static tree
9458 fold_mult_zconjz (location_t loc, tree type, tree expr)
9459 {
9460 tree itype = TREE_TYPE (type);
9461 tree rpart, ipart, tem;
9462
9463 if (TREE_CODE (expr) == COMPLEX_EXPR)
9464 {
9465 rpart = TREE_OPERAND (expr, 0);
9466 ipart = TREE_OPERAND (expr, 1);
9467 }
9468 else if (TREE_CODE (expr) == COMPLEX_CST)
9469 {
9470 rpart = TREE_REALPART (expr);
9471 ipart = TREE_IMAGPART (expr);
9472 }
9473 else
9474 {
9475 expr = save_expr (expr);
9476 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
9477 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
9478 }
9479
9480 rpart = save_expr (rpart);
9481 ipart = save_expr (ipart);
9482 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
9483 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
9484 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
9485 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
9486 build_zero_cst (itype));
9487 }
9488
9489
9490 /* Subroutine of fold_binary. If P is the value of EXPR, computes
9491 power-of-two M and (arbitrary) N such that M divides (P-N). This condition
9492 guarantees that P and N have the same least significant log2(M) bits.
9493 N is not otherwise constrained. In particular, N is not normalized to
9494 0 <= N < M as is common. In general, the precise value of P is unknown.
9495 M is chosen as large as possible such that constant N can be determined.
9496
9497 Returns M and sets *RESIDUE to N.
9498
9499 If ALLOW_FUNC_ALIGN is true, do take functions' DECL_ALIGN_UNIT into
9500 account. This is not always possible due to PR 35705.
9501 */
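/* For example, for EXPR == &a + 4 * i, with a assumed (for
illustration) to be an 8-byte-aligned object, the result is M == 4
with *RESIDUE == 0, i.e. the pointer is a known multiple of 4. */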
9502
9503 static unsigned HOST_WIDE_INT
9504 get_pointer_modulus_and_residue (tree expr, unsigned HOST_WIDE_INT *residue,
9505 bool allow_func_align)
9506 {
9507 enum tree_code code;
9508
9509 *residue = 0;
9510
9511 code = TREE_CODE (expr);
9512 if (code == ADDR_EXPR)
9513 {
9514 unsigned int bitalign;
9515 get_object_alignment_1 (TREE_OPERAND (expr, 0), &bitalign, residue);
9516 *residue /= BITS_PER_UNIT;
9517 return bitalign / BITS_PER_UNIT;
9518 }
9519 else if (code == POINTER_PLUS_EXPR)
9520 {
9521 tree op0, op1;
9522 unsigned HOST_WIDE_INT modulus;
9523 enum tree_code inner_code;
9524
9525 op0 = TREE_OPERAND (expr, 0);
9526 STRIP_NOPS (op0);
9527 modulus = get_pointer_modulus_and_residue (op0, residue,
9528 allow_func_align);
9529
9530 op1 = TREE_OPERAND (expr, 1);
9531 STRIP_NOPS (op1);
9532 inner_code = TREE_CODE (op1);
9533 if (inner_code == INTEGER_CST)
9534 {
9535 *residue += TREE_INT_CST_LOW (op1);
9536 return modulus;
9537 }
9538 else if (inner_code == MULT_EXPR)
9539 {
9540 op1 = TREE_OPERAND (op1, 1);
9541 if (TREE_CODE (op1) == INTEGER_CST)
9542 {
9543 unsigned HOST_WIDE_INT align;
9544
9545 /* Compute the greatest power-of-2 divisor of op1. */
9546 align = TREE_INT_CST_LOW (op1);
9547 align &= -align;
9548
9549 /* If align is non-zero and less than modulus, replace
9550 modulus with align. If align is 0, then either op1 is 0
9551 or the greatest power-of-2 divisor of op1 doesn't fit in an
9552 unsigned HOST_WIDE_INT. In either case, no additional
9553 constraint is imposed. */
9554 if (align)
9555 modulus = MIN (modulus, align);
9556
9557 return modulus;
9558 }
9559 }
9560 }
9561
9562 /* If we get here, we were unable to determine anything useful about the
9563 expression. */
9564 return 1;
9565 }
9566
9567 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
9568 CONSTRUCTOR ARG into array ELTS and return true if successful. */
9569
9570 static bool
9571 vec_cst_ctor_to_array (tree arg, tree *elts)
9572 {
9573 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
9574
9575 if (TREE_CODE (arg) == VECTOR_CST)
9576 {
9577 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
9578 elts[i] = VECTOR_CST_ELT (arg, i);
9579 }
9580 else if (TREE_CODE (arg) == CONSTRUCTOR)
9581 {
9582 constructor_elt *elt;
9583
9584 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
9585 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
9586 return false;
9587 else
9588 elts[i] = elt->value;
9589 }
9590 else
9591 return false;
9592 for (; i < nelts; i++)
9593 elts[i]
9594 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
9595 return true;
9596 }
9597
9598 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
9599 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
9600 NULL_TREE otherwise. */
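/* For instance, with four elements per vector, SEL == {0, 5, 2, 7}
picks elements 0 and 2 from ARG0 and elements 1 and 3 from ARG1. */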
9601
9602 static tree
9603 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
9604 {
9605 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
9606 tree *elts;
9607 bool need_ctor = false;
9608
9609 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
9610 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
9611 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
9612 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
9613 return NULL_TREE;
9614
9615 elts = XALLOCAVEC (tree, nelts * 3);
9616 if (!vec_cst_ctor_to_array (arg0, elts)
9617 || !vec_cst_ctor_to_array (arg1, elts + nelts))
9618 return NULL_TREE;
9619
9620 for (i = 0; i < nelts; i++)
9621 {
9622 if (!CONSTANT_CLASS_P (elts[sel[i]]))
9623 need_ctor = true;
9624 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
9625 }
9626
9627 if (need_ctor)
9628 {
9629 vec<constructor_elt, va_gc> *v;
9630 vec_alloc (v, nelts);
9631 for (i = 0; i < nelts; i++)
9632 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9633 return build_constructor (type, v);
9634 }
9635 else
9636 return build_vector (type, &elts[2 * nelts]);
9637 }
9638
9639 /* Try to fold a pointer difference of type TYPE between two address expressions of
9640 array references AREF0 and AREF1 using location LOC. Return a
9641 simplified expression for the difference or NULL_TREE. */
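/* For example, &a[i] - &a[j] simplifies to (i - j) * sizeof (a[0]),
computed in TYPE. */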
9642
9643 static tree
9644 fold_addr_of_array_ref_difference (location_t loc, tree type,
9645 tree aref0, tree aref1)
9646 {
9647 tree base0 = TREE_OPERAND (aref0, 0);
9648 tree base1 = TREE_OPERAND (aref1, 0);
9649 tree base_offset = build_int_cst (type, 0);
9650
9651 /* If the bases are array references as well, recurse. If the bases
9652 are pointer indirections compute the difference of the pointers.
9653 If the bases are equal, we are set. */
9654 if ((TREE_CODE (base0) == ARRAY_REF
9655 && TREE_CODE (base1) == ARRAY_REF
9656 && (base_offset
9657 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9658 || (INDIRECT_REF_P (base0)
9659 && INDIRECT_REF_P (base1)
9660 && (base_offset = fold_binary_loc (loc, MINUS_EXPR, type,
9661 TREE_OPERAND (base0, 0),
9662 TREE_OPERAND (base1, 0))))
9663 || operand_equal_p (base0, base1, 0))
9664 {
9665 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9666 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9667 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9668 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9669 return fold_build2_loc (loc, PLUS_EXPR, type,
9670 base_offset,
9671 fold_build2_loc (loc, MULT_EXPR, type,
9672 diff, esz));
9673 }
9674 return NULL_TREE;
9675 }
9676
9677 /* If the real or vector real constant CST of type TYPE has an exact
9678 inverse, return it, else return NULL. */
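/* For example, 0.25 yields 4.0, while 3.0 yields NULL because 1/3 is
not exactly representable in binary floating point. */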
9679
9680 tree
9681 exact_inverse (tree type, tree cst)
9682 {
9683 REAL_VALUE_TYPE r;
9684 tree unit_type, *elts;
9685 machine_mode mode;
9686 unsigned vec_nelts, i;
9687
9688 switch (TREE_CODE (cst))
9689 {
9690 case REAL_CST:
9691 r = TREE_REAL_CST (cst);
9692
9693 if (exact_real_inverse (TYPE_MODE (type), &r))
9694 return build_real (type, r);
9695
9696 return NULL_TREE;
9697
9698 case VECTOR_CST:
9699 vec_nelts = VECTOR_CST_NELTS (cst);
9700 elts = XALLOCAVEC (tree, vec_nelts);
9701 unit_type = TREE_TYPE (type);
9702 mode = TYPE_MODE (unit_type);
9703
9704 for (i = 0; i < vec_nelts; i++)
9705 {
9706 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9707 if (!exact_real_inverse (mode, &r))
9708 return NULL_TREE;
9709 elts[i] = build_real (unit_type, r);
9710 }
9711
9712 return build_vector (type, elts);
9713
9714 default:
9715 return NULL_TREE;
9716 }
9717 }
9718
9719 /* Mask out the tz least significant bits of X of type TYPE where
9720 tz is the number of trailing zeroes in Y. */
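/* For example, if Y is 8 (three trailing zeroes), the three least
significant bits of X are cleared. */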
9721 static wide_int
9722 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9723 {
9724 int tz = wi::ctz (y);
9725 if (tz > 0)
9726 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9727 return x;
9728 }
9729
9730 /* Return true when T is an address and is known to be nonzero.
9731 For floating point we further ensure that T is not denormal.
9732 Similar logic is present in nonzero_address in rtlanal.h.
9733
9734 If the return value is based on the assumption that signed overflow
9735 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9736 change *STRICT_OVERFLOW_P. */
9737
9738 static bool
9739 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9740 {
9741 tree type = TREE_TYPE (t);
9742 enum tree_code code;
9743
9744 /* Doing something useful for floating point would need more work. */
9745 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9746 return false;
9747
9748 code = TREE_CODE (t);
9749 switch (TREE_CODE_CLASS (code))
9750 {
9751 case tcc_unary:
9752 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9753 strict_overflow_p);
9754 case tcc_binary:
9755 case tcc_comparison:
9756 return tree_binary_nonzero_warnv_p (code, type,
9757 TREE_OPERAND (t, 0),
9758 TREE_OPERAND (t, 1),
9759 strict_overflow_p);
9760 case tcc_constant:
9761 case tcc_declaration:
9762 case tcc_reference:
9763 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9764
9765 default:
9766 break;
9767 }
9768
9769 switch (code)
9770 {
9771 case TRUTH_NOT_EXPR:
9772 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9773 strict_overflow_p);
9774
9775 case TRUTH_AND_EXPR:
9776 case TRUTH_OR_EXPR:
9777 case TRUTH_XOR_EXPR:
9778 return tree_binary_nonzero_warnv_p (code, type,
9779 TREE_OPERAND (t, 0),
9780 TREE_OPERAND (t, 1),
9781 strict_overflow_p);
9782
9783 case COND_EXPR:
9784 case CONSTRUCTOR:
9785 case OBJ_TYPE_REF:
9786 case ASSERT_EXPR:
9787 case ADDR_EXPR:
9788 case WITH_SIZE_EXPR:
9789 case SSA_NAME:
9790 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9791
9792 case COMPOUND_EXPR:
9793 case MODIFY_EXPR:
9794 case BIND_EXPR:
9795 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9796 strict_overflow_p);
9797
9798 case SAVE_EXPR:
9799 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9800 strict_overflow_p);
9801
9802 case CALL_EXPR:
9803 {
9804 tree fndecl = get_callee_fndecl (t);
9805 if (!fndecl) return false;
9806 if (flag_delete_null_pointer_checks && !flag_check_new
9807 && DECL_IS_OPERATOR_NEW (fndecl)
9808 && !TREE_NOTHROW (fndecl))
9809 return true;
9810 if (flag_delete_null_pointer_checks
9811 && lookup_attribute ("returns_nonnull",
9812 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9813 return true;
9814 return alloca_call_p (t);
9815 }
9816
9817 default:
9818 break;
9819 }
9820 return false;
9821 }
9822
9823 /* Return true when T is an address and is known to be nonzero.
9824 Handle warnings about undefined signed overflow. */
9825
9826 static bool
9827 tree_expr_nonzero_p (tree t)
9828 {
9829 bool ret, strict_overflow_p;
9830
9831 strict_overflow_p = false;
9832 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9833 if (strict_overflow_p)
9834 fold_overflow_warning (("assuming signed overflow does not occur when "
9835 "determining that expression is always "
9836 "non-zero"),
9837 WARN_STRICT_OVERFLOW_MISC);
9838 return ret;
9839 }
9840
9841 /* Fold a binary expression of code CODE and type TYPE with operands
9842 OP0 and OP1. LOC is the location of the resulting expression.
9843 Return the folded expression if folding is successful. Otherwise,
9844 return NULL_TREE. */
9845
9846 tree
9847 fold_binary_loc (location_t loc,
9848 enum tree_code code, tree type, tree op0, tree op1)
9849 {
9850 enum tree_code_class kind = TREE_CODE_CLASS (code);
9851 tree arg0, arg1, tem;
9852 tree t1 = NULL_TREE;
9853 bool strict_overflow_p;
9854 unsigned int prec;
9855
9856 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9857 && TREE_CODE_LENGTH (code) == 2
9858 && op0 != NULL_TREE
9859 && op1 != NULL_TREE);
9860
9861 arg0 = op0;
9862 arg1 = op1;
9863
9864 /* Strip any conversions that don't change the mode. This is
9865 safe for every expression, except for a comparison expression
9866 because its signedness is derived from its operands. So, in
9867 the latter case, only strip conversions that don't change the
9868 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9869 preserved.
9870
9871 Note that this is done as an internal manipulation within the
9872 constant folder, in order to find the simplest representation
9873 of the arguments so that their form can be studied. In any
9874 cases, the appropriate type conversions should be put back in
9875 the tree that will get out of the constant folder. */
9876
9877 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9878 {
9879 STRIP_SIGN_NOPS (arg0);
9880 STRIP_SIGN_NOPS (arg1);
9881 }
9882 else
9883 {
9884 STRIP_NOPS (arg0);
9885 STRIP_NOPS (arg1);
9886 }
9887
9888 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9889 constant but we can't do arithmetic on them. */
9890 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9891 {
9892 tem = const_binop (code, type, arg0, arg1);
9893 if (tem != NULL_TREE)
9894 {
9895 if (TREE_TYPE (tem) != type)
9896 tem = fold_convert_loc (loc, type, tem);
9897 return tem;
9898 }
9899 }
9900
9901 /* If this is a commutative operation, and ARG0 is a constant, move it
9902 to ARG1 to reduce the number of tests below. */
9903 if (commutative_tree_code (code)
9904 && tree_swap_operands_p (arg0, arg1, true))
9905 return fold_build2_loc (loc, code, type, op1, op0);
9906
9907 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9908 to ARG1 to reduce the number of tests below. */
9909 if (kind == tcc_comparison
9910 && tree_swap_operands_p (arg0, arg1, true))
9911 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9912
9913 tem = generic_simplify (loc, code, type, op0, op1);
9914 if (tem)
9915 return tem;
9916
9917 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9918
9919 First check for cases where an arithmetic operation is applied to a
9920 compound, conditional, or comparison operation. Push the arithmetic
9921 operation inside the compound or conditional to see if any folding
9922 can then be done. Convert comparison to conditional for this purpose.
9923 This also optimizes non-constant cases that used to be done in
9924 expand_expr.
9925
9926 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9927 one of the operands is a comparison and the other is a comparison, a
9928 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9929 code below would make the expression more complex. Change it to a
9930 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9931 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9932
9933 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9934 || code == EQ_EXPR || code == NE_EXPR)
9935 && TREE_CODE (type) != VECTOR_TYPE
9936 && ((truth_value_p (TREE_CODE (arg0))
9937 && (truth_value_p (TREE_CODE (arg1))
9938 || (TREE_CODE (arg1) == BIT_AND_EXPR
9939 && integer_onep (TREE_OPERAND (arg1, 1)))))
9940 || (truth_value_p (TREE_CODE (arg1))
9941 && (truth_value_p (TREE_CODE (arg0))
9942 || (TREE_CODE (arg0) == BIT_AND_EXPR
9943 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9944 {
9945 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9946 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9947 : TRUTH_XOR_EXPR,
9948 boolean_type_node,
9949 fold_convert_loc (loc, boolean_type_node, arg0),
9950 fold_convert_loc (loc, boolean_type_node, arg1));
9951
9952 if (code == EQ_EXPR)
9953 tem = invert_truthvalue_loc (loc, tem);
9954
9955 return fold_convert_loc (loc, type, tem);
9956 }
9957
9958 if (TREE_CODE_CLASS (code) == tcc_binary
9959 || TREE_CODE_CLASS (code) == tcc_comparison)
9960 {
9961 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9962 {
9963 tem = fold_build2_loc (loc, code, type,
9964 fold_convert_loc (loc, TREE_TYPE (op0),
9965 TREE_OPERAND (arg0, 1)), op1);
9966 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9967 tem);
9968 }
9969 if (TREE_CODE (arg1) == COMPOUND_EXPR
9970 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9971 {
9972 tem = fold_build2_loc (loc, code, type, op0,
9973 fold_convert_loc (loc, TREE_TYPE (op1),
9974 TREE_OPERAND (arg1, 1)));
9975 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9976 tem);
9977 }
9978
9979 if (TREE_CODE (arg0) == COND_EXPR
9980 || TREE_CODE (arg0) == VEC_COND_EXPR
9981 || COMPARISON_CLASS_P (arg0))
9982 {
9983 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9984 arg0, arg1,
9985 /*cond_first_p=*/1);
9986 if (tem != NULL_TREE)
9987 return tem;
9988 }
9989
9990 if (TREE_CODE (arg1) == COND_EXPR
9991 || TREE_CODE (arg1) == VEC_COND_EXPR
9992 || COMPARISON_CLASS_P (arg1))
9993 {
9994 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9995 arg1, arg0,
9996 /*cond_first_p=*/0);
9997 if (tem != NULL_TREE)
9998 return tem;
9999 }
10000 }
10001
10002 switch (code)
10003 {
10004 case MEM_REF:
10005 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
10006 if (TREE_CODE (arg0) == ADDR_EXPR
10007 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
10008 {
10009 tree iref = TREE_OPERAND (arg0, 0);
10010 return fold_build2 (MEM_REF, type,
10011 TREE_OPERAND (iref, 0),
10012 int_const_binop (PLUS_EXPR, arg1,
10013 TREE_OPERAND (iref, 1)));
10014 }
10015
10016 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
10017 if (TREE_CODE (arg0) == ADDR_EXPR
10018 && handled_component_p (TREE_OPERAND (arg0, 0)))
10019 {
10020 tree base;
10021 HOST_WIDE_INT coffset;
10022 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
10023 &coffset);
10024 if (!base)
10025 return NULL_TREE;
10026 return fold_build2 (MEM_REF, type,
10027 build_fold_addr_expr (base),
10028 int_const_binop (PLUS_EXPR, arg1,
10029 size_int (coffset)));
10030 }
10031
10032 return NULL_TREE;
10033
10034 case POINTER_PLUS_EXPR:
10035 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
10036 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10037 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
10038 return fold_convert_loc (loc, type,
10039 fold_build2_loc (loc, PLUS_EXPR, sizetype,
10040 fold_convert_loc (loc, sizetype,
10041 arg1),
10042 fold_convert_loc (loc, sizetype,
10043 arg0)));
10044
10045 return NULL_TREE;
10046
10047 case PLUS_EXPR:
10048 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10049 {
10050 /* X + (X / CST) * -CST is X % CST. */
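/* This follows from the truncating-division identity
X == (X / CST) * CST + X % CST. */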
10051 if (TREE_CODE (arg1) == MULT_EXPR
10052 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10053 && operand_equal_p (arg0,
10054 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
10055 {
10056 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
10057 tree cst1 = TREE_OPERAND (arg1, 1);
10058 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
10059 cst1, cst0);
10060 if (sum && integer_zerop (sum))
10061 return fold_convert_loc (loc, type,
10062 fold_build2_loc (loc, TRUNC_MOD_EXPR,
10063 TREE_TYPE (arg0), arg0,
10064 cst0));
10065 }
10066 }
10067
10068 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
10069 one. Make sure the type is not saturating and has the signedness of
10070 the stripped operands, as fold_plusminus_mult_expr will re-associate.
10071 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
10072 if ((TREE_CODE (arg0) == MULT_EXPR
10073 || TREE_CODE (arg1) == MULT_EXPR)
10074 && !TYPE_SATURATING (type)
10075 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10076 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10077 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10078 {
10079 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10080 if (tem)
10081 return tem;
10082 }
10083
10084 if (! FLOAT_TYPE_P (type))
10085 {
10086 /* If we are adding two BIT_AND_EXPR's, both of which are and'ing
10087 with a constant, and the two constants have no bits in common,
10088 we should treat this as a BIT_IOR_EXPR since this may produce more
10089 simplifications. */
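/* For example, (X & 0xF0) + (Y & 0x0F) equals (X & 0xF0) | (Y & 0x0F)
since no carries can propagate between the disjoint bit ranges. */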
10090 if (TREE_CODE (arg0) == BIT_AND_EXPR
10091 && TREE_CODE (arg1) == BIT_AND_EXPR
10092 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10093 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
10094 && wi::bit_and (TREE_OPERAND (arg0, 1),
10095 TREE_OPERAND (arg1, 1)) == 0)
10096 {
10097 code = BIT_IOR_EXPR;
10098 goto bit_ior;
10099 }
10100
10101 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
10102 (plus (plus (mult) (mult)) (foo)) so that we can
10103 take advantage of the factoring cases below. */
10104 if (ANY_INTEGRAL_TYPE_P (type)
10105 && TYPE_OVERFLOW_WRAPS (type)
10106 && (((TREE_CODE (arg0) == PLUS_EXPR
10107 || TREE_CODE (arg0) == MINUS_EXPR)
10108 && TREE_CODE (arg1) == MULT_EXPR)
10109 || ((TREE_CODE (arg1) == PLUS_EXPR
10110 || TREE_CODE (arg1) == MINUS_EXPR)
10111 && TREE_CODE (arg0) == MULT_EXPR)))
10112 {
10113 tree parg0, parg1, parg, marg;
10114 enum tree_code pcode;
10115
10116 if (TREE_CODE (arg1) == MULT_EXPR)
10117 parg = arg0, marg = arg1;
10118 else
10119 parg = arg1, marg = arg0;
10120 pcode = TREE_CODE (parg);
10121 parg0 = TREE_OPERAND (parg, 0);
10122 parg1 = TREE_OPERAND (parg, 1);
10123 STRIP_NOPS (parg0);
10124 STRIP_NOPS (parg1);
10125
10126 if (TREE_CODE (parg0) == MULT_EXPR
10127 && TREE_CODE (parg1) != MULT_EXPR)
10128 return fold_build2_loc (loc, pcode, type,
10129 fold_build2_loc (loc, PLUS_EXPR, type,
10130 fold_convert_loc (loc, type,
10131 parg0),
10132 fold_convert_loc (loc, type,
10133 marg)),
10134 fold_convert_loc (loc, type, parg1));
10135 if (TREE_CODE (parg0) != MULT_EXPR
10136 && TREE_CODE (parg1) == MULT_EXPR)
10137 return
10138 fold_build2_loc (loc, PLUS_EXPR, type,
10139 fold_convert_loc (loc, type, parg0),
10140 fold_build2_loc (loc, pcode, type,
10141 fold_convert_loc (loc, type, marg),
10142 fold_convert_loc (loc, type,
10143 parg1)));
10144 }
10145 }
10146 else
10147 {
10148 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
10149 to __complex__ ( x, y ). This is not the same for SNaNs or
10150 if signed zeros are involved. */
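/* E.g. if x is -0.0, the real part of the sum is -0.0 + 0.0 == +0.0,
but the transformed result would keep -0.0. */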
10151 if (!HONOR_SNANS (element_mode (arg0))
10152 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10153 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10154 {
10155 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10156 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10157 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10158 bool arg0rz = false, arg0iz = false;
10159 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10160 || (arg0i && (arg0iz = real_zerop (arg0i))))
10161 {
10162 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10163 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10164 if (arg0rz && arg1i && real_zerop (arg1i))
10165 {
10166 tree rp = arg1r ? arg1r
10167 : build1 (REALPART_EXPR, rtype, arg1);
10168 tree ip = arg0i ? arg0i
10169 : build1 (IMAGPART_EXPR, rtype, arg0);
10170 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10171 }
10172 else if (arg0iz && arg1r && real_zerop (arg1r))
10173 {
10174 tree rp = arg0r ? arg0r
10175 : build1 (REALPART_EXPR, rtype, arg0);
10176 tree ip = arg1i ? arg1i
10177 : build1 (IMAGPART_EXPR, rtype, arg1);
10178 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10179 }
10180 }
10181 }
10182
10183 if (flag_unsafe_math_optimizations
10184 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10185 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10186 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10187 return tem;
10188
10189 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
10190 We associate floats only if the user has specified
10191 -fassociative-math. */
10192 if (flag_associative_math
10193 && TREE_CODE (arg1) == PLUS_EXPR
10194 && TREE_CODE (arg0) != MULT_EXPR)
10195 {
10196 tree tree10 = TREE_OPERAND (arg1, 0);
10197 tree tree11 = TREE_OPERAND (arg1, 1);
10198 if (TREE_CODE (tree11) == MULT_EXPR
10199 && TREE_CODE (tree10) == MULT_EXPR)
10200 {
10201 tree tree0;
10202 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
10203 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
10204 }
10205 }
10206 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
10207 We associate floats only if the user has specified
10208 -fassociative-math. */
10209 if (flag_associative_math
10210 && TREE_CODE (arg0) == PLUS_EXPR
10211 && TREE_CODE (arg1) != MULT_EXPR)
10212 {
10213 tree tree00 = TREE_OPERAND (arg0, 0);
10214 tree tree01 = TREE_OPERAND (arg0, 1);
10215 if (TREE_CODE (tree01) == MULT_EXPR
10216 && TREE_CODE (tree00) == MULT_EXPR)
10217 {
10218 tree tree0;
10219 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
10220 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
10221 }
10222 }
10223 }
10224
10225 bit_rotate:
10226 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
10227 is a rotate of A by C1 bits. */
10228 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
10229 is a rotate of A by B bits. */
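/* For instance, for a 32-bit unsigned A, (A << 3) + (A >> 29) is A
rotated left by 3, and (A << B) + (A >> (32 - B)) is A rotated left
by B. */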
10230 {
10231 enum tree_code code0, code1;
10232 tree rtype;
10233 code0 = TREE_CODE (arg0);
10234 code1 = TREE_CODE (arg1);
10235 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
10236 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
10237 && operand_equal_p (TREE_OPERAND (arg0, 0),
10238 TREE_OPERAND (arg1, 0), 0)
10239 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
10240 TYPE_UNSIGNED (rtype))
10241 /* Only create rotates in complete modes. Other cases are not
10242 expanded properly. */
10243 && (element_precision (rtype)
10244 == element_precision (TYPE_MODE (rtype))))
10245 {
10246 tree tree01, tree11;
10247 enum tree_code code01, code11;
10248
10249 tree01 = TREE_OPERAND (arg0, 1);
10250 tree11 = TREE_OPERAND (arg1, 1);
10251 STRIP_NOPS (tree01);
10252 STRIP_NOPS (tree11);
10253 code01 = TREE_CODE (tree01);
10254 code11 = TREE_CODE (tree11);
10255 if (code01 == INTEGER_CST
10256 && code11 == INTEGER_CST
10257 && (wi::to_widest (tree01) + wi::to_widest (tree11)
10258 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
10259 {
10260 tem = build2_loc (loc, LROTATE_EXPR,
10261 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10262 TREE_OPERAND (arg0, 0),
10263 code0 == LSHIFT_EXPR
10264 ? TREE_OPERAND (arg0, 1)
10265 : TREE_OPERAND (arg1, 1));
10266 return fold_convert_loc (loc, type, tem);
10267 }
10268 else if (code11 == MINUS_EXPR)
10269 {
10270 tree tree110, tree111;
10271 tree110 = TREE_OPERAND (tree11, 0);
10272 tree111 = TREE_OPERAND (tree11, 1);
10273 STRIP_NOPS (tree110);
10274 STRIP_NOPS (tree111);
10275 if (TREE_CODE (tree110) == INTEGER_CST
10276 && 0 == compare_tree_int (tree110,
10277 element_precision
10278 (TREE_TYPE (TREE_OPERAND
10279 (arg0, 0))))
10280 && operand_equal_p (tree01, tree111, 0))
10281 return
10282 fold_convert_loc (loc, type,
10283 build2 ((code0 == LSHIFT_EXPR
10284 ? LROTATE_EXPR
10285 : RROTATE_EXPR),
10286 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10287 TREE_OPERAND (arg0, 0),
10288 TREE_OPERAND (arg0, 1)));
10289 }
10290 else if (code01 == MINUS_EXPR)
10291 {
10292 tree tree010, tree011;
10293 tree010 = TREE_OPERAND (tree01, 0);
10294 tree011 = TREE_OPERAND (tree01, 1);
10295 STRIP_NOPS (tree010);
10296 STRIP_NOPS (tree011);
10297 if (TREE_CODE (tree010) == INTEGER_CST
10298 && 0 == compare_tree_int (tree010,
10299 element_precision
10300 (TREE_TYPE (TREE_OPERAND
10301 (arg0, 0))))
10302 && operand_equal_p (tree11, tree011, 0))
10303 return fold_convert_loc
10304 (loc, type,
10305 build2 ((code0 != LSHIFT_EXPR
10306 ? LROTATE_EXPR
10307 : RROTATE_EXPR),
10308 TREE_TYPE (TREE_OPERAND (arg0, 0)),
10309 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
10310 }
10311 }
10312 }
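	  /* For example, with a 32-bit unsigned A, (A << 5) + (A >> 27) has
	     shift counts summing to the precision of A, so the two halves
	     cannot overlap and the sum becomes A LROTATE_EXPR 5.  The | and
	     ^ forms reach the same code through the bit_rotate label.  */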
10313
10314 associate:
10315 	  /* In most languages, we can't associate operations on floats
10316 	     through parentheses.  Rather than remember where the parentheses
10317 	     were, we don't associate floats at all, unless the user has
10318 	     specified -fassociative-math.
10319 	     We also need to make sure the type is not saturating.  */
10320
10321 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
10322 && !TYPE_SATURATING (type))
10323 {
10324 tree var0, con0, lit0, minus_lit0;
10325 tree var1, con1, lit1, minus_lit1;
10326 tree atype = type;
10327 bool ok = true;
10328
10329 /* Split both trees into variables, constants, and literals. Then
10330 associate each group together, the constants with literals,
10331 then the result with variables. This increases the chances of
10332 literals being recombined later and of generating relocatable
10333 expressions for the sum of a constant and literal. */
10334 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
10335 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
10336 code == MINUS_EXPR);
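	  /* For example, (x + 1) + (y + 2) splits into variables x, y and
	     literals 1, 2; the code below recombines them as (x + y) + 3,
	     exposing a single constant to later folding.  */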
10337
10338 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
10339 if (code == MINUS_EXPR)
10340 code = PLUS_EXPR;
10341
10342 /* With undefined overflow prefer doing association in a type
10343 which wraps on overflow, if that is one of the operand types. */
10344 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10345 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
10346 {
10347 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10348 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10349 atype = TREE_TYPE (arg0);
10350 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10351 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
10352 atype = TREE_TYPE (arg1);
10353 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
10354 }
10355
10356 /* With undefined overflow we can only associate constants with one
10357 variable, and constants whose association doesn't overflow. */
10358 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
10359 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
10360 {
10361 if (var0 && var1)
10362 {
10363 tree tmp0 = var0;
10364 tree tmp1 = var1;
10365
10366 if (TREE_CODE (tmp0) == NEGATE_EXPR)
10367 tmp0 = TREE_OPERAND (tmp0, 0);
10368 if (CONVERT_EXPR_P (tmp0)
10369 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10370 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
10371 <= TYPE_PRECISION (atype)))
10372 tmp0 = TREE_OPERAND (tmp0, 0);
10373 if (TREE_CODE (tmp1) == NEGATE_EXPR)
10374 tmp1 = TREE_OPERAND (tmp1, 0);
10375 if (CONVERT_EXPR_P (tmp1)
10376 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10377 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
10378 <= TYPE_PRECISION (atype)))
10379 tmp1 = TREE_OPERAND (tmp1, 0);
10380 /* The only case we can still associate with two variables
10381 is if they are the same, modulo negation and bit-pattern
10382 preserving conversions. */
10383 if (!operand_equal_p (tmp0, tmp1, 0))
10384 ok = false;
10385 }
10386 }
10387
10388 /* Only do something if we found more than two objects. Otherwise,
10389 nothing has changed and we risk infinite recursion. */
10390 if (ok
10391 && (2 < ((var0 != 0) + (var1 != 0)
10392 + (con0 != 0) + (con1 != 0)
10393 + (lit0 != 0) + (lit1 != 0)
10394 + (minus_lit0 != 0) + (minus_lit1 != 0))))
10395 {
10396 bool any_overflows = false;
10397 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
10398 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
10399 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
10400 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
10401 var0 = associate_trees (loc, var0, var1, code, atype);
10402 con0 = associate_trees (loc, con0, con1, code, atype);
10403 lit0 = associate_trees (loc, lit0, lit1, code, atype);
10404 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
10405 code, atype);
10406
10407 	      /* Preserve the MINUS_EXPR if the negative part of the literal is
10408 		 greater than the positive part.  Otherwise, the multiplicative
10409 		 folding code (i.e. extract_muldiv) may be fooled when
10410 		 unsigned constants are subtracted, as in the following
10411 		 example: ((X*2 + 4) - 8U)/2.  */
10412 if (minus_lit0 && lit0)
10413 {
10414 if (TREE_CODE (lit0) == INTEGER_CST
10415 && TREE_CODE (minus_lit0) == INTEGER_CST
10416 && tree_int_cst_lt (lit0, minus_lit0))
10417 {
10418 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
10419 MINUS_EXPR, atype);
10420 lit0 = 0;
10421 }
10422 else
10423 {
10424 lit0 = associate_trees (loc, lit0, minus_lit0,
10425 MINUS_EXPR, atype);
10426 minus_lit0 = 0;
10427 }
10428 }
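	      /* In the example above, lit0 == 4 and minus_lit0 == 8U:
		 folding them into the single constant 4 - 8U would wrap
		 around to a huge unsigned value, whereas keeping the
		 MINUS_EXPR yields the benign (X*2) - 4U.  */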
10429
10430 /* Don't introduce overflows through reassociation. */
10431 if (!any_overflows
10432 && ((lit0 && TREE_OVERFLOW_P (lit0))
10433 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
10434 return NULL_TREE;
10435
10436 if (minus_lit0)
10437 {
10438 if (con0 == 0)
10439 return
10440 fold_convert_loc (loc, type,
10441 associate_trees (loc, var0, minus_lit0,
10442 MINUS_EXPR, atype));
10443 else
10444 {
10445 con0 = associate_trees (loc, con0, minus_lit0,
10446 MINUS_EXPR, atype);
10447 return
10448 fold_convert_loc (loc, type,
10449 associate_trees (loc, var0, con0,
10450 PLUS_EXPR, atype));
10451 }
10452 }
10453
10454 con0 = associate_trees (loc, con0, lit0, code, atype);
10455 return
10456 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
10457 code, atype));
10458 }
10459 }
10460
10461 return NULL_TREE;
10462
10463 case MINUS_EXPR:
10464 /* Pointer simplifications for subtraction, simple reassociations. */
10465 if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
10466 {
10467 /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
10468 if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
10469 && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10470 {
10471 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10472 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10473 tree arg10 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10474 tree arg11 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10475 return fold_build2_loc (loc, PLUS_EXPR, type,
10476 fold_build2_loc (loc, MINUS_EXPR, type,
10477 arg00, arg10),
10478 fold_build2_loc (loc, MINUS_EXPR, type,
10479 arg01, arg11));
10480 }
10481 /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
10482 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
10483 {
10484 tree arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10485 tree arg01 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10486 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type, arg00,
10487 fold_convert_loc (loc, type, arg1));
10488 if (tmp)
10489 return fold_build2_loc (loc, PLUS_EXPR, type, tmp, arg01);
10490 }
10491 /* PTR0 - (PTR1 p+ A) -> (PTR0 - PTR1) - A, assuming PTR0 - PTR1
10492 simplifies. */
10493 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
10494 {
10495 tree arg10 = fold_convert_loc (loc, type,
10496 TREE_OPERAND (arg1, 0));
10497 tree arg11 = fold_convert_loc (loc, type,
10498 TREE_OPERAND (arg1, 1));
10499 tree tmp = fold_binary_loc (loc, MINUS_EXPR, type,
10500 fold_convert_loc (loc, type, arg0),
10501 arg10);
10502 if (tmp)
10503 return fold_build2_loc (loc, MINUS_EXPR, type, tmp, arg11);
10504 }
10505 }
10506 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
10507 if (TREE_CODE (arg0) == NEGATE_EXPR
10508 && negate_expr_p (arg1)
10509 && reorder_operands_p (arg0, arg1))
10510 return fold_build2_loc (loc, MINUS_EXPR, type,
10511 fold_convert_loc (loc, type,
10512 negate_expr (arg1)),
10513 fold_convert_loc (loc, type,
10514 TREE_OPERAND (arg0, 0)));
10515
10516 /* X - (X / Y) * Y is X % Y. */
10517 if ((INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
10518 && TREE_CODE (arg1) == MULT_EXPR
10519 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
10520 && operand_equal_p (arg0,
10521 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0)
10522 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg1, 0), 1),
10523 TREE_OPERAND (arg1, 1), 0))
10524 return
10525 fold_convert_loc (loc, type,
10526 fold_build2_loc (loc, TRUNC_MOD_EXPR, TREE_TYPE (arg0),
10527 arg0, TREE_OPERAND (arg1, 1)));
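	/* E.g. for integers a and b this rewrites a - (a / b) * b as
	   a % b, which holds by the definition of truncating division:
	   a == (a / b) * b + a % b.  */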
10528
10529 if (! FLOAT_TYPE_P (type))
10530 {
10531 /* Fold A - (A & B) into ~B & A. */
10532 if (!TREE_SIDE_EFFECTS (arg0)
10533 && TREE_CODE (arg1) == BIT_AND_EXPR)
10534 {
10535 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0))
10536 {
10537 tree arg10 = fold_convert_loc (loc, type,
10538 TREE_OPERAND (arg1, 0));
10539 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10540 fold_build1_loc (loc, BIT_NOT_EXPR,
10541 type, arg10),
10542 fold_convert_loc (loc, type, arg0));
10543 }
10544 if (operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10545 {
10546 tree arg11 = fold_convert_loc (loc,
10547 type, TREE_OPERAND (arg1, 1));
10548 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10549 fold_build1_loc (loc, BIT_NOT_EXPR,
10550 type, arg11),
10551 fold_convert_loc (loc, type, arg0));
10552 }
10553 }
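	      /* E.g. A = 0b1100, B = 0b1010: A & B = 0b1000, and
		 A - (A & B) = 0b0100 = ~B & A.  The subtraction can never
		 borrow because A & B only has bits that are also set
		 in A.  */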
10554
10555 /* Fold (A & ~B) - (A & B) into (A ^ B) - B, where B is
10556 any power of 2 minus 1. */
10557 if (TREE_CODE (arg0) == BIT_AND_EXPR
10558 && TREE_CODE (arg1) == BIT_AND_EXPR
10559 && operand_equal_p (TREE_OPERAND (arg0, 0),
10560 TREE_OPERAND (arg1, 0), 0))
10561 {
10562 tree mask0 = TREE_OPERAND (arg0, 1);
10563 tree mask1 = TREE_OPERAND (arg1, 1);
10564 tree tem = fold_build1_loc (loc, BIT_NOT_EXPR, type, mask0);
10565
10566 if (operand_equal_p (tem, mask1, 0))
10567 {
10568 tem = fold_build2_loc (loc, BIT_XOR_EXPR, type,
10569 TREE_OPERAND (arg0, 0), mask1);
10570 return fold_build2_loc (loc, MINUS_EXPR, type, tem, mask1);
10571 }
10572 }
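	  /* E.g. A = 0b1011, B = 0b0011: (A & ~B) - (A & B) = 8 - 3 = 5,
	     and (A ^ B) - B = 8 - 3 = 5 as well; for an all-ones low mask
	     B, A ^ B == (A & ~B) + (B - (A & B)), so the two forms
	     agree.  */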
10573 }
10574
10575 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
10576 __complex__ ( x, -y ). This is not the same for SNaNs or if
10577 signed zeros are involved. */
10578 if (!HONOR_SNANS (element_mode (arg0))
10579 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10580 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
10581 {
10582 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10583 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
10584 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
10585 bool arg0rz = false, arg0iz = false;
10586 if ((arg0r && (arg0rz = real_zerop (arg0r)))
10587 || (arg0i && (arg0iz = real_zerop (arg0i))))
10588 {
10589 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
10590 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
10591 if (arg0rz && arg1i && real_zerop (arg1i))
10592 {
10593 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10594 arg1r ? arg1r
10595 : build1 (REALPART_EXPR, rtype, arg1));
10596 tree ip = arg0i ? arg0i
10597 : build1 (IMAGPART_EXPR, rtype, arg0);
10598 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10599 }
10600 else if (arg0iz && arg1r && real_zerop (arg1r))
10601 {
10602 tree rp = arg0r ? arg0r
10603 : build1 (REALPART_EXPR, rtype, arg0);
10604 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
10605 arg1i ? arg1i
10606 : build1 (IMAGPART_EXPR, rtype, arg1));
10607 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
10608 }
10609 }
10610 }
10611
10612 /* A - B -> A + (-B) if B is easily negatable. */
10613 if (negate_expr_p (arg1)
10614 && !TYPE_OVERFLOW_SANITIZED (type)
10615 && ((FLOAT_TYPE_P (type)
10616 /* Avoid this transformation if B is a positive REAL_CST. */
10617 && (TREE_CODE (arg1) != REAL_CST
10618 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg1))))
10619 || INTEGRAL_TYPE_P (type)))
10620 return fold_build2_loc (loc, PLUS_EXPR, type,
10621 fold_convert_loc (loc, type, arg0),
10622 fold_convert_loc (loc, type,
10623 negate_expr (arg1)));
10624
10625 /* Try folding difference of addresses. */
10626 {
10627 HOST_WIDE_INT diff;
10628
10629 if ((TREE_CODE (arg0) == ADDR_EXPR
10630 || TREE_CODE (arg1) == ADDR_EXPR)
10631 && ptr_difference_const (arg0, arg1, &diff))
10632 return build_int_cst_type (type, diff);
10633 }
10634
10635 /* Fold &a[i] - &a[j] to i-j. */
10636 if (TREE_CODE (arg0) == ADDR_EXPR
10637 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
10638 && TREE_CODE (arg1) == ADDR_EXPR
10639 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
10640 {
10641 tree tem = fold_addr_of_array_ref_difference (loc, type,
10642 TREE_OPERAND (arg0, 0),
10643 TREE_OPERAND (arg1, 0));
10644 if (tem)
10645 return tem;
10646 }
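	/* E.g. given int a[10], &a[7] - &a[2] is folded from the constant
	   index difference 7 - 2 rather than from the two addresses.  */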
10647
10648 if (FLOAT_TYPE_P (type)
10649 && flag_unsafe_math_optimizations
10650 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
10651 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
10652 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
10653 return tem;
10654
10655 	  /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same,
10656 	     or one of them being 1.  Make sure the type is not saturating and has
10657 	     the signedness of the stripped operands, as fold_plusminus_mult_expr will re-associate.
10658 	     ??? The latter condition should use TYPE_OVERFLOW_* flags instead.  */
10659 if ((TREE_CODE (arg0) == MULT_EXPR
10660 || TREE_CODE (arg1) == MULT_EXPR)
10661 && !TYPE_SATURATING (type)
10662 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
10663 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
10664 && (!FLOAT_TYPE_P (type) || flag_associative_math))
10665 {
10666 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
10667 if (tem)
10668 return tem;
10669 }
10670
10671 goto associate;
10672
10673 case MULT_EXPR:
10674 /* (-A) * (-B) -> A * B */
10675 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10676 return fold_build2_loc (loc, MULT_EXPR, type,
10677 fold_convert_loc (loc, type,
10678 TREE_OPERAND (arg0, 0)),
10679 fold_convert_loc (loc, type,
10680 negate_expr (arg1)));
10681 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10682 return fold_build2_loc (loc, MULT_EXPR, type,
10683 fold_convert_loc (loc, type,
10684 negate_expr (arg0)),
10685 fold_convert_loc (loc, type,
10686 TREE_OPERAND (arg1, 0)));
10687
10688 if (! FLOAT_TYPE_P (type))
10689 {
10690 /* Transform x * -C into -x * C if x is easily negatable. */
10691 if (TREE_CODE (arg1) == INTEGER_CST
10692 && tree_int_cst_sgn (arg1) == -1
10693 && negate_expr_p (arg0)
10694 && (tem = negate_expr (arg1)) != arg1
10695 && !TREE_OVERFLOW (tem))
10696 return fold_build2_loc (loc, MULT_EXPR, type,
10697 fold_convert_loc (loc, type,
10698 negate_expr (arg0)),
10699 tem);
10700
10701 /* (a * (1 << b)) is (a << b) */
10702 if (TREE_CODE (arg1) == LSHIFT_EXPR
10703 && integer_onep (TREE_OPERAND (arg1, 0)))
10704 return fold_build2_loc (loc, LSHIFT_EXPR, type, op0,
10705 TREE_OPERAND (arg1, 1));
10706 if (TREE_CODE (arg0) == LSHIFT_EXPR
10707 && integer_onep (TREE_OPERAND (arg0, 0)))
10708 return fold_build2_loc (loc, LSHIFT_EXPR, type, op1,
10709 TREE_OPERAND (arg0, 1));
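	  /* E.g. a * (1 << 3), a multiplication by 8, becomes a << 3, and
	     symmetrically (1 << b) * a becomes a << b.  */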
10710
10711 /* (A + A) * C -> A * 2 * C */
10712 if (TREE_CODE (arg0) == PLUS_EXPR
10713 && TREE_CODE (arg1) == INTEGER_CST
10714 && operand_equal_p (TREE_OPERAND (arg0, 0),
10715 TREE_OPERAND (arg0, 1), 0))
10716 return fold_build2_loc (loc, MULT_EXPR, type,
10717 omit_one_operand_loc (loc, type,
10718 TREE_OPERAND (arg0, 0),
10719 TREE_OPERAND (arg0, 1)),
10720 fold_build2_loc (loc, MULT_EXPR, type,
10721 build_int_cst (type, 2) , arg1));
10722
10723 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
10724 sign-changing only. */
10725 if (TREE_CODE (arg1) == INTEGER_CST
10726 && TREE_CODE (arg0) == EXACT_DIV_EXPR
10727 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
10728 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10729
10730 strict_overflow_p = false;
10731 if (TREE_CODE (arg1) == INTEGER_CST
10732 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10733 &strict_overflow_p)))
10734 {
10735 if (strict_overflow_p)
10736 fold_overflow_warning (("assuming signed overflow does not "
10737 "occur when simplifying "
10738 "multiplication"),
10739 WARN_STRICT_OVERFLOW_MISC);
10740 return fold_convert_loc (loc, type, tem);
10741 }
10742
10743 /* Optimize z * conj(z) for integer complex numbers. */
10744 if (TREE_CODE (arg0) == CONJ_EXPR
10745 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10746 return fold_mult_zconjz (loc, type, arg1);
10747 if (TREE_CODE (arg1) == CONJ_EXPR
10748 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10749 return fold_mult_zconjz (loc, type, arg0);
10750 }
10751 else
10752 {
10753 	  /* Convert (C1/X)*C2 into (C1*C2)/X.  This transformation may change
10754 	     the result for floating-point types due to rounding, so it is
10755 	     applied only if -fassociative-math was specified.  */
10756 if (flag_associative_math
10757 && TREE_CODE (arg0) == RDIV_EXPR
10758 && TREE_CODE (arg1) == REAL_CST
10759 && TREE_CODE (TREE_OPERAND (arg0, 0)) == REAL_CST)
10760 {
10761 tree tem = const_binop (MULT_EXPR, TREE_OPERAND (arg0, 0),
10762 arg1);
10763 if (tem)
10764 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
10765 TREE_OPERAND (arg0, 1));
10766 }
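	      /* E.g. (2.0/X) * 3.0 becomes 6.0/X; the rounding of the
		 intermediate 2.0/X is lost, which is why this needs
		 -fassociative-math.  */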
10767
10768 /* Strip sign operations from X in X*X, i.e. -Y*-Y -> Y*Y. */
10769 if (operand_equal_p (arg0, arg1, 0))
10770 {
10771 tree tem = fold_strip_sign_ops (arg0);
10772 if (tem != NULL_TREE)
10773 {
10774 tem = fold_convert_loc (loc, type, tem);
10775 return fold_build2_loc (loc, MULT_EXPR, type, tem, tem);
10776 }
10777 }
10778
10779 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10780 This is not the same for NaNs or if signed zeros are
10781 involved. */
10782 if (!HONOR_NANS (arg0)
10783 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10784 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10785 && TREE_CODE (arg1) == COMPLEX_CST
10786 && real_zerop (TREE_REALPART (arg1)))
10787 {
10788 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10789 if (real_onep (TREE_IMAGPART (arg1)))
10790 return
10791 fold_build2_loc (loc, COMPLEX_EXPR, type,
10792 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10793 rtype, arg0)),
10794 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10795 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10796 return
10797 fold_build2_loc (loc, COMPLEX_EXPR, type,
10798 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10799 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10800 rtype, arg0)));
10801 }
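	      /* E.g. (a + bi) * I is -b + ai and (a + bi) * -I is b - ai,
		 which is exactly what the two branches above build.  */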
10802
10803 /* Optimize z * conj(z) for floating point complex numbers.
10804 Guarded by flag_unsafe_math_optimizations as non-finite
10805 imaginary components don't produce scalar results. */
10806 if (flag_unsafe_math_optimizations
10807 && TREE_CODE (arg0) == CONJ_EXPR
10808 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10809 return fold_mult_zconjz (loc, type, arg1);
10810 if (flag_unsafe_math_optimizations
10811 && TREE_CODE (arg1) == CONJ_EXPR
10812 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10813 return fold_mult_zconjz (loc, type, arg0);
10814
10815 if (flag_unsafe_math_optimizations)
10816 {
10817 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
10818 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
10819
10820 /* Optimizations of root(...)*root(...). */
10821 if (fcode0 == fcode1 && BUILTIN_ROOT_P (fcode0))
10822 {
10823 tree rootfn, arg;
10824 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10825 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10826
10827 /* Optimize sqrt(x)*sqrt(x) as x. */
10828 if (BUILTIN_SQRT_P (fcode0)
10829 && operand_equal_p (arg00, arg10, 0)
10830 && ! HONOR_SNANS (element_mode (type)))
10831 return arg00;
10832
10833 /* Optimize root(x)*root(y) as root(x*y). */
10834 rootfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10835 arg = fold_build2_loc (loc, MULT_EXPR, type, arg00, arg10);
10836 return build_call_expr_loc (loc, rootfn, 1, arg);
10837 }
10838
10839 /* Optimize expN(x)*expN(y) as expN(x+y). */
10840 if (fcode0 == fcode1 && BUILTIN_EXPONENT_P (fcode0))
10841 {
10842 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10843 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10844 CALL_EXPR_ARG (arg0, 0),
10845 CALL_EXPR_ARG (arg1, 0));
10846 return build_call_expr_loc (loc, expfn, 1, arg);
10847 }
10848
10849 /* Optimizations of pow(...)*pow(...). */
10850 if ((fcode0 == BUILT_IN_POW && fcode1 == BUILT_IN_POW)
10851 || (fcode0 == BUILT_IN_POWF && fcode1 == BUILT_IN_POWF)
10852 || (fcode0 == BUILT_IN_POWL && fcode1 == BUILT_IN_POWL))
10853 {
10854 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10855 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10856 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10857 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10858
10859 /* Optimize pow(x,y)*pow(z,y) as pow(x*z,y). */
10860 if (operand_equal_p (arg01, arg11, 0))
10861 {
10862 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10863 tree arg = fold_build2_loc (loc, MULT_EXPR, type,
10864 arg00, arg10);
10865 return build_call_expr_loc (loc, powfn, 2, arg, arg01);
10866 }
10867
10868 /* Optimize pow(x,y)*pow(x,z) as pow(x,y+z). */
10869 if (operand_equal_p (arg00, arg10, 0))
10870 {
10871 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10872 tree arg = fold_build2_loc (loc, PLUS_EXPR, type,
10873 arg01, arg11);
10874 return build_call_expr_loc (loc, powfn, 2, arg00, arg);
10875 }
10876 }
10877
10878 /* Optimize tan(x)*cos(x) as sin(x). */
10879 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_COS)
10880 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_COSF)
10881 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_COSL)
10882 || (fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_TAN)
10883 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_TANF)
10884 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_TANL))
10885 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
10886 CALL_EXPR_ARG (arg1, 0), 0))
10887 {
10888 tree sinfn = mathfn_built_in (type, BUILT_IN_SIN);
10889
10890 if (sinfn != NULL_TREE)
10891 return build_call_expr_loc (loc, sinfn, 1,
10892 CALL_EXPR_ARG (arg0, 0));
10893 }
10894
10895 /* Optimize x*pow(x,c) as pow(x,c+1). */
10896 if (fcode1 == BUILT_IN_POW
10897 || fcode1 == BUILT_IN_POWF
10898 || fcode1 == BUILT_IN_POWL)
10899 {
10900 tree arg10 = CALL_EXPR_ARG (arg1, 0);
10901 tree arg11 = CALL_EXPR_ARG (arg1, 1);
10902 if (TREE_CODE (arg11) == REAL_CST
10903 && !TREE_OVERFLOW (arg11)
10904 && operand_equal_p (arg0, arg10, 0))
10905 {
10906 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
10907 REAL_VALUE_TYPE c;
10908 tree arg;
10909
10910 c = TREE_REAL_CST (arg11);
10911 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10912 arg = build_real (type, c);
10913 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10914 }
10915 }
10916
10917 /* Optimize pow(x,c)*x as pow(x,c+1). */
10918 if (fcode0 == BUILT_IN_POW
10919 || fcode0 == BUILT_IN_POWF
10920 || fcode0 == BUILT_IN_POWL)
10921 {
10922 tree arg00 = CALL_EXPR_ARG (arg0, 0);
10923 tree arg01 = CALL_EXPR_ARG (arg0, 1);
10924 if (TREE_CODE (arg01) == REAL_CST
10925 && !TREE_OVERFLOW (arg01)
10926 && operand_equal_p (arg1, arg00, 0))
10927 {
10928 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
10929 REAL_VALUE_TYPE c;
10930 tree arg;
10931
10932 c = TREE_REAL_CST (arg01);
10933 real_arithmetic (&c, PLUS_EXPR, &c, &dconst1);
10934 arg = build_real (type, c);
10935 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
10936 }
10937 }
10938
10939 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
10940 if (!in_gimple_form
10941 && optimize
10942 && operand_equal_p (arg0, arg1, 0))
10943 {
10944 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
10945
10946 if (powfn)
10947 {
10948 tree arg = build_real (type, dconst2);
10949 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
10950 }
10951 }
10952 }
10953 }
10954 goto associate;
10955
10956 case BIT_IOR_EXPR:
10957 bit_ior:
10958 /* ~X | X is -1. */
10959 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10960 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10961 {
10962 t1 = build_zero_cst (type);
10963 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10964 return omit_one_operand_loc (loc, type, t1, arg1);
10965 }
10966
10967 /* X | ~X is -1. */
10968 if (TREE_CODE (arg1) == BIT_NOT_EXPR
10969 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10970 {
10971 t1 = build_zero_cst (type);
10972 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
10973 return omit_one_operand_loc (loc, type, t1, arg0);
10974 }
10975
10976 /* Canonicalize (X & C1) | C2. */
10977 if (TREE_CODE (arg0) == BIT_AND_EXPR
10978 && TREE_CODE (arg1) == INTEGER_CST
10979 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10980 {
10981 int width = TYPE_PRECISION (type), w;
10982 wide_int c1 = TREE_OPERAND (arg0, 1);
10983 wide_int c2 = arg1;
10984
10985 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10986 if ((c1 & c2) == c1)
10987 return omit_one_operand_loc (loc, type, arg1,
10988 TREE_OPERAND (arg0, 0));
10989
10990 wide_int msk = wi::mask (width, false,
10991 TYPE_PRECISION (TREE_TYPE (arg1)));
10992
10993 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10994 if (msk.and_not (c1 | c2) == 0)
10995 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10996 TREE_OPERAND (arg0, 0), arg1);
10997
10998 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10999 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
11000 mode which allows further optimizations. */
11001 c1 &= msk;
11002 c2 &= msk;
11003 wide_int c3 = c1.and_not (c2);
11004 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
11005 {
11006 wide_int mask = wi::mask (w, false,
11007 TYPE_PRECISION (type));
11008 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
11009 {
11010 c3 = mask;
11011 break;
11012 }
11013 }
11014
11015 if (c3 != c1)
11016 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
11017 fold_build2_loc (loc, BIT_AND_EXPR, type,
11018 TREE_OPERAND (arg0, 0),
11019 wide_int_to_tree (type,
11020 c3)),
11021 arg1);
11022 }
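	  /* Examples on an 8-bit type: (X & 0x0F) | 0xFF is just 0xFF, as
	     C1 & C2 == C1; (X & 0xF0) | 0x0F becomes X | 0x0F, as C1 | C2
	     covers every bit; and (X & 0x3F) | 0x0F becomes
	     (X & 0x30) | 0x0F, dropping the bits of C1 that C2 already
	     supplies.  */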
11023
11024 /* (X & ~Y) | (~X & Y) is X ^ Y */
11025 if (TREE_CODE (arg0) == BIT_AND_EXPR
11026 && TREE_CODE (arg1) == BIT_AND_EXPR)
11027 {
11028 tree a0, a1, l0, l1, n0, n1;
11029
11030 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11031 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11032
11033 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11034 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11035
11036 n0 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l0);
11037 n1 = fold_build1_loc (loc, BIT_NOT_EXPR, type, l1);
11038
11039 if ((operand_equal_p (n0, a0, 0)
11040 && operand_equal_p (n1, a1, 0))
11041 || (operand_equal_p (n0, a1, 0)
11042 && operand_equal_p (n1, a0, 0)))
11043 return fold_build2_loc (loc, BIT_XOR_EXPR, type, l0, n1);
11044 }
11045
11046 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11047 if (t1 != NULL_TREE)
11048 return t1;
11049
11050 /* Convert (or (not arg0) (not arg1)) to (not (and (arg0) (arg1))).
11051
11052 This results in more efficient code for machines without a NAND
11053 instruction. Combine will canonicalize to the first form
11054 which will allow use of NAND instructions provided by the
11055 backend if they exist. */
11056 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11057 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11058 {
11059 return
11060 fold_build1_loc (loc, BIT_NOT_EXPR, type,
11061 build2 (BIT_AND_EXPR, type,
11062 fold_convert_loc (loc, type,
11063 TREE_OPERAND (arg0, 0)),
11064 fold_convert_loc (loc, type,
11065 TREE_OPERAND (arg1, 0))));
11066 }
11067
11068 /* See if this can be simplified into a rotate first. If that
11069 is unsuccessful continue in the association code. */
11070 goto bit_rotate;
11071
11072 case BIT_XOR_EXPR:
11073 /* ~X ^ X is -1. */
11074 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11075 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11076 {
11077 t1 = build_zero_cst (type);
11078 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11079 return omit_one_operand_loc (loc, type, t1, arg1);
11080 }
11081
11082 /* X ^ ~X is -1. */
11083 if (TREE_CODE (arg1) == BIT_NOT_EXPR
11084 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11085 {
11086 t1 = build_zero_cst (type);
11087 t1 = fold_unary_loc (loc, BIT_NOT_EXPR, type, t1);
11088 return omit_one_operand_loc (loc, type, t1, arg0);
11089 }
11090
11091 /* If we are XORing two BIT_AND_EXPR's, both of which are and'ing
11092 with a constant, and the two constants have no bits in common,
11093 we should treat this as a BIT_IOR_EXPR since this may produce more
11094 simplifications. */
11095 if (TREE_CODE (arg0) == BIT_AND_EXPR
11096 && TREE_CODE (arg1) == BIT_AND_EXPR
11097 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11098 && TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
11099 && wi::bit_and (TREE_OPERAND (arg0, 1),
11100 TREE_OPERAND (arg1, 1)) == 0)
11101 {
11102 code = BIT_IOR_EXPR;
11103 goto bit_ior;
11104 }
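	  /* E.g. (X & 0x0F) ^ (Y & 0xF0): the masked values can have no
	     set bit in common, so the ^ behaves exactly like | here.  */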
11105
11106 	      /* (X | Y) ^ X -> Y & ~X.  */
11107 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11108 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11109 {
11110 tree t2 = TREE_OPERAND (arg0, 1);
11111 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11112 arg1);
11113 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11114 fold_convert_loc (loc, type, t2),
11115 fold_convert_loc (loc, type, t1));
11116 return t1;
11117 }
11118
11119 	      /* (Y | X) ^ X -> Y & ~X.  */
11120 if (TREE_CODE (arg0) == BIT_IOR_EXPR
11121 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11122 {
11123 tree t2 = TREE_OPERAND (arg0, 0);
11124 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1),
11125 arg1);
11126 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11127 fold_convert_loc (loc, type, t2),
11128 fold_convert_loc (loc, type, t1));
11129 return t1;
11130 }
11131
11132 	      /* X ^ (X | Y) -> Y & ~X.  */
11133 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11134 && operand_equal_p (TREE_OPERAND (arg1, 0), arg0, 0))
11135 {
11136 tree t2 = TREE_OPERAND (arg1, 1);
11137 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11138 arg0);
11139 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11140 fold_convert_loc (loc, type, t2),
11141 fold_convert_loc (loc, type, t1));
11142 return t1;
11143 }
11144
11145 	      /* X ^ (Y | X) -> Y & ~X.  */
11146 if (TREE_CODE (arg1) == BIT_IOR_EXPR
11147 && operand_equal_p (TREE_OPERAND (arg1, 1), arg0, 0))
11148 {
11149 tree t2 = TREE_OPERAND (arg1, 0);
11150 t1 = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg0),
11151 arg0);
11152 t1 = fold_build2_loc (loc, BIT_AND_EXPR, type,
11153 fold_convert_loc (loc, type, t2),
11154 fold_convert_loc (loc, type, t1));
11155 return t1;
11156 }
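	  /* All four variants above are the same per-bit identity: where X
	     is 1, (X | Y) ^ X and Y & ~X are both 0; where X is 0, both
	     reduce to Y.  */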
11157
11158 /* Convert ~X ^ ~Y to X ^ Y. */
11159 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11160 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11161 return fold_build2_loc (loc, code, type,
11162 fold_convert_loc (loc, type,
11163 TREE_OPERAND (arg0, 0)),
11164 fold_convert_loc (loc, type,
11165 TREE_OPERAND (arg1, 0)));
11166
11167 /* Convert ~X ^ C to X ^ ~C. */
11168 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11169 && TREE_CODE (arg1) == INTEGER_CST)
11170 return fold_build2_loc (loc, code, type,
11171 fold_convert_loc (loc, type,
11172 TREE_OPERAND (arg0, 0)),
11173 fold_build1_loc (loc, BIT_NOT_EXPR, type, arg1));
11174
11175 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
11176 if (TREE_CODE (arg0) == BIT_AND_EXPR
11177 && INTEGRAL_TYPE_P (type)
11178 && integer_onep (TREE_OPERAND (arg0, 1))
11179 && integer_onep (arg1))
11180 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
11181 build_zero_cst (TREE_TYPE (arg0)));
11182
11183 /* Fold (X & Y) ^ Y as ~X & Y. */
11184 if (TREE_CODE (arg0) == BIT_AND_EXPR
11185 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11186 {
11187 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11188 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11189 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11190 fold_convert_loc (loc, type, arg1));
11191 }
11192 /* Fold (X & Y) ^ X as ~Y & X. */
11193 if (TREE_CODE (arg0) == BIT_AND_EXPR
11194 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11195 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11196 {
11197 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11198 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11199 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11200 fold_convert_loc (loc, type, arg1));
11201 }
11202 /* Fold X ^ (X & Y) as X & ~Y. */
11203 if (TREE_CODE (arg1) == BIT_AND_EXPR
11204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11205 {
11206 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11207 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11208 fold_convert_loc (loc, type, arg0),
11209 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11210 }
11211 /* Fold X ^ (Y & X) as ~Y & X. */
11212 if (TREE_CODE (arg1) == BIT_AND_EXPR
11213 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11214 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11215 {
11216 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11217 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11218 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11219 fold_convert_loc (loc, type, arg0));
11220 }
11221
11222 /* See if this can be simplified into a rotate first. If that
11223 is unsuccessful continue in the association code. */
11224 goto bit_rotate;
11225
11226 case BIT_AND_EXPR:
11227 /* ~X & X, (X == 0) & X, and !X & X are always zero. */
11228 if ((TREE_CODE (arg0) == BIT_NOT_EXPR
11229 || TREE_CODE (arg0) == TRUTH_NOT_EXPR
11230 || (TREE_CODE (arg0) == EQ_EXPR
11231 && integer_zerop (TREE_OPERAND (arg0, 1))))
11232 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
11233 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
11234
11235 /* X & ~X , X & (X == 0), and X & !X are always zero. */
11236 if ((TREE_CODE (arg1) == BIT_NOT_EXPR
11237 || TREE_CODE (arg1) == TRUTH_NOT_EXPR
11238 || (TREE_CODE (arg1) == EQ_EXPR
11239 && integer_zerop (TREE_OPERAND (arg1, 1))))
11240 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11241 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
11242
11243 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
11244 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11245 && INTEGRAL_TYPE_P (type)
11246 && integer_onep (TREE_OPERAND (arg0, 1))
11247 && integer_onep (arg1))
11248 {
11249 tree tem2;
11250 tem = TREE_OPERAND (arg0, 0);
11251 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11252 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11253 tem, tem2);
11254 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11255 build_zero_cst (TREE_TYPE (tem)));
11256 }
11257 /* Fold ~X & 1 as (X & 1) == 0. */
11258 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11259 && INTEGRAL_TYPE_P (type)
11260 && integer_onep (arg1))
11261 {
11262 tree tem2;
11263 tem = TREE_OPERAND (arg0, 0);
11264 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
11265 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
11266 tem, tem2);
11267 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
11268 build_zero_cst (TREE_TYPE (tem)));
11269 }
11270 /* Fold !X & 1 as X == 0. */
11271 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
11272 && integer_onep (arg1))
11273 {
11274 tem = TREE_OPERAND (arg0, 0);
11275 return fold_build2_loc (loc, EQ_EXPR, type, tem,
11276 build_zero_cst (TREE_TYPE (tem)));
11277 }
11278
11279 /* Fold (X ^ Y) & Y as ~X & Y. */
11280 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11281 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
11282 {
11283 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11284 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11285 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11286 fold_convert_loc (loc, type, arg1));
11287 }
11288 /* Fold (X ^ Y) & X as ~Y & X. */
11289 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11290 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11291 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
11292 {
11293 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
11294 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11295 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11296 fold_convert_loc (loc, type, arg1));
11297 }
11298 /* Fold X & (X ^ Y) as X & ~Y. */
11299 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11300 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
11301 {
11302 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
11303 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11304 fold_convert_loc (loc, type, arg0),
11305 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
11306 }
11307 /* Fold X & (Y ^ X) as ~Y & X. */
11308 if (TREE_CODE (arg1) == BIT_XOR_EXPR
11309 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
11310 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
11311 {
11312 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
11313 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11314 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
11315 fold_convert_loc (loc, type, arg0));
11316 }
11317
11318 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
11319 multiple of 1 << CST. */
11320 if (TREE_CODE (arg1) == INTEGER_CST)
11321 {
11322 wide_int cst1 = arg1;
11323 wide_int ncst1 = -cst1;
11324 if ((cst1 & ncst1) == ncst1
11325 && multiple_of_p (type, arg0,
11326 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
11327 return fold_convert_loc (loc, type, arg0);
11328 }
11329
11330 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
11331 bits from CST2. */
11332 if (TREE_CODE (arg1) == INTEGER_CST
11333 && TREE_CODE (arg0) == MULT_EXPR
11334 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
11335 {
11336 wide_int warg1 = arg1;
11337 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
11338
11339 if (masked == 0)
11340 return omit_two_operands_loc (loc, type, build_zero_cst (type),
11341 arg0, arg1);
11342 else if (masked != warg1)
11343 {
11344 /* Avoid the transform if arg1 is a mask of some
11345 mode which allows further optimizations. */
11346 int pop = wi::popcount (warg1);
11347 if (!(pop >= BITS_PER_UNIT
11348 && exact_log2 (pop) != -1
11349 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
11350 return fold_build2_loc (loc, code, type, op0,
11351 wide_int_to_tree (type, masked));
11352 }
11353 }
11354
11355 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
11356 ((A & N) + B) & M -> (A + B) & M
11357 Similarly if (N & M) == 0,
11358 ((A | N) + B) & M -> (A + B) & M
11359 and for - instead of + (or unary - instead of +)
11360 and/or ^ instead of |.
11361 If B is constant and (B & M) == 0, fold into A & M. */
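	  /* E.g. with M = 0xFF: ((A & 0x1FF) + B) & 0xFF becomes
	     (A + B) & 0xFF, since N & M == M means the inner AND leaves
	     the low bits of A intact and carries in + only move upward,
	     out of M; ((A | 0x100) + B) & 0xFF folds the same way, since
	     N & M == 0 means those low bits are untouched.  */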
11362 if (TREE_CODE (arg1) == INTEGER_CST)
11363 {
11364 wide_int cst1 = arg1;
11365 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
11366 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11367 && (TREE_CODE (arg0) == PLUS_EXPR
11368 || TREE_CODE (arg0) == MINUS_EXPR
11369 || TREE_CODE (arg0) == NEGATE_EXPR)
11370 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
11371 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
11372 {
11373 tree pmop[2];
11374 int which = 0;
11375 wide_int cst0;
11376
11377 /* Now we know that arg0 is (C + D) or (C - D) or
11378 -C and arg1 (M) is == (1LL << cst) - 1.
11379 Store C into PMOP[0] and D into PMOP[1]. */
11380 pmop[0] = TREE_OPERAND (arg0, 0);
11381 pmop[1] = NULL;
11382 if (TREE_CODE (arg0) != NEGATE_EXPR)
11383 {
11384 pmop[1] = TREE_OPERAND (arg0, 1);
11385 which = 1;
11386 }
11387
11388 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
11389 which = -1;
11390
11391 for (; which >= 0; which--)
11392 switch (TREE_CODE (pmop[which]))
11393 {
11394 case BIT_AND_EXPR:
11395 case BIT_IOR_EXPR:
11396 case BIT_XOR_EXPR:
11397 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
11398 != INTEGER_CST)
11399 break;
11400 cst0 = TREE_OPERAND (pmop[which], 1);
11401 cst0 &= cst1;
11402 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
11403 {
11404 if (cst0 != cst1)
11405 break;
11406 }
11407 else if (cst0 != 0)
11408 break;
11409 /* If C or D is of the form (A & N) where
11410 (N & M) == M, or of the form (A | N) or
11411 (A ^ N) where (N & M) == 0, replace it with A. */
11412 pmop[which] = TREE_OPERAND (pmop[which], 0);
11413 break;
11414 case INTEGER_CST:
11415 		      /* If C or D is an N where (N & M) == 0, it can be
11416 omitted (assumed 0). */
11417 if ((TREE_CODE (arg0) == PLUS_EXPR
11418 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
11419 && (cst1 & pmop[which]) == 0)
11420 pmop[which] = NULL;
11421 break;
11422 default:
11423 break;
11424 }
11425
11426 /* Only build anything new if we optimized one or both arguments
11427 above. */
11428 if (pmop[0] != TREE_OPERAND (arg0, 0)
11429 || (TREE_CODE (arg0) != NEGATE_EXPR
11430 && pmop[1] != TREE_OPERAND (arg0, 1)))
11431 {
11432 tree utype = TREE_TYPE (arg0);
11433 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
11434 {
11435 /* Perform the operations in a type that has defined
11436 overflow behavior. */
11437 utype = unsigned_type_for (TREE_TYPE (arg0));
11438 if (pmop[0] != NULL)
11439 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
11440 if (pmop[1] != NULL)
11441 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
11442 }
11443
11444 if (TREE_CODE (arg0) == NEGATE_EXPR)
11445 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
11446 else if (TREE_CODE (arg0) == PLUS_EXPR)
11447 {
11448 if (pmop[0] != NULL && pmop[1] != NULL)
11449 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
11450 pmop[0], pmop[1]);
11451 else if (pmop[0] != NULL)
11452 tem = pmop[0];
11453 else if (pmop[1] != NULL)
11454 tem = pmop[1];
11455 else
11456 return build_int_cst (type, 0);
11457 }
11458 else if (pmop[0] == NULL)
11459 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
11460 else
11461 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
11462 pmop[0], pmop[1]);
11463 /* TEM is now the new binary +, - or unary - replacement. */
11464 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
11465 fold_convert_loc (loc, utype, arg1));
11466 return fold_convert_loc (loc, type, tem);
11467 }
11468 }
11469 }
11470
11471 t1 = distribute_bit_expr (loc, code, type, arg0, arg1);
11472 if (t1 != NULL_TREE)
11473 return t1;
11474 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
11475 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
11476 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
11477 {
11478 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
11479
11480 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
11481 if (mask == -1)
11482 return
11483 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
11484 }
11485
11486 /* Convert (and (not arg0) (not arg1)) to (not (or (arg0) (arg1))).
11487
11488 This results in more efficient code for machines without a NOR
11489 instruction. Combine will canonicalize to the first form
11490 which will allow use of NOR instructions provided by the
11491 backend if they exist. */
11492 if (TREE_CODE (arg0) == BIT_NOT_EXPR
11493 && TREE_CODE (arg1) == BIT_NOT_EXPR)
11494 {
11495 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
11496 build2 (BIT_IOR_EXPR, type,
11497 fold_convert_loc (loc, type,
11498 TREE_OPERAND (arg0, 0)),
11499 fold_convert_loc (loc, type,
11500 TREE_OPERAND (arg1, 0))));
11501 }
11502
11503 /* If arg0 is derived from the address of an object or function, we may
11504 be able to fold this expression using the object or function's
11505 alignment. */
11506 if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
11507 {
11508 unsigned HOST_WIDE_INT modulus, residue;
11509 unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
11510
11511 modulus = get_pointer_modulus_and_residue (arg0, &residue,
11512 integer_onep (arg1));
11513
11514 /* This works because modulus is a power of 2. If this weren't the
11515 case, we'd have to replace it by its greatest power-of-2
11516 divisor: modulus & -modulus. */
11517 if (low < modulus)
11518 return build_int_cst (type, residue & low);
11519 }
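	/* E.g. if arg0 is the address of an object known to be 8-byte
	   aligned, modulus is 8 and residue is 0, so arg0 & 7 folds to
	   the constant 0.  */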
11520
11521 /* Fold (X << C1) & C2 into (X << C1) & (C2 | ((1 << C1) - 1))
11522 (X >> C1) & C2 into (X >> C1) & (C2 | ~((type) -1 >> C1))
11523 if the new mask might be further optimized. */
11524 if ((TREE_CODE (arg0) == LSHIFT_EXPR
11525 || TREE_CODE (arg0) == RSHIFT_EXPR)
11526 && TYPE_PRECISION (TREE_TYPE (arg0)) <= HOST_BITS_PER_WIDE_INT
11527 && TREE_CODE (arg1) == INTEGER_CST
11528 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11529 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) > 0
11530 && (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11531 < TYPE_PRECISION (TREE_TYPE (arg0))))
11532 {
11533 unsigned int shiftc = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11534 unsigned HOST_WIDE_INT mask = TREE_INT_CST_LOW (arg1);
11535 unsigned HOST_WIDE_INT newmask, zerobits = 0;
11536 tree shift_type = TREE_TYPE (arg0);
11537
11538 if (TREE_CODE (arg0) == LSHIFT_EXPR)
11539 zerobits = ((((unsigned HOST_WIDE_INT) 1) << shiftc) - 1);
11540 else if (TREE_CODE (arg0) == RSHIFT_EXPR
11541 && TYPE_PRECISION (TREE_TYPE (arg0))
11542 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg0))))
11543 {
11544 prec = TYPE_PRECISION (TREE_TYPE (arg0));
11545 tree arg00 = TREE_OPERAND (arg0, 0);
11546 /* See if more bits can be proven as zero because of
11547 zero extension. */
11548 if (TREE_CODE (arg00) == NOP_EXPR
11549 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg00, 0))))
11550 {
11551 tree inner_type = TREE_TYPE (TREE_OPERAND (arg00, 0));
11552 if (TYPE_PRECISION (inner_type)
11553 == GET_MODE_PRECISION (TYPE_MODE (inner_type))
11554 && TYPE_PRECISION (inner_type) < prec)
11555 {
11556 prec = TYPE_PRECISION (inner_type);
11557 /* See if we can shorten the right shift. */
11558 if (shiftc < prec)
11559 shift_type = inner_type;
11560 /* Otherwise X >> C1 is all zeros, so we'll optimize
11561 it into (X, 0) later on by making sure zerobits
11562 is all ones. */
11563 }
11564 }
11565 zerobits = ~(unsigned HOST_WIDE_INT) 0;
11566 if (shiftc < prec)
11567 {
11568 zerobits >>= HOST_BITS_PER_WIDE_INT - shiftc;
11569 zerobits <<= prec - shiftc;
11570 }
11571 	    /* For an arithmetic shift, if the sign bit could be set, zerobits
11572 	       may actually contain sign bits, so no transformation is
11573 	       possible unless MASK masks them all away.  In that
11574 	       case the shift needs to be converted into a logical shift.  */
11575 if (!TYPE_UNSIGNED (TREE_TYPE (arg0))
11576 && prec == TYPE_PRECISION (TREE_TYPE (arg0)))
11577 {
11578 if ((mask & zerobits) == 0)
11579 shift_type = unsigned_type_for (TREE_TYPE (arg0));
11580 else
11581 zerobits = 0;
11582 }
11583 }
11584
11585 /* ((X << 16) & 0xff00) is (X, 0). */
11586 if ((mask & zerobits) == mask)
11587 return omit_one_operand_loc (loc, type,
11588 build_int_cst (type, 0), arg0);
11589
11590 newmask = mask | zerobits;
11591 if (newmask != mask && (newmask & (newmask + 1)) == 0)
11592 {
11593 /* Only do the transformation if NEWMASK is some integer
11594 mode's mask. */
11595 for (prec = BITS_PER_UNIT;
11596 prec < HOST_BITS_PER_WIDE_INT; prec <<= 1)
11597 if (newmask == (((unsigned HOST_WIDE_INT) 1) << prec) - 1)
11598 break;
11599 if (prec < HOST_BITS_PER_WIDE_INT
11600 || newmask == ~(unsigned HOST_WIDE_INT) 0)
11601 {
11602 tree newmaskt;
11603
11604 if (shift_type != TREE_TYPE (arg0))
11605 {
11606 tem = fold_build2_loc (loc, TREE_CODE (arg0), shift_type,
11607 fold_convert_loc (loc, shift_type,
11608 TREE_OPERAND (arg0, 0)),
11609 TREE_OPERAND (arg0, 1));
11610 tem = fold_convert_loc (loc, type, tem);
11611 }
11612 else
11613 tem = op0;
11614 newmaskt = build_int_cst_type (TREE_TYPE (op1), newmask);
11615 if (!tree_int_cst_equal (newmaskt, arg1))
11616 return fold_build2_loc (loc, BIT_AND_EXPR, type, tem, newmaskt);
11617 }
11618 }
11619 }
11620
11621 goto associate;
11622
11623 case RDIV_EXPR:
11624 /* Don't touch a floating-point divide by zero unless the mode
11625 of the constant can represent infinity. */
11626 if (TREE_CODE (arg1) == REAL_CST
11627 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
11628 && real_zerop (arg1))
11629 return NULL_TREE;
11630
11631 /* (-A) / (-B) -> A / B */
11632 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
11633 return fold_build2_loc (loc, RDIV_EXPR, type,
11634 TREE_OPERAND (arg0, 0),
11635 negate_expr (arg1));
11636 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
11637 return fold_build2_loc (loc, RDIV_EXPR, type,
11638 negate_expr (arg0),
11639 TREE_OPERAND (arg1, 0));
11640
11641 /* Convert A/B/C to A/(B*C). */
11642 if (flag_reciprocal_math
11643 && TREE_CODE (arg0) == RDIV_EXPR)
11644 return fold_build2_loc (loc, RDIV_EXPR, type, TREE_OPERAND (arg0, 0),
11645 fold_build2_loc (loc, MULT_EXPR, type,
11646 TREE_OPERAND (arg0, 1), arg1));
11647
11648 /* Convert A/(B/C) to (A/B)*C. */
11649 if (flag_reciprocal_math
11650 && TREE_CODE (arg1) == RDIV_EXPR)
11651 return fold_build2_loc (loc, MULT_EXPR, type,
11652 fold_build2_loc (loc, RDIV_EXPR, type, arg0,
11653 TREE_OPERAND (arg1, 0)),
11654 TREE_OPERAND (arg1, 1));
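	/* E.g. x/2.0/3.0 becomes x/6.0 (one runtime division instead of
	   two) and x/(y/2.0) becomes (x/y)*2.0; both can change rounding,
	   hence the flag_reciprocal_math guard.  */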
11655
11656 /* Convert C1/(X*C2) into (C1/C2)/X. */
11657 if (flag_reciprocal_math
11658 && TREE_CODE (arg1) == MULT_EXPR
11659 && TREE_CODE (arg0) == REAL_CST
11660 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
11661 {
11662 tree tem = const_binop (RDIV_EXPR, arg0,
11663 TREE_OPERAND (arg1, 1));
11664 if (tem)
11665 return fold_build2_loc (loc, RDIV_EXPR, type, tem,
11666 TREE_OPERAND (arg1, 0));
11667 }
11668
11669 if (flag_unsafe_math_optimizations)
11670 {
11671 enum built_in_function fcode0 = builtin_mathfn_code (arg0);
11672 enum built_in_function fcode1 = builtin_mathfn_code (arg1);
11673
11674 /* Optimize sin(x)/cos(x) as tan(x). */
11675 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_COS)
11676 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_COSF)
11677 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_COSL))
11678 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11679 CALL_EXPR_ARG (arg1, 0), 0))
11680 {
11681 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11682
11683 if (tanfn != NULL_TREE)
11684 return build_call_expr_loc (loc, tanfn, 1, CALL_EXPR_ARG (arg0, 0));
11685 }
11686
11687 /* Optimize cos(x)/sin(x) as 1.0/tan(x). */
11688 if (((fcode0 == BUILT_IN_COS && fcode1 == BUILT_IN_SIN)
11689 || (fcode0 == BUILT_IN_COSF && fcode1 == BUILT_IN_SINF)
11690 || (fcode0 == BUILT_IN_COSL && fcode1 == BUILT_IN_SINL))
11691 && operand_equal_p (CALL_EXPR_ARG (arg0, 0),
11692 CALL_EXPR_ARG (arg1, 0), 0))
11693 {
11694 tree tanfn = mathfn_built_in (type, BUILT_IN_TAN);
11695
11696 if (tanfn != NULL_TREE)
11697 {
11698 tree tmp = build_call_expr_loc (loc, tanfn, 1,
11699 CALL_EXPR_ARG (arg0, 0));
11700 return fold_build2_loc (loc, RDIV_EXPR, type,
11701 build_real (type, dconst1), tmp);
11702 }
11703 }
11704
11705 /* Optimize sin(x)/tan(x) as cos(x) if we don't care about
11706 NaNs or Infinities. */
11707 if (((fcode0 == BUILT_IN_SIN && fcode1 == BUILT_IN_TAN)
11708 || (fcode0 == BUILT_IN_SINF && fcode1 == BUILT_IN_TANF)
11709 || (fcode0 == BUILT_IN_SINL && fcode1 == BUILT_IN_TANL)))
11710 {
11711 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11712 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11713
11714 if (! HONOR_NANS (arg00)
11715 && ! HONOR_INFINITIES (element_mode (arg00))
11716 && operand_equal_p (arg00, arg01, 0))
11717 {
11718 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11719
11720 if (cosfn != NULL_TREE)
11721 return build_call_expr_loc (loc, cosfn, 1, arg00);
11722 }
11723 }
11724
11725 /* Optimize tan(x)/sin(x) as 1.0/cos(x) if we don't care about
11726 NaNs or Infinities. */
11727 if (((fcode0 == BUILT_IN_TAN && fcode1 == BUILT_IN_SIN)
11728 || (fcode0 == BUILT_IN_TANF && fcode1 == BUILT_IN_SINF)
11729 || (fcode0 == BUILT_IN_TANL && fcode1 == BUILT_IN_SINL)))
11730 {
11731 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11732 tree arg01 = CALL_EXPR_ARG (arg1, 0);
11733
11734 if (! HONOR_NANS (arg00)
11735 && ! HONOR_INFINITIES (element_mode (arg00))
11736 && operand_equal_p (arg00, arg01, 0))
11737 {
11738 tree cosfn = mathfn_built_in (type, BUILT_IN_COS);
11739
11740 if (cosfn != NULL_TREE)
11741 {
11742 tree tmp = build_call_expr_loc (loc, cosfn, 1, arg00);
11743 return fold_build2_loc (loc, RDIV_EXPR, type,
11744 build_real (type, dconst1),
11745 tmp);
11746 }
11747 }
11748 }
11749
11750 /* Optimize pow(x,c)/x as pow(x,c-1). */
11751 if (fcode0 == BUILT_IN_POW
11752 || fcode0 == BUILT_IN_POWF
11753 || fcode0 == BUILT_IN_POWL)
11754 {
11755 tree arg00 = CALL_EXPR_ARG (arg0, 0);
11756 tree arg01 = CALL_EXPR_ARG (arg0, 1);
11757 if (TREE_CODE (arg01) == REAL_CST
11758 && !TREE_OVERFLOW (arg01)
11759 && operand_equal_p (arg1, arg00, 0))
11760 {
11761 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg0), 0);
11762 REAL_VALUE_TYPE c;
11763 tree arg;
11764
11765 c = TREE_REAL_CST (arg01);
11766 real_arithmetic (&c, MINUS_EXPR, &c, &dconst1);
11767 arg = build_real (type, c);
11768 return build_call_expr_loc (loc, powfn, 2, arg1, arg);
11769 }
11770 }
11771
11772 /* Optimize a/root(b/c) into a*root(c/b). */
11773 if (BUILTIN_ROOT_P (fcode1))
11774 {
11775 tree rootarg = CALL_EXPR_ARG (arg1, 0);
11776
11777 if (TREE_CODE (rootarg) == RDIV_EXPR)
11778 {
11779 tree rootfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11780 tree b = TREE_OPERAND (rootarg, 0);
11781 tree c = TREE_OPERAND (rootarg, 1);
11782
11783 tree tmp = fold_build2_loc (loc, RDIV_EXPR, type, c, b);
11784
11785 tmp = build_call_expr_loc (loc, rootfn, 1, tmp);
11786 return fold_build2_loc (loc, MULT_EXPR, type, arg0, tmp);
11787 }
11788 }
11789
11790 /* Optimize x/expN(y) into x*expN(-y). */
11791 if (BUILTIN_EXPONENT_P (fcode1))
11792 {
11793 tree expfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11794 tree arg = negate_expr (CALL_EXPR_ARG (arg1, 0));
11795 arg1 = build_call_expr_loc (loc,
11796 expfn, 1,
11797 fold_convert_loc (loc, type, arg));
11798 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11799 }
11800
11801 /* Optimize x/pow(y,z) into x*pow(y,-z). */
11802 if (fcode1 == BUILT_IN_POW
11803 || fcode1 == BUILT_IN_POWF
11804 || fcode1 == BUILT_IN_POWL)
11805 {
11806 tree powfn = TREE_OPERAND (CALL_EXPR_FN (arg1), 0);
11807 tree arg10 = CALL_EXPR_ARG (arg1, 0);
11808 tree arg11 = CALL_EXPR_ARG (arg1, 1);
11809 tree neg11 = fold_convert_loc (loc, type,
11810 negate_expr (arg11));
11811 arg1 = build_call_expr_loc (loc, powfn, 2, arg10, neg11);
11812 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11813 }
11814 }
11815 return NULL_TREE;
11816
11817 case TRUNC_DIV_EXPR:
11818 /* Optimize (X & (-A)) / A where A is a power of 2,
11819 to X >> log2(A) */
11820 if (TREE_CODE (arg0) == BIT_AND_EXPR
11821 && !TYPE_UNSIGNED (type) && TREE_CODE (arg1) == INTEGER_CST
11822 && integer_pow2p (arg1) && tree_int_cst_sgn (arg1) > 0)
11823 {
11824 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (arg1),
11825 arg1, TREE_OPERAND (arg0, 1));
11826 	  if (sum && integer_zerop (sum))
	    {
11827 	      tree pow2 = build_int_cst (integer_type_node,
11828 					 wi::exact_log2 (arg1));
11829 	      return fold_build2_loc (loc, RSHIFT_EXPR, type,
11830 				      TREE_OPERAND (arg0, 0), pow2);
11831 	    }
11832 }
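	/* E.g. (X & -8) / 8 becomes X >> 3 for signed X: X & -8 clears the
	   low three bits, so it is an exact multiple of 8 and the
	   truncating division degenerates to an arithmetic right shift.  */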
11833
11834 /* Fall through */
11835
11836 case FLOOR_DIV_EXPR:
11837 /* Simplify A / (B << N) where A and B are positive and B is
11838 a power of 2, to A >> (N + log2(B)). */
11839 strict_overflow_p = false;
11840 if (TREE_CODE (arg1) == LSHIFT_EXPR
11841 && (TYPE_UNSIGNED (type)
11842 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
11843 {
11844 tree sval = TREE_OPERAND (arg1, 0);
11845 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
11846 {
11847 tree sh_cnt = TREE_OPERAND (arg1, 1);
11848 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
11849 wi::exact_log2 (sval));
11850
11851 if (strict_overflow_p)
11852 fold_overflow_warning (("assuming signed overflow does not "
11853 "occur when simplifying A / (B << N)"),
11854 WARN_STRICT_OVERFLOW_MISC);
11855
11856 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
11857 sh_cnt, pow2);
11858 return fold_build2_loc (loc, RSHIFT_EXPR, type,
11859 fold_convert_loc (loc, type, arg0), sh_cnt);
11860 }
11861 }
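	/* E.g. for unsigned A, A / (4 << N) becomes A >> (N + 2), folding
	   log2(4) == 2 into the shift count.  */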
11862
11863 /* Fall through */
11864
11865 case ROUND_DIV_EXPR:
11866 case CEIL_DIV_EXPR:
11867 case EXACT_DIV_EXPR:
11868 if (integer_zerop (arg1))
11869 return NULL_TREE;
11870
11871 /* Convert -A / -B to A / B when the type is signed and overflow is
11872 undefined. */
11873 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11874 && TREE_CODE (arg0) == NEGATE_EXPR
11875 && negate_expr_p (arg1))
11876 {
11877 if (INTEGRAL_TYPE_P (type))
11878 fold_overflow_warning (("assuming signed overflow does not occur "
11879 "when distributing negation across "
11880 "division"),
11881 WARN_STRICT_OVERFLOW_MISC);
11882 return fold_build2_loc (loc, code, type,
11883 fold_convert_loc (loc, type,
11884 TREE_OPERAND (arg0, 0)),
11885 fold_convert_loc (loc, type,
11886 negate_expr (arg1)));
11887 }
11888 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
11889 && TREE_CODE (arg1) == NEGATE_EXPR
11890 && negate_expr_p (arg0))
11891 {
11892 if (INTEGRAL_TYPE_P (type))
11893 fold_overflow_warning (("assuming signed overflow does not occur "
11894 "when distributing negation across "
11895 "division"),
11896 WARN_STRICT_OVERFLOW_MISC);
11897 return fold_build2_loc (loc, code, type,
11898 fold_convert_loc (loc, type,
11899 negate_expr (arg0)),
11900 fold_convert_loc (loc, type,
11901 TREE_OPERAND (arg1, 0)));
11902 }
11903
11904 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
11905 operation, EXACT_DIV_EXPR.
11906
11907 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
11908 At one time others generated faster code; it's not clear whether they
11909 still do after the last round of changes to the DIV code in expmed.c.  */
11910 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
11911 && multiple_of_p (type, arg0, arg1))
11912 return fold_build2_loc (loc, EXACT_DIV_EXPR, type, arg0, arg1);
11913
11914 strict_overflow_p = false;
11915 if (TREE_CODE (arg1) == INTEGER_CST
11916 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11917 &strict_overflow_p)))
11918 {
11919 if (strict_overflow_p)
11920 fold_overflow_warning (("assuming signed overflow does not occur "
11921 "when simplifying division"),
11922 WARN_STRICT_OVERFLOW_MISC);
11923 return fold_convert_loc (loc, type, tem);
11924 }
11925
11926 return NULL_TREE;
11927
11928 case CEIL_MOD_EXPR:
11929 case FLOOR_MOD_EXPR:
11930 case ROUND_MOD_EXPR:
11931 case TRUNC_MOD_EXPR:
11932 strict_overflow_p = false;
11933 if (TREE_CODE (arg1) == INTEGER_CST
11934 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
11935 &strict_overflow_p)))
11936 {
11937 if (strict_overflow_p)
11938 fold_overflow_warning (("assuming signed overflow does not occur "
11939 "when simplifying modulus"),
11940 WARN_STRICT_OVERFLOW_MISC);
11941 return fold_convert_loc (loc, type, tem);
11942 }
11943
11944 return NULL_TREE;
11945
11946 case LROTATE_EXPR:
11947 case RROTATE_EXPR:
11948 case RSHIFT_EXPR:
11949 case LSHIFT_EXPR:
11950 /* Since a negative shift count is not well-defined,
11951 don't try to compute it in the compiler. */
11952 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
11953 return NULL_TREE;
11954
11955 prec = element_precision (type);
11956
11957 /* Turn (a OP c1) OP c2 into a OP (c1+c2). */
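/* E.g. (a << 3) << 4 becomes a << 7; sums that reach or exceed the
   precision are handled specially below. */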
11958 if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
11959 && tree_to_uhwi (arg1) < prec
11960 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11961 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11962 {
11963 unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
11964 + tree_to_uhwi (arg1));
11965
11966 /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
11967 being well defined. */
11968 if (low >= prec)
11969 {
11970 if (code == LROTATE_EXPR || code == RROTATE_EXPR)
11971 low = low % prec;
11972 else if (TYPE_UNSIGNED (type) || code == LSHIFT_EXPR)
11973 return omit_one_operand_loc (loc, type, build_zero_cst (type),
11974 TREE_OPERAND (arg0, 0));
11975 else
11976 low = prec - 1;
11977 }
11978
11979 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
11980 build_int_cst (TREE_TYPE (arg1), low));
11981 }
11982
11983 /* Transform (x >> c) << c into x & (-1<<c), or transform (x << c) >> c
11984 into x & ((unsigned)-1 >> c) for unsigned types. */
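/* For example, in a 32-bit unsigned type, (x >> 4) << 4 becomes
   x & 0xfffffff0. */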
11985 if (((code == LSHIFT_EXPR && TREE_CODE (arg0) == RSHIFT_EXPR)
11986 || (TYPE_UNSIGNED (type)
11987 && code == RSHIFT_EXPR && TREE_CODE (arg0) == LSHIFT_EXPR))
11988 && tree_fits_uhwi_p (arg1)
11989 && tree_to_uhwi (arg1) < prec
11990 && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
11991 && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
11992 {
11993 HOST_WIDE_INT low0 = tree_to_uhwi (TREE_OPERAND (arg0, 1));
11994 HOST_WIDE_INT low1 = tree_to_uhwi (arg1);
11995 tree lshift;
11996 tree arg00;
11997
11998 if (low0 == low1)
11999 {
12000 arg00 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12001
12002 lshift = build_minus_one_cst (type);
12003 lshift = const_binop (code, lshift, arg1);
12004
12005 return fold_build2_loc (loc, BIT_AND_EXPR, type, arg00, lshift);
12006 }
12007 }
12008
12009 /* If we have a rotate of a bit operation with the rotate count and
12010 the second operand of the bit operation both constant,
12011 permute the two operations. */
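/* E.g. in a 32-bit type, (x & 0xff) rotated right by 8 becomes
   (x rotated right by 8) & 0xff000000. */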
12012 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12013 && (TREE_CODE (arg0) == BIT_AND_EXPR
12014 || TREE_CODE (arg0) == BIT_IOR_EXPR
12015 || TREE_CODE (arg0) == BIT_XOR_EXPR)
12016 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12017 return fold_build2_loc (loc, TREE_CODE (arg0), type,
12018 fold_build2_loc (loc, code, type,
12019 TREE_OPERAND (arg0, 0), arg1),
12020 fold_build2_loc (loc, code, type,
12021 TREE_OPERAND (arg0, 1), arg1));
12022
12023 /* Two consecutive rotates adding up to some integer
12024 multiple of the precision of the type can be ignored. */
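/* E.g. in a 32-bit type, rotating x right by 12 and then by 20
   leaves x unchanged. */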
12025 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
12026 && TREE_CODE (arg0) == RROTATE_EXPR
12027 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12028 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
12029 prec) == 0)
12030 return TREE_OPERAND (arg0, 0);
12031
12032 /* Fold (X & C2) << C1 into (X << C1) & (C2 << C1)
12033 (X & C2) >> C1 into (X >> C1) & (C2 >> C1)
12034 if the latter can be further optimized. */
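/* E.g. (x & 0xf0) >> 4 becomes (x >> 4) & 0xf, exposing a cheaper
   mask. */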
12035 if ((code == LSHIFT_EXPR || code == RSHIFT_EXPR)
12036 && TREE_CODE (arg0) == BIT_AND_EXPR
12037 && TREE_CODE (arg1) == INTEGER_CST
12038 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12039 {
12040 tree mask = fold_build2_loc (loc, code, type,
12041 fold_convert_loc (loc, type,
12042 TREE_OPERAND (arg0, 1)),
12043 arg1);
12044 tree shift = fold_build2_loc (loc, code, type,
12045 fold_convert_loc (loc, type,
12046 TREE_OPERAND (arg0, 0)),
12047 arg1);
12048 tem = fold_binary_loc (loc, BIT_AND_EXPR, type, shift, mask);
12049 if (tem)
12050 return tem;
12051 }
12052
12053 return NULL_TREE;
12054
12055 case MIN_EXPR:
12056 tem = fold_minmax (loc, MIN_EXPR, type, arg0, arg1);
12057 if (tem)
12058 return tem;
12059 goto associate;
12060
12061 case MAX_EXPR:
12062 tem = fold_minmax (loc, MAX_EXPR, type, arg0, arg1);
12063 if (tem)
12064 return tem;
12065 goto associate;
12066
12067 case TRUTH_ANDIF_EXPR:
12068 /* Note that the operands of this must be ints
12069 and their values must be 0 or 1.
12070 ("true" is a fixed value perhaps depending on the language.) */
12071 /* If first arg is constant zero, return it. */
12072 if (integer_zerop (arg0))
12073 return fold_convert_loc (loc, type, arg0);
12074 case TRUTH_AND_EXPR:
12075 /* If either arg is constant true, drop it. */
12076 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12077 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12078 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
12079 /* Preserve sequence points. */
12080 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12081 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12082 /* If second arg is constant zero, result is zero, but first arg
12083 must be evaluated. */
12084 if (integer_zerop (arg1))
12085 return omit_one_operand_loc (loc, type, arg1, arg0);
12086 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
12087 case will be handled here. */
12088 if (integer_zerop (arg0))
12089 return omit_one_operand_loc (loc, type, arg0, arg1);
12090
12091 /* !X && X is always false. */
12092 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12093 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12094 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
12095 /* X && !X is always false. */
12096 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12097 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12098 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12099
12100 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
12101 means A >= Y && A != MAX, but in this case we know that
12102 A < X <= MAX. */
12103
12104 if (!TREE_SIDE_EFFECTS (arg0)
12105 && !TREE_SIDE_EFFECTS (arg1))
12106 {
12107 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
12108 if (tem && !operand_equal_p (tem, arg0, 0))
12109 return fold_build2_loc (loc, code, type, tem, arg1);
12110
12111 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
12112 if (tem && !operand_equal_p (tem, arg1, 0))
12113 return fold_build2_loc (loc, code, type, arg0, tem);
12114 }
12115
12116 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12117 != NULL_TREE)
12118 return tem;
12119
12120 return NULL_TREE;
12121
12122 case TRUTH_ORIF_EXPR:
12123 /* Note that the operands of this must be ints
12124 and their values must be 0 or true.
12125 ("true" is a fixed value perhaps depending on the language.) */
12126 /* If first arg is constant true, return it. */
12127 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12128 return fold_convert_loc (loc, type, arg0);
12129 case TRUTH_OR_EXPR:
12130 /* If either arg is constant zero, drop it. */
12131 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
12132 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
12133 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
12134 /* Preserve sequence points. */
12135 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
12136 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12137 /* If second arg is constant true, result is true, but we must
12138 evaluate first arg. */
12139 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
12140 return omit_one_operand_loc (loc, type, arg1, arg0);
12141 /* Likewise for first arg, but note this only occurs here for
12142 TRUTH_OR_EXPR. */
12143 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
12144 return omit_one_operand_loc (loc, type, arg0, arg1);
12145
12146 /* !X || X is always true. */
12147 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12148 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12149 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12150 /* X || !X is always true. */
12151 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12152 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12153 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12154
12155 /* (X && !Y) || (!X && Y) is X ^ Y.  */
12156 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
12157 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
12158 {
12159 tree a0, a1, l0, l1, n0, n1;
12160
12161 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
12162 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
12163
12164 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
12165 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
12166
12167 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
12168 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
12169
12170 if ((operand_equal_p (n0, a0, 0)
12171 && operand_equal_p (n1, a1, 0))
12172 || (operand_equal_p (n0, a1, 0)
12173 && operand_equal_p (n1, a0, 0)))
12174 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
12175 }
12176
12177 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
12178 != NULL_TREE)
12179 return tem;
12180
12181 return NULL_TREE;
12182
12183 case TRUTH_XOR_EXPR:
12184 /* If the second arg is constant zero, drop it. */
12185 if (integer_zerop (arg1))
12186 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12187 /* If the second arg is constant true, this is a logical inversion. */
12188 if (integer_onep (arg1))
12189 {
12190 tem = invert_truthvalue_loc (loc, arg0);
12191 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
12192 }
12193 /* Identical arguments cancel to zero. */
12194 if (operand_equal_p (arg0, arg1, 0))
12195 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12196
12197 /* !X ^ X is always true. */
12198 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
12199 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
12200 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
12201
12202 /* X ^ !X is always true. */
12203 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
12204 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
12205 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12206
12207 return NULL_TREE;
12208
12209 case EQ_EXPR:
12210 case NE_EXPR:
12211 STRIP_NOPS (arg0);
12212 STRIP_NOPS (arg1);
12213
12214 tem = fold_comparison (loc, code, type, op0, op1);
12215 if (tem != NULL_TREE)
12216 return tem;
12217
12218 /* bool_var != 0 becomes bool_var. */
12219 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12220 && code == NE_EXPR)
12221 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12222
12223 /* bool_var == 1 becomes bool_var. */
12224 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12225 && code == EQ_EXPR)
12226 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12227
12228 /* bool_var != 1 becomes !bool_var. */
12229 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
12230 && code == NE_EXPR)
12231 return fold_convert_loc (loc, type,
12232 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12233 TREE_TYPE (arg0), arg0));
12234
12235 /* bool_var == 0 becomes !bool_var. */
12236 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
12237 && code == EQ_EXPR)
12238 return fold_convert_loc (loc, type,
12239 fold_build1_loc (loc, TRUTH_NOT_EXPR,
12240 TREE_TYPE (arg0), arg0));
12241
12242 /* !exp != 0 becomes !exp.  */
12243 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
12244 && code == NE_EXPR)
12245 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
12246
12247 /* If this is an equality comparison of the address of two non-weak,
12248 unaliased symbols, neither of which is extern (since we do not
12249 have access to attributes for externs), then we know the result. */
12250 if (TREE_CODE (arg0) == ADDR_EXPR
12251 && DECL_P (TREE_OPERAND (arg0, 0))
12252 && TREE_CODE (arg1) == ADDR_EXPR
12253 && DECL_P (TREE_OPERAND (arg1, 0)))
12254 {
12255 int equal;
12256
12257 if (decl_in_symtab_p (TREE_OPERAND (arg0, 0))
12258 && decl_in_symtab_p (TREE_OPERAND (arg1, 0)))
12259 equal = symtab_node::get_create (TREE_OPERAND (arg0, 0))
12260 ->equal_address_to (symtab_node::get_create
12261 (TREE_OPERAND (arg1, 0)));
12262 else
12263 equal = TREE_OPERAND (arg0, 0) == TREE_OPERAND (arg1, 0);
12264 if (equal != 2)
12265 return constant_boolean_node (equal
12266 ? code == EQ_EXPR : code != EQ_EXPR,
12267 type);
12268 }
12269
12270 /* Similarly for a NEGATE_EXPR. */
12271 if (TREE_CODE (arg0) == NEGATE_EXPR
12272 && TREE_CODE (arg1) == INTEGER_CST
12273 && 0 != (tem = negate_expr (fold_convert_loc (loc, TREE_TYPE (arg0),
12274 arg1)))
12275 && TREE_CODE (tem) == INTEGER_CST
12276 && !TREE_OVERFLOW (tem))
12277 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
12278
12279 /* Similarly for a BIT_XOR_EXPR; X ^ C1 == C2 is X == (C1 ^ C2). */
12280 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12281 && TREE_CODE (arg1) == INTEGER_CST
12282 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12283 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12284 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg0),
12285 fold_convert_loc (loc,
12286 TREE_TYPE (arg0),
12287 arg1),
12288 TREE_OPERAND (arg0, 1)));
12289
12290 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
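/* E.g. x + y == x becomes y == 0; x is still evaluated for its
   side effects. */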
12291 if ((TREE_CODE (arg0) == PLUS_EXPR
12292 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
12293 || TREE_CODE (arg0) == MINUS_EXPR)
12294 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12295 0)),
12296 arg1, 0)
12297 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
12298 || POINTER_TYPE_P (TREE_TYPE (arg0))))
12299 {
12300 tree val = TREE_OPERAND (arg0, 1);
12301 return omit_two_operands_loc (loc, type,
12302 fold_build2_loc (loc, code, type,
12303 val,
12304 build_int_cst (TREE_TYPE (val),
12305 0)),
12306 TREE_OPERAND (arg0, 0), arg1);
12307 }
12308
12309 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
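/* When C is odd, C - X == X would require 2*X == C, which is
   impossible in an integer type, so EQ folds to false and NE to
   true. */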
12310 if (TREE_CODE (arg0) == MINUS_EXPR
12311 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
12312 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
12313 1)),
12314 arg1, 0)
12315 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
12316 {
12317 return omit_two_operands_loc (loc, type,
12318 code == NE_EXPR
12319 ? boolean_true_node : boolean_false_node,
12320 TREE_OPERAND (arg0, 1), arg1);
12321 }
12322
12323 /* Convert ABS_EXPR<x> == 0 or ABS_EXPR<x> != 0 to x == 0 or x != 0. */
12324 if (TREE_CODE (arg0) == ABS_EXPR
12325 && (integer_zerop (arg1) || real_zerop (arg1)))
12326 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), arg1);
12327
12328 /* If this is an EQ or NE comparison with zero and ARG0 is
12329 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
12330 two operations, but the latter can be done in one less insn
12331 on machines that have only two-operand insns or on which a
12332 constant cannot be the first operand. */
12333 if (TREE_CODE (arg0) == BIT_AND_EXPR
12334 && integer_zerop (arg1))
12335 {
12336 tree arg00 = TREE_OPERAND (arg0, 0);
12337 tree arg01 = TREE_OPERAND (arg0, 1);
12338 if (TREE_CODE (arg00) == LSHIFT_EXPR
12339 && integer_onep (TREE_OPERAND (arg00, 0)))
12340 {
12341 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
12342 arg01, TREE_OPERAND (arg00, 1));
12343 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12344 build_int_cst (TREE_TYPE (arg0), 1));
12345 return fold_build2_loc (loc, code, type,
12346 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12347 arg1);
12348 }
12349 else if (TREE_CODE (arg01) == LSHIFT_EXPR
12350 && integer_onep (TREE_OPERAND (arg01, 0)))
12351 {
12352 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
12353 arg00, TREE_OPERAND (arg01, 1));
12354 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
12355 build_int_cst (TREE_TYPE (arg0), 1));
12356 return fold_build2_loc (loc, code, type,
12357 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
12358 arg1);
12359 }
12360 }
12361
12362 /* If this is an NE or EQ comparison of zero against the result of a
12363 signed MOD operation whose second operand is a power of 2, make
12364 the MOD operation unsigned since it is simpler and equivalent. */
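/* E.g. the signed test x % 16 != 0 becomes (unsigned) x % 16 != 0,
   which can be expanded as a simple mask of the low four bits. */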
12365 if (integer_zerop (arg1)
12366 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
12367 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
12368 || TREE_CODE (arg0) == CEIL_MOD_EXPR
12369 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
12370 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
12371 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12372 {
12373 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
12374 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
12375 fold_convert_loc (loc, newtype,
12376 TREE_OPERAND (arg0, 0)),
12377 fold_convert_loc (loc, newtype,
12378 TREE_OPERAND (arg0, 1)));
12379
12380 return fold_build2_loc (loc, code, type, newmod,
12381 fold_convert_loc (loc, newtype, arg1));
12382 }
12383
12384 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
12385 C1 is a valid shift constant, and C2 is a power of two, i.e.
12386 a single bit. */
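/* E.g. ((x >> 3) & 4) != 0 becomes (x & 32) != 0, since the shifted
   mask 4 << 3 == 32 still fits in the precision. */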
12387 if (TREE_CODE (arg0) == BIT_AND_EXPR
12388 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
12389 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
12390 == INTEGER_CST
12391 && integer_pow2p (TREE_OPERAND (arg0, 1))
12392 && integer_zerop (arg1))
12393 {
12394 tree itype = TREE_TYPE (arg0);
12395 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
12396 prec = TYPE_PRECISION (itype);
12397
12398 /* Check for a valid shift count. */
12399 if (wi::ltu_p (arg001, prec))
12400 {
12401 tree arg01 = TREE_OPERAND (arg0, 1);
12402 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12403 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
12404 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
12405 can be rewritten as (X & (C2 << C1)) != 0. */
12406 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
12407 {
12408 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
12409 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
12410 return fold_build2_loc (loc, code, type, tem,
12411 fold_convert_loc (loc, itype, arg1));
12412 }
12413 /* Otherwise, for signed (arithmetic) shifts,
12414 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
12415 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
12416 else if (!TYPE_UNSIGNED (itype))
12417 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
12418 arg000, build_int_cst (itype, 0));
12419 /* Otherwise, for unsigned (logical) shifts,
12420 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
12421 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
12422 else
12423 return omit_one_operand_loc (loc, type,
12424 code == EQ_EXPR ? integer_one_node
12425 : integer_zero_node,
12426 arg000);
12427 }
12428 }
12429
12430 /* If we have (A & C) == C where C is a power of 2, convert this into
12431 (A & C) != 0. Similarly for NE_EXPR. */
12432 if (TREE_CODE (arg0) == BIT_AND_EXPR
12433 && integer_pow2p (TREE_OPERAND (arg0, 1))
12434 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12435 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12436 arg0, fold_convert_loc (loc, TREE_TYPE (arg0),
12437 integer_zero_node));
12438
12439 /* If we have (A & C) != 0 or (A & C) == 0 and C is the sign
12440 bit, then fold the expression into A < 0 or A >= 0. */
12441 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1, type);
12442 if (tem)
12443 return tem;
12444
12445 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
12446 Similarly for NE_EXPR. */
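/* E.g. (a & 3) == 4 is always false, since 4 & ~3 is nonzero; the
   NE_EXPR form is correspondingly always true. */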
12447 if (TREE_CODE (arg0) == BIT_AND_EXPR
12448 && TREE_CODE (arg1) == INTEGER_CST
12449 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12450 {
12451 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
12452 TREE_TYPE (TREE_OPERAND (arg0, 1)),
12453 TREE_OPERAND (arg0, 1));
12454 tree dandnotc
12455 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12456 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
12457 notc);
12458 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12459 if (integer_nonzerop (dandnotc))
12460 return omit_one_operand_loc (loc, type, rslt, arg0);
12461 }
12462
12463 /* If we have (A | C) == D where C & ~D != 0, convert this into 0.
12464 Similarly for NE_EXPR. */
12465 if (TREE_CODE (arg0) == BIT_IOR_EXPR
12466 && TREE_CODE (arg1) == INTEGER_CST
12467 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12468 {
12469 tree notd = fold_build1_loc (loc, BIT_NOT_EXPR, TREE_TYPE (arg1), arg1);
12470 tree candnotd
12471 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12472 TREE_OPERAND (arg0, 1),
12473 fold_convert_loc (loc, TREE_TYPE (arg0), notd));
12474 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
12475 if (integer_nonzerop (candnotd))
12476 return omit_one_operand_loc (loc, type, rslt, arg0);
12477 }
12478
12479 /* If this is a comparison of a field, we may be able to simplify it. */
12480 if ((TREE_CODE (arg0) == COMPONENT_REF
12481 || TREE_CODE (arg0) == BIT_FIELD_REF)
12482 /* Handle the constant case even without -O
12483 to make sure the warnings are given. */
12484 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
12485 {
12486 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
12487 if (t1)
12488 return t1;
12489 }
12490
12491 /* Optimize comparisons of strlen vs zero to a compare of the
12492 first character of the string vs zero. To wit,
12493 strlen(ptr) == 0 => *ptr == 0
12494 strlen(ptr) != 0 => *ptr != 0
12495 Other cases should reduce to one of these two (or a constant)
12496 due to the return value of strlen being unsigned. */
12497 if (TREE_CODE (arg0) == CALL_EXPR
12498 && integer_zerop (arg1))
12499 {
12500 tree fndecl = get_callee_fndecl (arg0);
12501
12502 if (fndecl
12503 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
12504 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
12505 && call_expr_nargs (arg0) == 1
12506 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
12507 {
12508 tree iref = build_fold_indirect_ref_loc (loc,
12509 CALL_EXPR_ARG (arg0, 0));
12510 return fold_build2_loc (loc, code, type, iref,
12511 build_int_cst (TREE_TYPE (iref), 0));
12512 }
12513 }
12514
12515 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
12516 of X. Similarly fold (X >> C) == 0 into X >= 0. */
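/* E.g. for a 32-bit int x, (x >> 31) != 0 becomes x < 0. */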
12517 if (TREE_CODE (arg0) == RSHIFT_EXPR
12518 && integer_zerop (arg1)
12519 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12520 {
12521 tree arg00 = TREE_OPERAND (arg0, 0);
12522 tree arg01 = TREE_OPERAND (arg0, 1);
12523 tree itype = TREE_TYPE (arg00);
12524 if (wi::eq_p (arg01, element_precision (itype) - 1))
12525 {
12526 if (TYPE_UNSIGNED (itype))
12527 {
12528 itype = signed_type_for (itype);
12529 arg00 = fold_convert_loc (loc, itype, arg00);
12530 }
12531 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
12532 type, arg00, build_zero_cst (itype));
12533 }
12534 }
12535
12536 /* (X ^ Y) == 0 becomes X == Y, and (X ^ Y) != 0 becomes X != Y. */
12537 if (integer_zerop (arg1)
12538 && TREE_CODE (arg0) == BIT_XOR_EXPR)
12539 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12540 TREE_OPERAND (arg0, 1));
12541
12542 /* (X ^ Y) == Y becomes X == 0. We know that Y has no side-effects. */
12543 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12544 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
12545 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12546 build_zero_cst (TREE_TYPE (arg0)));
12547 /* Likewise (X ^ Y) == X becomes Y == 0. X has no side-effects. */
12548 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12550 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
12551 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 1),
12552 build_zero_cst (TREE_TYPE (arg0)));
12553
12554 /* (X ^ C1) op C2 can be rewritten as X op (C1 ^ C2). */
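/* E.g. (x ^ 5) == 3 becomes x == 6. */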
12555 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12556 && TREE_CODE (arg1) == INTEGER_CST
12557 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
12558 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0),
12559 fold_build2_loc (loc, BIT_XOR_EXPR, TREE_TYPE (arg1),
12560 TREE_OPERAND (arg0, 1), arg1));
12561
12562 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
12563 (X & C) == 0 when C is a single bit. */
12564 if (TREE_CODE (arg0) == BIT_AND_EXPR
12565 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
12566 && integer_zerop (arg1)
12567 && integer_pow2p (TREE_OPERAND (arg0, 1)))
12568 {
12569 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
12570 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
12571 TREE_OPERAND (arg0, 1));
12572 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
12573 type, tem,
12574 fold_convert_loc (loc, TREE_TYPE (arg0),
12575 arg1));
12576 }
12577
12578 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
12579 constant C is a power of two, i.e. a single bit. */
12580 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12581 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
12582 && integer_zerop (arg1)
12583 && integer_pow2p (TREE_OPERAND (arg0, 1))
12584 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12585 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12586 {
12587 tree arg00 = TREE_OPERAND (arg0, 0);
12588 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12589 arg00, build_int_cst (TREE_TYPE (arg00), 0));
12590 }
12591
12592 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
12593 when C is a power of two, i.e. a single bit. */
12594 if (TREE_CODE (arg0) == BIT_AND_EXPR
12595 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
12596 && integer_zerop (arg1)
12597 && integer_pow2p (TREE_OPERAND (arg0, 1))
12598 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
12599 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
12600 {
12601 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
12602 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
12603 arg000, TREE_OPERAND (arg0, 1));
12604 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
12605 tem, build_int_cst (TREE_TYPE (tem), 0));
12606 }
12607
12608 if (integer_zerop (arg1)
12609 && tree_expr_nonzero_p (arg0))
12610 {
12611 tree res = constant_boolean_node (code == NE_EXPR, type);
12612 return omit_one_operand_loc (loc, type, res, arg0);
12613 }
12614
12615 /* Fold -X op -Y as X op Y, where op is eq/ne. */
12616 if (TREE_CODE (arg0) == NEGATE_EXPR
12617 && TREE_CODE (arg1) == NEGATE_EXPR)
12618 return fold_build2_loc (loc, code, type,
12619 TREE_OPERAND (arg0, 0),
12620 fold_convert_loc (loc, TREE_TYPE (arg0),
12621 TREE_OPERAND (arg1, 0)));
12622
12623 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0, and symmetries. */
12624 if (TREE_CODE (arg0) == BIT_AND_EXPR
12625 && TREE_CODE (arg1) == BIT_AND_EXPR)
12626 {
12627 tree arg00 = TREE_OPERAND (arg0, 0);
12628 tree arg01 = TREE_OPERAND (arg0, 1);
12629 tree arg10 = TREE_OPERAND (arg1, 0);
12630 tree arg11 = TREE_OPERAND (arg1, 1);
12631 tree itype = TREE_TYPE (arg0);
12632
12633 if (operand_equal_p (arg01, arg11, 0))
12634 return fold_build2_loc (loc, code, type,
12635 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12636 fold_build2_loc (loc,
12637 BIT_XOR_EXPR, itype,
12638 arg00, arg10),
12639 arg01),
12640 build_zero_cst (itype));
12641
12642 if (operand_equal_p (arg01, arg10, 0))
12643 return fold_build2_loc (loc, code, type,
12644 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12645 fold_build2_loc (loc,
12646 BIT_XOR_EXPR, itype,
12647 arg00, arg11),
12648 arg01),
12649 build_zero_cst (itype));
12650
12651 if (operand_equal_p (arg00, arg11, 0))
12652 return fold_build2_loc (loc, code, type,
12653 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12654 fold_build2_loc (loc,
12655 BIT_XOR_EXPR, itype,
12656 arg01, arg10),
12657 arg00),
12658 build_zero_cst (itype));
12659
12660 if (operand_equal_p (arg00, arg10, 0))
12661 return fold_build2_loc (loc, code, type,
12662 fold_build2_loc (loc, BIT_AND_EXPR, itype,
12663 fold_build2_loc (loc,
12664 BIT_XOR_EXPR, itype,
12665 arg01, arg11),
12666 arg00),
12667 build_zero_cst (itype));
12668 }
12669
12670 if (TREE_CODE (arg0) == BIT_XOR_EXPR
12671 && TREE_CODE (arg1) == BIT_XOR_EXPR)
12672 {
12673 tree arg00 = TREE_OPERAND (arg0, 0);
12674 tree arg01 = TREE_OPERAND (arg0, 1);
12675 tree arg10 = TREE_OPERAND (arg1, 0);
12676 tree arg11 = TREE_OPERAND (arg1, 1);
12677 tree itype = TREE_TYPE (arg0);
12678
12679 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
12680 operand_equal_p guarantees no side-effects so we don't need
12681 to use omit_one_operand on Z. */
12682 if (operand_equal_p (arg01, arg11, 0))
12683 return fold_build2_loc (loc, code, type, arg00,
12684 fold_convert_loc (loc, TREE_TYPE (arg00),
12685 arg10));
12686 if (operand_equal_p (arg01, arg10, 0))
12687 return fold_build2_loc (loc, code, type, arg00,
12688 fold_convert_loc (loc, TREE_TYPE (arg00),
12689 arg11));
12690 if (operand_equal_p (arg00, arg11, 0))
12691 return fold_build2_loc (loc, code, type, arg01,
12692 fold_convert_loc (loc, TREE_TYPE (arg01),
12693 arg10));
12694 if (operand_equal_p (arg00, arg10, 0))
12695 return fold_build2_loc (loc, code, type, arg01,
12696 fold_convert_loc (loc, TREE_TYPE (arg01),
12697 arg11));
12698
12699 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
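/* E.g. (x ^ 1) == (y ^ 3) becomes (x ^ 2) == y. */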
12700 if (TREE_CODE (arg01) == INTEGER_CST
12701 && TREE_CODE (arg11) == INTEGER_CST)
12702 {
12703 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
12704 fold_convert_loc (loc, itype, arg11));
12705 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
12706 return fold_build2_loc (loc, code, type, tem,
12707 fold_convert_loc (loc, itype, arg10));
12708 }
12709 }
12710
12711 /* Attempt to simplify equality/inequality comparisons of complex
12712 values. Only lower the comparison if the result is known or
12713 can be simplified to a single scalar comparison. */
12714 if ((TREE_CODE (arg0) == COMPLEX_EXPR
12715 || TREE_CODE (arg0) == COMPLEX_CST)
12716 && (TREE_CODE (arg1) == COMPLEX_EXPR
12717 || TREE_CODE (arg1) == COMPLEX_CST))
12718 {
12719 tree real0, imag0, real1, imag1;
12720 tree rcond, icond;
12721
12722 if (TREE_CODE (arg0) == COMPLEX_EXPR)
12723 {
12724 real0 = TREE_OPERAND (arg0, 0);
12725 imag0 = TREE_OPERAND (arg0, 1);
12726 }
12727 else
12728 {
12729 real0 = TREE_REALPART (arg0);
12730 imag0 = TREE_IMAGPART (arg0);
12731 }
12732
12733 if (TREE_CODE (arg1) == COMPLEX_EXPR)
12734 {
12735 real1 = TREE_OPERAND (arg1, 0);
12736 imag1 = TREE_OPERAND (arg1, 1);
12737 }
12738 else
12739 {
12740 real1 = TREE_REALPART (arg1);
12741 imag1 = TREE_IMAGPART (arg1);
12742 }
12743
12744 rcond = fold_binary_loc (loc, code, type, real0, real1);
12745 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
12746 {
12747 if (integer_zerop (rcond))
12748 {
12749 if (code == EQ_EXPR)
12750 return omit_two_operands_loc (loc, type, boolean_false_node,
12751 imag0, imag1);
12752 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
12753 }
12754 else
12755 {
12756 if (code == NE_EXPR)
12757 return omit_two_operands_loc (loc, type, boolean_true_node,
12758 imag0, imag1);
12759 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
12760 }
12761 }
12762
12763 icond = fold_binary_loc (loc, code, type, imag0, imag1);
12764 if (icond && TREE_CODE (icond) == INTEGER_CST)
12765 {
12766 if (integer_zerop (icond))
12767 {
12768 if (code == EQ_EXPR)
12769 return omit_two_operands_loc (loc, type, boolean_false_node,
12770 real0, real1);
12771 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
12772 }
12773 else
12774 {
12775 if (code == NE_EXPR)
12776 return omit_two_operands_loc (loc, type, boolean_true_node,
12777 real0, real1);
12778 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
12779 }
12780 }
12781 }
12782
12783 return NULL_TREE;
12784
12785 case LT_EXPR:
12786 case GT_EXPR:
12787 case LE_EXPR:
12788 case GE_EXPR:
12789 tem = fold_comparison (loc, code, type, op0, op1);
12790 if (tem != NULL_TREE)
12791 return tem;
12792
12793 /* Transform comparisons of the form X +- C CMP X. */
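/* E.g. with undefined signed overflow, x + 1 > x folds to true and
   x - 1 > x folds to false. */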
12794 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
12795 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
12796 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
12797 && !HONOR_SNANS (arg0))
12798 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
12799 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
12800 {
12801 tree arg01 = TREE_OPERAND (arg0, 1);
12802 enum tree_code code0 = TREE_CODE (arg0);
12803 int is_positive;
12804
12805 if (TREE_CODE (arg01) == REAL_CST)
12806 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
12807 else
12808 is_positive = tree_int_cst_sgn (arg01);
12809
12810 /* (X - c) > X becomes false. */
12811 if (code == GT_EXPR
12812 && ((code0 == MINUS_EXPR && is_positive >= 0)
12813 || (code0 == PLUS_EXPR && is_positive <= 0)))
12814 {
12815 if (TREE_CODE (arg01) == INTEGER_CST
12816 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12817 fold_overflow_warning (("assuming signed overflow does not "
12818 "occur when assuming that (X - c) > X "
12819 "is always false"),
12820 WARN_STRICT_OVERFLOW_ALL);
12821 return constant_boolean_node (0, type);
12822 }
12823
12824 /* Likewise (X + c) < X becomes false. */
12825 if (code == LT_EXPR
12826 && ((code0 == PLUS_EXPR && is_positive >= 0)
12827 || (code0 == MINUS_EXPR && is_positive <= 0)))
12828 {
12829 if (TREE_CODE (arg01) == INTEGER_CST
12830 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12831 fold_overflow_warning (("assuming signed overflow does not "
12832 "occur when assuming that "
12833 "(X + c) < X is always false"),
12834 WARN_STRICT_OVERFLOW_ALL);
12835 return constant_boolean_node (0, type);
12836 }
12837
12838 /* Convert (X - c) <= X to true. */
12839 if (!HONOR_NANS (arg1)
12840 && code == LE_EXPR
12841 && ((code0 == MINUS_EXPR && is_positive >= 0)
12842 || (code0 == PLUS_EXPR && is_positive <= 0)))
12843 {
12844 if (TREE_CODE (arg01) == INTEGER_CST
12845 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12846 fold_overflow_warning (("assuming signed overflow does not "
12847 "occur when assuming that "
12848 "(X - c) <= X is always true"),
12849 WARN_STRICT_OVERFLOW_ALL);
12850 return constant_boolean_node (1, type);
12851 }
12852
12853 /* Convert (X + c) >= X to true. */
12854 if (!HONOR_NANS (arg1)
12855 && code == GE_EXPR
12856 && ((code0 == PLUS_EXPR && is_positive >= 0)
12857 || (code0 == MINUS_EXPR && is_positive <= 0)))
12858 {
12859 if (TREE_CODE (arg01) == INTEGER_CST
12860 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12861 fold_overflow_warning (("assuming signed overflow does not "
12862 "occur when assuming that "
12863 "(X + c) >= X is always true"),
12864 WARN_STRICT_OVERFLOW_ALL);
12865 return constant_boolean_node (1, type);
12866 }
12867
12868 if (TREE_CODE (arg01) == INTEGER_CST)
12869 {
12870 /* Convert X + c > X and X - c < X to true for integers. */
12871 if (code == GT_EXPR
12872 && ((code0 == PLUS_EXPR && is_positive > 0)
12873 || (code0 == MINUS_EXPR && is_positive < 0)))
12874 {
12875 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12876 fold_overflow_warning (("assuming signed overflow does "
12877 "not occur when assuming that "
12878 "(X + c) > X is always true"),
12879 WARN_STRICT_OVERFLOW_ALL);
12880 return constant_boolean_node (1, type);
12881 }
12882
12883 if (code == LT_EXPR
12884 && ((code0 == MINUS_EXPR && is_positive > 0)
12885 || (code0 == PLUS_EXPR && is_positive < 0)))
12886 {
12887 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12888 fold_overflow_warning (("assuming signed overflow does "
12889 "not occur when assuming that "
12890 "(X - c) < X is always true"),
12891 WARN_STRICT_OVERFLOW_ALL);
12892 return constant_boolean_node (1, type);
12893 }
12894
12895 /* Convert X + c <= X and X - c >= X to false for integers. */
12896 if (code == LE_EXPR
12897 && ((code0 == PLUS_EXPR && is_positive > 0)
12898 || (code0 == MINUS_EXPR && is_positive < 0)))
12899 {
12900 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12901 fold_overflow_warning (("assuming signed overflow does "
12902 "not occur when assuming that "
12903 "(X + c) <= X is always false"),
12904 WARN_STRICT_OVERFLOW_ALL);
12905 return constant_boolean_node (0, type);
12906 }
12907
12908 if (code == GE_EXPR
12909 && ((code0 == MINUS_EXPR && is_positive > 0)
12910 || (code0 == PLUS_EXPR && is_positive < 0)))
12911 {
12912 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
12913 fold_overflow_warning (("assuming signed overflow does "
12914 "not occur when assuming that "
12915 "(X - c) >= X is always false"),
12916 WARN_STRICT_OVERFLOW_ALL);
12917 return constant_boolean_node (0, type);
12918 }
12919 }
12920 }
12921
12922 /* Comparisons with the highest or lowest possible integer of
12923 the specified precision will have known values. */
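/* For instance, for an unsigned char x, x <= 255 folds to true and
   x > 255 folds to false. */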
12924 {
12925 tree arg1_type = TREE_TYPE (arg1);
12926 unsigned int prec = TYPE_PRECISION (arg1_type);
12927
12928 if (TREE_CODE (arg1) == INTEGER_CST
12929 && (INTEGRAL_TYPE_P (arg1_type) || POINTER_TYPE_P (arg1_type)))
12930 {
12931 wide_int max = wi::max_value (arg1_type);
12932 wide_int signed_max = wi::max_value (prec, SIGNED);
12933 wide_int min = wi::min_value (arg1_type);
12934
12935 if (wi::eq_p (arg1, max))
12936 switch (code)
12937 {
12938 case GT_EXPR:
12939 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12940
12941 case GE_EXPR:
12942 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12943
12944 case LE_EXPR:
12945 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12946
12947 case LT_EXPR:
12948 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12949
12950 /* The GE_EXPR and LT_EXPR cases above are not normally
12951 reached because of previous transformations. */
12952
12953 default:
12954 break;
12955 }
12956 else if (wi::eq_p (arg1, max - 1))
12957 switch (code)
12958 {
12959 case GT_EXPR:
12960 arg1 = const_binop (PLUS_EXPR, arg1,
12961 build_int_cst (TREE_TYPE (arg1), 1));
12962 return fold_build2_loc (loc, EQ_EXPR, type,
12963 fold_convert_loc (loc,
12964 TREE_TYPE (arg1), arg0),
12965 arg1);
12966 case LE_EXPR:
12967 arg1 = const_binop (PLUS_EXPR, arg1,
12968 build_int_cst (TREE_TYPE (arg1), 1));
12969 return fold_build2_loc (loc, NE_EXPR, type,
12970 fold_convert_loc (loc, TREE_TYPE (arg1),
12971 arg0),
12972 arg1);
12973 default:
12974 break;
12975 }
12976 else if (wi::eq_p (arg1, min))
12977 switch (code)
12978 {
12979 case LT_EXPR:
12980 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
12981
12982 case LE_EXPR:
12983 return fold_build2_loc (loc, EQ_EXPR, type, op0, op1);
12984
12985 case GE_EXPR:
12986 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
12987
12988 case GT_EXPR:
12989 return fold_build2_loc (loc, NE_EXPR, type, op0, op1);
12990
12991 default:
12992 break;
12993 }
12994 else if (wi::eq_p (arg1, min + 1))
12995 switch (code)
12996 {
12997 case GE_EXPR:
12998 arg1 = const_binop (MINUS_EXPR, arg1,
12999 build_int_cst (TREE_TYPE (arg1), 1));
13000 return fold_build2_loc (loc, NE_EXPR, type,
13001 fold_convert_loc (loc,
13002 TREE_TYPE (arg1), arg0),
13003 arg1);
13004 case LT_EXPR:
13005 arg1 = const_binop (MINUS_EXPR, arg1,
13006 build_int_cst (TREE_TYPE (arg1), 1));
13007 return fold_build2_loc (loc, EQ_EXPR, type,
13008 fold_convert_loc (loc, TREE_TYPE (arg1),
13009 arg0),
13010 arg1);
13011 default:
13012 break;
13013 }
13014
13015 else if (wi::eq_p (arg1, signed_max)
13016 && TYPE_UNSIGNED (arg1_type)
13017 /* We will flip the signedness of the comparison operator
13018 associated with the mode of arg1, so the sign bit is
13019 specified by this mode. Check that arg1 is the signed
13020 max associated with this sign bit. */
13021 && prec == GET_MODE_PRECISION (TYPE_MODE (arg1_type))
13022 /* signed_type does not work on pointer types. */
13023 && INTEGRAL_TYPE_P (arg1_type))
13024 {
13025 /* The following case also applies to X < signed_max+1
13026 and X >= signed_max+1 because of previous transformations. */
13027 if (code == LE_EXPR || code == GT_EXPR)
13028 {
13029 tree st = signed_type_for (arg1_type);
13030 return fold_build2_loc (loc,
13031 code == LE_EXPR ? GE_EXPR : LT_EXPR,
13032 type, fold_convert_loc (loc, st, arg0),
13033 build_int_cst (st, 0));
13034 }
13035 }
13036 }
13037 }
13038
13039 /* If we are comparing an ABS_EXPR with a constant, we can
13040 convert all the cases into explicit comparisons, but they may
13041 well not be faster than doing the ABS and one comparison.
13042 But ABS (X) <= C is a range comparison, which becomes a subtraction
13043 and a comparison, and is probably faster. */
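/* E.g. ABS (x) <= 3 becomes x >= -3 && x <= 3. */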
13044 if (code == LE_EXPR
13045 && TREE_CODE (arg1) == INTEGER_CST
13046 && TREE_CODE (arg0) == ABS_EXPR
13047 && ! TREE_SIDE_EFFECTS (arg0)
13048 && (0 != (tem = negate_expr (arg1)))
13049 && TREE_CODE (tem) == INTEGER_CST
13050 && !TREE_OVERFLOW (tem))
13051 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
13052 build2 (GE_EXPR, type,
13053 TREE_OPERAND (arg0, 0), tem),
13054 build2 (LE_EXPR, type,
13055 TREE_OPERAND (arg0, 0), arg1));
13056
13057 /* Convert ABS_EXPR<x> >= 0 to true. */
13058 strict_overflow_p = false;
13059 if (code == GE_EXPR
13060 && (integer_zerop (arg1)
13061 || (! HONOR_NANS (arg0)
13062 && real_zerop (arg1)))
13063 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13064 {
13065 if (strict_overflow_p)
13066 fold_overflow_warning (("assuming signed overflow does not occur "
13067 "when simplifying comparison of "
13068 "absolute value and zero"),
13069 WARN_STRICT_OVERFLOW_CONDITIONAL);
13070 return omit_one_operand_loc (loc, type,
13071 constant_boolean_node (true, type),
13072 arg0);
13073 }
13074
13075 /* Convert ABS_EXPR<x> < 0 to false. */
13076 strict_overflow_p = false;
13077 if (code == LT_EXPR
13078 && (integer_zerop (arg1) || real_zerop (arg1))
13079 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
13080 {
13081 if (strict_overflow_p)
13082 fold_overflow_warning (("assuming signed overflow does not occur "
13083 "when simplifying comparison of "
13084 "absolute value and zero"),
13085 WARN_STRICT_OVERFLOW_CONDITIONAL);
13086 return omit_one_operand_loc (loc, type,
13087 constant_boolean_node (false, type),
13088 arg0);
13089 }
13090
13091 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
13092 and similarly for >= into !=. */
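/* For unsigned x, x < (1 << y) holds exactly when no bit at position
   y or above is set in x, i.e. when (x >> y) == 0. */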
13093 if ((code == LT_EXPR || code == GE_EXPR)
13094 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13095 && TREE_CODE (arg1) == LSHIFT_EXPR
13096 && integer_onep (TREE_OPERAND (arg1, 0)))
13097 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13098 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13099 TREE_OPERAND (arg1, 1)),
13100 build_zero_cst (TREE_TYPE (arg0)));
13101
13102 /* Similarly for X < (cast) (1 << Y). But the cast can't be narrowing,
13103 otherwise Y might be >= # of bits in X's type and thus e.g.
13104 (unsigned char) (1 << Y) for Y == 15 might be 0.
13105 If the cast is widening, then 1 << Y should have unsigned type,
13106 otherwise if Y is the number of bits in the signed shift type minus 1,
13107 we can't optimize this. E.g. (unsigned long long) (1 << Y)
13108 for Y == 31 might be 0xffffffff80000000. */
13109 if ((code == LT_EXPR || code == GE_EXPR)
13110 && TYPE_UNSIGNED (TREE_TYPE (arg0))
13111 && CONVERT_EXPR_P (arg1)
13112 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
13113 && (element_precision (TREE_TYPE (arg1))
13114 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
13115 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
13116 || (element_precision (TREE_TYPE (arg1))
13117 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
13118 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
13119 {
13120 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
13121 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
13122 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
13123 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
13124 build_zero_cst (TREE_TYPE (arg0)));
13125 }
13126
13127 return NULL_TREE;
13128
13129 case UNORDERED_EXPR:
13130 case ORDERED_EXPR:
13131 case UNLT_EXPR:
13132 case UNLE_EXPR:
13133 case UNGT_EXPR:
13134 case UNGE_EXPR:
13135 case UNEQ_EXPR:
13136 case LTGT_EXPR:
13137 if (TREE_CODE (arg0) == REAL_CST && TREE_CODE (arg1) == REAL_CST)
13138 {
13139 t1 = fold_relational_const (code, type, arg0, arg1);
13140 if (t1 != NULL_TREE)
13141 return t1;
13142 }
13143
13144 /* If the first operand is NaN, the result is constant. */
13145 if (TREE_CODE (arg0) == REAL_CST
13146 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg0))
13147 && (code != LTGT_EXPR || ! flag_trapping_math))
13148 {
13149 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13150 ? integer_zero_node
13151 : integer_one_node;
13152 return omit_one_operand_loc (loc, type, t1, arg1);
13153 }
13154
13155 /* If the second operand is NaN, the result is constant. */
13156 if (TREE_CODE (arg1) == REAL_CST
13157 && REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
13158 && (code != LTGT_EXPR || ! flag_trapping_math))
13159 {
13160 t1 = (code == ORDERED_EXPR || code == LTGT_EXPR)
13161 ? integer_zero_node
13162 : integer_one_node;
13163 return omit_one_operand_loc (loc, type, t1, arg0);
13164 }
13165
13166 /* Simplify unordered comparison of something with itself. */
13167 if ((code == UNLE_EXPR || code == UNGE_EXPR || code == UNEQ_EXPR)
13168 && operand_equal_p (arg0, arg1, 0))
13169 return constant_boolean_node (1, type);
13170
13171 if (code == LTGT_EXPR
13172 && !flag_trapping_math
13173 && operand_equal_p (arg0, arg1, 0))
13174 return constant_boolean_node (0, type);
13175
13176 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
13177 {
13178 tree targ0 = strip_float_extensions (arg0);
13179 tree targ1 = strip_float_extensions (arg1);
13180 tree newtype = TREE_TYPE (targ0);
13181
13182 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
13183 newtype = TREE_TYPE (targ1);
13184
13185 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
13186 return fold_build2_loc (loc, code, type,
13187 fold_convert_loc (loc, newtype, targ0),
13188 fold_convert_loc (loc, newtype, targ1));
13189 }
13190
13191 return NULL_TREE;
13192
13193 case COMPOUND_EXPR:
13194 /* When pedantic, a compound expression can be neither an lvalue
13195 nor an integer constant expression. */
13196 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
13197 return NULL_TREE;
13198 /* Don't let (0, 0) be a null pointer constant. */
13199 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
13200 : fold_convert_loc (loc, type, arg1);
13201 return pedantic_non_lvalue_loc (loc, tem);
13202
13203 case ASSERT_EXPR:
13204 /* An ASSERT_EXPR should never be passed to fold_binary. */
13205 gcc_unreachable ();
13206
13207 default:
13208 return NULL_TREE;
13209 } /* switch (code) */
13210 }
13211
13212 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
13213 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
13214 of GOTO_EXPR. */
13215
13216 static tree
13217 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
13218 {
13219 switch (TREE_CODE (*tp))
13220 {
13221 case LABEL_EXPR:
13222 return *tp;
13223
13224 case GOTO_EXPR:
13225 *walk_subtrees = 0;
13226
13227 /* ... fall through ... */
13228
13229 default:
13230 return NULL_TREE;
13231 }
13232 }
13233
13234 /* Return whether the sub-tree ST contains a label which is accessible from
13235 outside the sub-tree. */
13236
13237 static bool
13238 contains_label_p (tree st)
13239 {
13240 return
13241 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
13242 }
13243
13244 /* Fold a ternary expression of code CODE and type TYPE with operands
13245 OP0, OP1, and OP2. Return the folded expression if folding is
13246 successful. Otherwise, return NULL_TREE. */
13247
13248 tree
13249 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
13250 tree op0, tree op1, tree op2)
13251 {
13252 tree tem;
13253 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
13254 enum tree_code_class kind = TREE_CODE_CLASS (code);
13255
13256 gcc_assert (IS_EXPR_CODE_CLASS (kind)
13257 && TREE_CODE_LENGTH (code) == 3);
13258
13259 /* If this is a commutative operation, and OP0 is a constant, move it
13260 to OP1 to reduce the number of tests below. */
13261 if (commutative_ternary_tree_code (code)
13262 && tree_swap_operands_p (op0, op1, true))
13263 return fold_build3_loc (loc, code, type, op1, op0, op2);
13264
13265 tem = generic_simplify (loc, code, type, op0, op1, op2);
13266 if (tem)
13267 return tem;
13268
13269 /* Strip any conversions that don't change the mode. This is safe
13270 for every expression, except for a comparison expression because
13271 its signedness is derived from its operands. So, in the latter
13272 case, only strip conversions that don't change the signedness.
13273
13274 Note that this is done as an internal manipulation within the
13275 constant folder, in order to find the simplest representation of
13276 the arguments so that their form can be studied. In any case,
13277 the appropriate type conversions should be put back in the tree
13278 that will get out of the constant folder. */
13279 if (op0)
13280 {
13281 arg0 = op0;
13282 STRIP_NOPS (arg0);
13283 }
13284
13285 if (op1)
13286 {
13287 arg1 = op1;
13288 STRIP_NOPS (arg1);
13289 }
13290
13291 if (op2)
13292 {
13293 arg2 = op2;
13294 STRIP_NOPS (arg2);
13295 }
13296
13297 switch (code)
13298 {
13299 case COMPONENT_REF:
13300 if (TREE_CODE (arg0) == CONSTRUCTOR
13301 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
13302 {
13303 unsigned HOST_WIDE_INT idx;
13304 tree field, value;
13305 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
13306 if (field == arg1)
13307 return value;
13308 }
13309 return NULL_TREE;
13310
13311 case COND_EXPR:
13312 case VEC_COND_EXPR:
13313 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
13314 so all simple results must be passed through pedantic_non_lvalue. */
13315 if (TREE_CODE (arg0) == INTEGER_CST)
13316 {
13317 tree unused_op = integer_zerop (arg0) ? op1 : op2;
13318 tem = integer_zerop (arg0) ? op2 : op1;
13319 /* Only optimize constant conditions when the selected branch
13320 has the same type as the COND_EXPR. This avoids optimizing
13321 away "c ? x : throw", where the throw has a void type.
13322 Avoid throwing away an operand that contains a label. */
13323 if ((!TREE_SIDE_EFFECTS (unused_op)
13324 || !contains_label_p (unused_op))
13325 && (! VOID_TYPE_P (TREE_TYPE (tem))
13326 || VOID_TYPE_P (type)))
13327 return pedantic_non_lvalue_loc (loc, tem);
13328 return NULL_TREE;
13329 }
13330 else if (TREE_CODE (arg0) == VECTOR_CST)
13331 {
13332 if ((TREE_CODE (arg1) == VECTOR_CST
13333 || TREE_CODE (arg1) == CONSTRUCTOR)
13334 && (TREE_CODE (arg2) == VECTOR_CST
13335 || TREE_CODE (arg2) == CONSTRUCTOR))
13336 {
13337 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
13338 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
13339 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
13340 for (i = 0; i < nelts; i++)
13341 {
13342 tree val = VECTOR_CST_ELT (arg0, i);
13343 if (integer_all_onesp (val))
13344 sel[i] = i;
13345 else if (integer_zerop (val))
13346 sel[i] = nelts + i;
13347 else /* Currently unreachable. */
13348 return NULL_TREE;
13349 }
13350 tree t = fold_vec_perm (type, arg1, arg2, sel);
13351 if (t != NULL_TREE)
13352 return t;
13353 }
13354 }
13355
13356 /* If we have A op B ? A : C, we may be able to convert this to a
13357 simpler expression, depending on the operation and the values
13358 of B and C. Signed zeros prevent all of these transformations,
13359 for reasons given above each one.
13360
13361 Also try swapping the arguments and inverting the conditional. */
13362 if (COMPARISON_CLASS_P (arg0)
13363 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13364 arg1, TREE_OPERAND (arg0, 1))
13365 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
13366 {
13367 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
13368 if (tem)
13369 return tem;
13370 }
13371
13372 if (COMPARISON_CLASS_P (arg0)
13373 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
13374 op2,
13375 TREE_OPERAND (arg0, 1))
13376 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
13377 {
13378 location_t loc0 = expr_location_or (arg0, loc);
13379 tem = fold_invert_truthvalue (loc0, arg0);
13380 if (tem && COMPARISON_CLASS_P (tem))
13381 {
13382 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
13383 if (tem)
13384 return tem;
13385 }
13386 }
13387
13388 /* If the second operand is simpler than the third, swap them
13389 since that produces better jump optimization results. */
13390 if (truth_value_p (TREE_CODE (arg0))
13391 && tree_swap_operands_p (op1, op2, false))
13392 {
13393 location_t loc0 = expr_location_or (arg0, loc);
13394 /* See if this can be inverted. If it can't, possibly because
13395 it was a floating-point inequality comparison, don't do
13396 anything. */
13397 tem = fold_invert_truthvalue (loc0, arg0);
13398 if (tem)
13399 return fold_build3_loc (loc, code, type, tem, op2, op1);
13400 }
13401
13402 /* Convert A ? 1 : 0 to simply A. */
13403 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
13404 : (integer_onep (op1)
13405 && !VECTOR_TYPE_P (type)))
13406 && integer_zerop (op2)
13407 /* If we try to convert OP0 to our type, the
13408 call to fold will try to move the conversion inside
13409 a COND, which will recurse. In that case, the COND_EXPR
13410 is probably the best choice, so leave it alone. */
13411 && type == TREE_TYPE (arg0))
13412 return pedantic_non_lvalue_loc (loc, arg0);
13413
13414 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
13415 over COND_EXPR in cases such as floating point comparisons. */
13416 if (integer_zerop (op1)
13417 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
13418 : (integer_onep (op2)
13419 && !VECTOR_TYPE_P (type)))
13420 && truth_value_p (TREE_CODE (arg0)))
13421 return pedantic_non_lvalue_loc (loc,
13422 fold_convert_loc (loc, type,
13423 invert_truthvalue_loc (loc,
13424 arg0)));
13425
13426 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
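/* E.g. for a 32-bit int a, a < 0 ? INT_MIN : 0 folds to a & INT_MIN,
   since INT_MIN is the sign bit of a. */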
13427 if (TREE_CODE (arg0) == LT_EXPR
13428 && integer_zerop (TREE_OPERAND (arg0, 1))
13429 && integer_zerop (op2)
13430 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
13431 {
13432 /* sign_bit_p looks through both zero and sign extensions,
13433 but for this optimization only sign extensions are
13434 usable. */
13435 tree tem2 = TREE_OPERAND (arg0, 0);
13436 while (tem != tem2)
13437 {
13438 if (TREE_CODE (tem2) != NOP_EXPR
13439 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
13440 {
13441 tem = NULL_TREE;
13442 break;
13443 }
13444 tem2 = TREE_OPERAND (tem2, 0);
13445 }
13446 /* sign_bit_p only checks ARG1 bits within A's precision.
13447 If <sign bit of A> has wider type than A, bits outside
13448 of A's precision in <sign bit of A> need to be checked.
13449 If they are all 0, this optimization needs to be done
13450 in unsigned A's type; if they are all 1, in signed A's type;
13451 otherwise this can't be done. */
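/* For illustration: with a signed char A promoted to int and
   ARG1 == 0x80, the bits above bit 7 are all zero, so the AND can be
   done in unsigned char; with ARG1 == 0xffffff80 they are all one,
   so it can be done in signed char.  */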
13452 if (tem
13453 && TYPE_PRECISION (TREE_TYPE (tem))
13454 < TYPE_PRECISION (TREE_TYPE (arg1))
13455 && TYPE_PRECISION (TREE_TYPE (tem))
13456 < TYPE_PRECISION (type))
13457 {
13458 int inner_width, outer_width;
13459 tree tem_type;
13460
13461 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
13462 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
13463 if (outer_width > TYPE_PRECISION (type))
13464 outer_width = TYPE_PRECISION (type);
13465
13466 wide_int mask = wi::shifted_mask
13467 (inner_width, outer_width - inner_width, false,
13468 TYPE_PRECISION (TREE_TYPE (arg1)));
13469
13470 wide_int common = mask & arg1;
13471 if (common == mask)
13472 {
13473 tem_type = signed_type_for (TREE_TYPE (tem));
13474 tem = fold_convert_loc (loc, tem_type, tem);
13475 }
13476 else if (common == 0)
13477 {
13478 tem_type = unsigned_type_for (TREE_TYPE (tem));
13479 tem = fold_convert_loc (loc, tem_type, tem);
13480 }
13481 else
13482 tem = NULL;
13483 }
13484
13485 if (tem)
13486 return
13487 fold_convert_loc (loc, type,
13488 fold_build2_loc (loc, BIT_AND_EXPR,
13489 TREE_TYPE (tem), tem,
13490 fold_convert_loc (loc,
13491 TREE_TYPE (tem),
13492 arg1)));
13493 }
13494
13495 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
13496 already handled above. */
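/* For example, ((a >> 3) & 1) ? 8 : 0 folds to a & 8.  */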
13497 if (TREE_CODE (arg0) == BIT_AND_EXPR
13498 && integer_onep (TREE_OPERAND (arg0, 1))
13499 && integer_zerop (op2)
13500 && integer_pow2p (arg1))
13501 {
13502 tree tem = TREE_OPERAND (arg0, 0);
13503 STRIP_NOPS (tem);
13504 if (TREE_CODE (tem) == RSHIFT_EXPR
13505 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
13506 && ((unsigned HOST_WIDE_INT) tree_log2 (arg1)
13507 == tree_to_uhwi (TREE_OPERAND (tem, 1))))
13508 return fold_build2_loc (loc, BIT_AND_EXPR, type,
13509 TREE_OPERAND (tem, 0), arg1);
13510 }
13511
13512 /* A & N ? N : 0 is simply A & N if N is a power of two. This
13513 is probably obsolete because the first operand should be a
13514 truth value (that's why we have the two cases above), but let's
13515 leave it in until we can confirm this for all front-ends. */
13516 if (integer_zerop (op2)
13517 && TREE_CODE (arg0) == NE_EXPR
13518 && integer_zerop (TREE_OPERAND (arg0, 1))
13519 && integer_pow2p (arg1)
13520 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
13521 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
13522 arg1, OEP_ONLY_CONST))
13523 return pedantic_non_lvalue_loc (loc,
13524 fold_convert_loc (loc, type,
13525 TREE_OPERAND (arg0, 0)));
13526
13527 /* Disable the transformations below for vectors, since
13528 fold_binary_op_with_conditional_arg may undo them immediately,
13529 yielding an infinite loop. */
13530 if (code == VEC_COND_EXPR)
13531 return NULL_TREE;
13532
13533 /* Convert A ? B : 0 into A && B if A and B are truth values. */
13534 if (integer_zerop (op2)
13535 && truth_value_p (TREE_CODE (arg0))
13536 && truth_value_p (TREE_CODE (arg1))
13537 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13538 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
13539 : TRUTH_ANDIF_EXPR,
13540 type, fold_convert_loc (loc, type, arg0), arg1);
13541
13542 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
13543 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
13544 && truth_value_p (TREE_CODE (arg0))
13545 && truth_value_p (TREE_CODE (arg1))
13546 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13547 {
13548 location_t loc0 = expr_location_or (arg0, loc);
13549 /* Only perform transformation if ARG0 is easily inverted. */
13550 tem = fold_invert_truthvalue (loc0, arg0);
13551 if (tem)
13552 return fold_build2_loc (loc, code == VEC_COND_EXPR
13553 ? BIT_IOR_EXPR
13554 : TRUTH_ORIF_EXPR,
13555 type, fold_convert_loc (loc, type, tem),
13556 arg1);
13557 }
13558
13559 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
13560 if (integer_zerop (arg1)
13561 && truth_value_p (TREE_CODE (arg0))
13562 && truth_value_p (TREE_CODE (op2))
13563 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13564 {
13565 location_t loc0 = expr_location_or (arg0, loc);
13566 /* Only perform transformation if ARG0 is easily inverted. */
13567 tem = fold_invert_truthvalue (loc0, arg0);
13568 if (tem)
13569 return fold_build2_loc (loc, code == VEC_COND_EXPR
13570 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
13571 type, fold_convert_loc (loc, type, tem),
13572 op2);
13573 }
13574
13575 /* Convert A ? 1 : B into A || B if A and B are truth values. */
13576 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
13577 && truth_value_p (TREE_CODE (arg0))
13578 && truth_value_p (TREE_CODE (op2))
13579 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
13580 return fold_build2_loc (loc, code == VEC_COND_EXPR
13581 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
13582 type, fold_convert_loc (loc, type, arg0), op2);
13583
13584 return NULL_TREE;
13585
13586 case CALL_EXPR:
13587 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
13588 of fold_ternary on them. */
13589 gcc_unreachable ();
13590
13591 case BIT_FIELD_REF:
13592 if ((TREE_CODE (arg0) == VECTOR_CST
13593 || (TREE_CODE (arg0) == CONSTRUCTOR
13594 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
13595 && (type == TREE_TYPE (TREE_TYPE (arg0))
13596 || (TREE_CODE (type) == VECTOR_TYPE
13597 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
13598 {
13599 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
13600 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
13601 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
13602 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
13603
13604 if (n != 0
13605 && (idx % width) == 0
13606 && (n % width) == 0
13607 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
13608 {
13609 idx = idx / width;
13610 n = n / width;
13611
13612 if (TREE_CODE (arg0) == VECTOR_CST)
13613 {
13614 if (n == 1)
13615 return VECTOR_CST_ELT (arg0, idx);
13616
13617 tree *vals = XALLOCAVEC (tree, n);
13618 for (unsigned i = 0; i < n; ++i)
13619 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
13620 return build_vector (type, vals);
13621 }
13622
13623 /* Constructor elements can be subvectors. */
13624 unsigned HOST_WIDE_INT k = 1;
13625 if (CONSTRUCTOR_NELTS (arg0) != 0)
13626 {
13627 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
13628 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
13629 k = TYPE_VECTOR_SUBPARTS (cons_elem);
13630 }
13631
13632 /* We keep an exact subset of the constructor elements. */
13633 if ((idx % k) == 0 && (n % k) == 0)
13634 {
13635 if (CONSTRUCTOR_NELTS (arg0) == 0)
13636 return build_constructor (type, NULL);
13637 idx /= k;
13638 n /= k;
13639 if (n == 1)
13640 {
13641 if (idx < CONSTRUCTOR_NELTS (arg0))
13642 return CONSTRUCTOR_ELT (arg0, idx)->value;
13643 return build_zero_cst (type);
13644 }
13645
13646 vec<constructor_elt, va_gc> *vals;
13647 vec_alloc (vals, n);
13648 for (unsigned i = 0;
13649 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
13650 ++i)
13651 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
13652 CONSTRUCTOR_ELT
13653 (arg0, idx + i)->value);
13654 return build_constructor (type, vals);
13655 }
13656 /* The bitfield references a single constructor element. */
13657 else if (idx + n <= (idx / k + 1) * k)
13658 {
13659 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
13660 return build_zero_cst (type);
13661 else if (n == k)
13662 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
13663 else
13664 return fold_build3_loc (loc, code, type,
13665 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
13666 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
13667 }
13668 }
13669 }
13670
13671 /* A BIT_FIELD_REF that references the full argument can be stripped. */
13672 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
13673 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
13674 && integer_zerop (op2))
13675 return fold_convert_loc (loc, type, arg0);
13676
13677 /* On constants we can use native encode/interpret to constant
13678 fold (nearly) all BIT_FIELD_REFs. */
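/* E.g. a BIT_FIELD_REF of the low 32 bits of a 64-bit INTEGER_CST is
   folded by encoding the constant into a byte buffer and
   re-interpreting the selected bytes in TYPE.  */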
13679 if (CONSTANT_CLASS_P (arg0)
13680 && can_native_interpret_type_p (type)
13681 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
13682 /* This limitation should not be necessary; we just need to
13683 round this up to the mode size. */
13684 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
13685 /* Need bit-shifting of the buffer to relax the following. */
13686 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
13687 {
13688 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
13689 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
13690 unsigned HOST_WIDE_INT clen;
13691 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
13692 /* ??? We cannot tell native_encode_expr to start at
13693 an arbitrary byte only. So limit ourselves to a reasonable
13694 amount of work. */
13695 if (clen <= 4096)
13696 {
13697 unsigned char *b = XALLOCAVEC (unsigned char, clen);
13698 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
13699 if (len > 0
13700 && len * BITS_PER_UNIT >= bitpos + bitsize)
13701 {
13702 tree v = native_interpret_expr (type,
13703 b + bitpos / BITS_PER_UNIT,
13704 bitsize / BITS_PER_UNIT);
13705 if (v)
13706 return v;
13707 }
13708 }
13709 }
13710
13711 return NULL_TREE;
13712
13713 case FMA_EXPR:
13714 /* For integers we can decompose the FMA if possible. */
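/* E.g. FMA_EXPR <4, 5, x> becomes PLUS_EXPR <20, x>, and
   FMA_EXPR <a, b, 0> becomes MULT_EXPR <a, b>.  */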
13715 if (TREE_CODE (arg0) == INTEGER_CST
13716 && TREE_CODE (arg1) == INTEGER_CST)
13717 return fold_build2_loc (loc, PLUS_EXPR, type,
13718 const_binop (MULT_EXPR, arg0, arg1), arg2);
13719 if (integer_zerop (arg2))
13720 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
13721
13722 return fold_fma (loc, type, arg0, arg1, arg2);
13723
13724 case VEC_PERM_EXPR:
13725 if (TREE_CODE (arg2) == VECTOR_CST)
13726 {
13727 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
13728 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
13729 unsigned char *sel2 = sel + nelts;
13730 bool need_mask_canon = false;
13731 bool need_mask_canon2 = false;
13732 bool all_in_vec0 = true;
13733 bool all_in_vec1 = true;
13734 bool maybe_identity = true;
13735 bool single_arg = (op0 == op1);
13736 bool changed = false;
13737
13738 mask2 = 2 * nelts - 1;
13739 mask = single_arg ? (nelts - 1) : mask2;
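/* E.g. for V4SI the selector elements are reduced modulo 8 (2*nelts),
   or modulo 4 when both inputs are the same vector; selectors that
   were out of range are canonicalized below.  */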
13740 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
13741 for (i = 0; i < nelts; i++)
13742 {
13743 tree val = VECTOR_CST_ELT (arg2, i);
13744 if (TREE_CODE (val) != INTEGER_CST)
13745 return NULL_TREE;
13746
13747 /* Make sure that the perm value is in an acceptable
13748 range. */
13749 wide_int t = val;
13750 need_mask_canon |= wi::gtu_p (t, mask);
13751 need_mask_canon2 |= wi::gtu_p (t, mask2);
13752 sel[i] = t.to_uhwi () & mask;
13753 sel2[i] = t.to_uhwi () & mask2;
13754
13755 if (sel[i] < nelts)
13756 all_in_vec1 = false;
13757 else
13758 all_in_vec0 = false;
13759
13760 if ((sel[i] & (nelts-1)) != i)
13761 maybe_identity = false;
13762 }
13763
13764 if (maybe_identity)
13765 {
13766 if (all_in_vec0)
13767 return op0;
13768 if (all_in_vec1)
13769 return op1;
13770 }
13771
13772 if (all_in_vec0)
13773 op1 = op0;
13774 else if (all_in_vec1)
13775 {
13776 op0 = op1;
13777 for (i = 0; i < nelts; i++)
13778 sel[i] -= nelts;
13779 need_mask_canon = true;
13780 }
13781
13782 if ((TREE_CODE (op0) == VECTOR_CST
13783 || TREE_CODE (op0) == CONSTRUCTOR)
13784 && (TREE_CODE (op1) == VECTOR_CST
13785 || TREE_CODE (op1) == CONSTRUCTOR))
13786 {
13787 tree t = fold_vec_perm (type, op0, op1, sel);
13788 if (t != NULL_TREE)
13789 return t;
13790 }
13791
13792 if (op0 == op1 && !single_arg)
13793 changed = true;
13794
13795 /* Some targets are deficient and fail to expand a single
13796 argument permutation while still allowing an equivalent
13797 2-argument version. */
13798 if (need_mask_canon && arg2 == op2
13799 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
13800 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
13801 {
13802 need_mask_canon = need_mask_canon2;
13803 sel = sel2;
13804 }
13805
13806 if (need_mask_canon && arg2 == op2)
13807 {
13808 tree *tsel = XALLOCAVEC (tree, nelts);
13809 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
13810 for (i = 0; i < nelts; i++)
13811 tsel[i] = build_int_cst (eltype, sel[i]);
13812 op2 = build_vector (TREE_TYPE (arg2), tsel);
13813 changed = true;
13814 }
13815
13816 if (changed)
13817 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
13818 }
13819 return NULL_TREE;
13820
13821 default:
13822 return NULL_TREE;
13823 } /* switch (code) */
13824 }
13825
13826 /* Perform constant folding and related simplification of EXPR.
13827 The related simplifications include x*1 => x, x*0 => 0, etc.,
13828 and application of the associative law.
13829 NOP_EXPR conversions may be removed freely (as long as we
13830 are careful not to change the type of the overall expression).
13831 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
13832 but we can constant-fold them if they have constant operands. */
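
/* A minimal usage sketch (assuming the usual tree-building entry points):

     fold (build2 (PLUS_EXPR, integer_type_node,
                   integer_one_node, integer_one_node))

   should return an INTEGER_CST of value 2.  */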
13833
13834 #ifdef ENABLE_FOLD_CHECKING
13835 # define fold(x) fold_1 (x)
13836 static tree fold_1 (tree);
13837 static
13838 #endif
13839 tree
13840 fold (tree expr)
13841 {
13842 const tree t = expr;
13843 enum tree_code code = TREE_CODE (t);
13844 enum tree_code_class kind = TREE_CODE_CLASS (code);
13845 tree tem;
13846 location_t loc = EXPR_LOCATION (expr);
13847
13848 /* Return right away if a constant. */
13849 if (kind == tcc_constant)
13850 return t;
13851
13852 /* CALL_EXPR-like objects with variable numbers of operands are
13853 treated specially. */
13854 if (kind == tcc_vl_exp)
13855 {
13856 if (code == CALL_EXPR)
13857 {
13858 tem = fold_call_expr (loc, expr, false);
13859 return tem ? tem : expr;
13860 }
13861 return expr;
13862 }
13863
13864 if (IS_EXPR_CODE_CLASS (kind))
13865 {
13866 tree type = TREE_TYPE (t);
13867 tree op0, op1, op2;
13868
13869 switch (TREE_CODE_LENGTH (code))
13870 {
13871 case 1:
13872 op0 = TREE_OPERAND (t, 0);
13873 tem = fold_unary_loc (loc, code, type, op0);
13874 return tem ? tem : expr;
13875 case 2:
13876 op0 = TREE_OPERAND (t, 0);
13877 op1 = TREE_OPERAND (t, 1);
13878 tem = fold_binary_loc (loc, code, type, op0, op1);
13879 return tem ? tem : expr;
13880 case 3:
13881 op0 = TREE_OPERAND (t, 0);
13882 op1 = TREE_OPERAND (t, 1);
13883 op2 = TREE_OPERAND (t, 2);
13884 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
13885 return tem ? tem : expr;
13886 default:
13887 break;
13888 }
13889 }
13890
13891 switch (code)
13892 {
13893 case ARRAY_REF:
13894 {
13895 tree op0 = TREE_OPERAND (t, 0);
13896 tree op1 = TREE_OPERAND (t, 1);
13897
13898 if (TREE_CODE (op1) == INTEGER_CST
13899 && TREE_CODE (op0) == CONSTRUCTOR
13900 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
13901 {
13902 vec<constructor_elt, va_gc> *elts = CONSTRUCTOR_ELTS (op0);
13903 unsigned HOST_WIDE_INT end = vec_safe_length (elts);
13904 unsigned HOST_WIDE_INT begin = 0;
13905
13906 /* Find a matching index by means of a binary search. */
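/* This relies on the constructor elements being sorted by
   (possibly RANGE_EXPR) index.  */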
13907 while (begin != end)
13908 {
13909 unsigned HOST_WIDE_INT middle = (begin + end) / 2;
13910 tree index = (*elts)[middle].index;
13911
13912 if (TREE_CODE (index) == INTEGER_CST
13913 && tree_int_cst_lt (index, op1))
13914 begin = middle + 1;
13915 else if (TREE_CODE (index) == INTEGER_CST
13916 && tree_int_cst_lt (op1, index))
13917 end = middle;
13918 else if (TREE_CODE (index) == RANGE_EXPR
13919 && tree_int_cst_lt (TREE_OPERAND (index, 1), op1))
13920 begin = middle + 1;
13921 else if (TREE_CODE (index) == RANGE_EXPR
13922 && tree_int_cst_lt (op1, TREE_OPERAND (index, 0)))
13923 end = middle;
13924 else
13925 return (*elts)[middle].value;
13926 }
13927 }
13928
13929 return t;
13930 }
13931
13932 /* Return a VECTOR_CST if possible. */
13933 case CONSTRUCTOR:
13934 {
13935 tree type = TREE_TYPE (t);
13936 if (TREE_CODE (type) != VECTOR_TYPE)
13937 return t;
13938
13939 tree *vec = XALLOCAVEC (tree, TYPE_VECTOR_SUBPARTS (type));
13940 unsigned HOST_WIDE_INT idx, pos = 0;
13941 tree value;
13942
13943 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), idx, value)
13944 {
13945 if (!CONSTANT_CLASS_P (value))
13946 return t;
13947 if (TREE_CODE (value) == VECTOR_CST)
13948 {
13949 for (unsigned i = 0; i < VECTOR_CST_NELTS (value); ++i)
13950 vec[pos++] = VECTOR_CST_ELT (value, i);
13951 }
13952 else
13953 vec[pos++] = value;
13954 }
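/* Elements without an initializer are implicitly zero; e.g. a
   four-element vector constructor {1, 2} folds to the VECTOR_CST
   {1, 2, 0, 0}.  */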
13955 for (; pos < TYPE_VECTOR_SUBPARTS (type); ++pos)
13956 vec[pos] = build_zero_cst (TREE_TYPE (type));
13957
13958 return build_vector (type, vec);
13959 }
13960
13961 case CONST_DECL:
13962 return fold (DECL_INITIAL (t));
13963
13964 default:
13965 return t;
13966 } /* switch (code) */
13967 }
13968
13969 #ifdef ENABLE_FOLD_CHECKING
13970 #undef fold
13971
13972 static void fold_checksum_tree (const_tree, struct md5_ctx *,
13973 hash_table<pointer_hash<const tree_node> > *);
13974 static void fold_check_failed (const_tree, const_tree);
13975 void print_fold_checksum (const_tree);
13976
13977 /* When configured with --enable-checking=fold, compute a digest of
13978 EXPR before and after the actual fold call, to verify that fold did
13979 not accidentally change the original expression. */
13980
13981 tree
13982 fold (tree expr)
13983 {
13984 tree ret;
13985 struct md5_ctx ctx;
13986 unsigned char checksum_before[16], checksum_after[16];
13987 hash_table<pointer_hash<const tree_node> > ht (32);
13988
13989 md5_init_ctx (&ctx);
13990 fold_checksum_tree (expr, &ctx, &ht);
13991 md5_finish_ctx (&ctx, checksum_before);
13992 ht.empty ();
13993
13994 ret = fold_1 (expr);
13995
13996 md5_init_ctx (&ctx);
13997 fold_checksum_tree (expr, &ctx, &ht);
13998 md5_finish_ctx (&ctx, checksum_after);
13999
14000 if (memcmp (checksum_before, checksum_after, 16))
14001 fold_check_failed (expr, ret);
14002
14003 return ret;
14004 }
14005
14006 void
14007 print_fold_checksum (const_tree expr)
14008 {
14009 struct md5_ctx ctx;
14010 unsigned char checksum[16], cnt;
14011 hash_table<pointer_hash<const tree_node> > ht (32);
14012
14013 md5_init_ctx (&ctx);
14014 fold_checksum_tree (expr, &ctx, &ht);
14015 md5_finish_ctx (&ctx, checksum);
14016 for (cnt = 0; cnt < 16; ++cnt)
14017 fprintf (stderr, "%02x", checksum[cnt]);
14018 putc ('\n', stderr);
14019 }
14020
14021 static void
14022 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
14023 {
14024 internal_error ("fold check: original tree changed by fold");
14025 }
14026
14027 static void
14028 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
14029 hash_table<pointer_hash <const tree_node> > *ht)
14030 {
14031 const tree_node **slot;
14032 enum tree_code code;
14033 union tree_node buf;
14034 int i, len;
14035
14036 recursive_label:
14037 if (expr == NULL)
14038 return;
14039 slot = ht->find_slot (expr, INSERT);
14040 if (*slot != NULL)
14041 return;
14042 *slot = expr;
14043 code = TREE_CODE (expr);
14044 if (TREE_CODE_CLASS (code) == tcc_declaration
14045 && HAS_DECL_ASSEMBLER_NAME_P (expr))
14046 {
14047 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
14048 memcpy ((char *) &buf, expr, tree_size (expr));
14049 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
14050 buf.decl_with_vis.symtab_node = NULL;
14051 expr = (tree) &buf;
14052 }
14053 else if (TREE_CODE_CLASS (code) == tcc_type
14054 && (TYPE_POINTER_TO (expr)
14055 || TYPE_REFERENCE_TO (expr)
14056 || TYPE_CACHED_VALUES_P (expr)
14057 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
14058 || TYPE_NEXT_VARIANT (expr)))
14059 {
14060 /* Allow these fields to be modified. */
14061 tree tmp;
14062 memcpy ((char *) &buf, expr, tree_size (expr));
14063 expr = tmp = (tree) &buf;
14064 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
14065 TYPE_POINTER_TO (tmp) = NULL;
14066 TYPE_REFERENCE_TO (tmp) = NULL;
14067 TYPE_NEXT_VARIANT (tmp) = NULL;
14068 if (TYPE_CACHED_VALUES_P (tmp))
14069 {
14070 TYPE_CACHED_VALUES_P (tmp) = 0;
14071 TYPE_CACHED_VALUES (tmp) = NULL;
14072 }
14073 }
14074 md5_process_bytes (expr, tree_size (expr), ctx);
14075 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
14076 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
14077 if (TREE_CODE_CLASS (code) != tcc_type
14078 && TREE_CODE_CLASS (code) != tcc_declaration
14079 && code != TREE_LIST
14080 && code != SSA_NAME
14081 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
14082 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
14083 switch (TREE_CODE_CLASS (code))
14084 {
14085 case tcc_constant:
14086 switch (code)
14087 {
14088 case STRING_CST:
14089 md5_process_bytes (TREE_STRING_POINTER (expr),
14090 TREE_STRING_LENGTH (expr), ctx);
14091 break;
14092 case COMPLEX_CST:
14093 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
14094 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
14095 break;
14096 case VECTOR_CST:
14097 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
14098 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
14099 break;
14100 default:
14101 break;
14102 }
14103 break;
14104 case tcc_exceptional:
14105 switch (code)
14106 {
14107 case TREE_LIST:
14108 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
14109 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
14110 expr = TREE_CHAIN (expr);
14111 goto recursive_label;
14112 break;
14113 case TREE_VEC:
14114 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
14115 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
14116 break;
14117 default:
14118 break;
14119 }
14120 break;
14121 case tcc_expression:
14122 case tcc_reference:
14123 case tcc_comparison:
14124 case tcc_unary:
14125 case tcc_binary:
14126 case tcc_statement:
14127 case tcc_vl_exp:
14128 len = TREE_OPERAND_LENGTH (expr);
14129 for (i = 0; i < len; ++i)
14130 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
14131 break;
14132 case tcc_declaration:
14133 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
14134 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
14135 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
14136 {
14137 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
14138 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
14139 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
14140 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
14141 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
14142 }
14143
14144 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
14145 {
14146 if (TREE_CODE (expr) == FUNCTION_DECL)
14147 {
14148 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
14149 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
14150 }
14151 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
14152 }
14153 break;
14154 case tcc_type:
14155 if (TREE_CODE (expr) == ENUMERAL_TYPE)
14156 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
14157 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
14158 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
14159 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
14160 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
14161 if (INTEGRAL_TYPE_P (expr)
14162 || SCALAR_FLOAT_TYPE_P (expr))
14163 {
14164 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
14165 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
14166 }
14167 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
14168 if (TREE_CODE (expr) == RECORD_TYPE
14169 || TREE_CODE (expr) == UNION_TYPE
14170 || TREE_CODE (expr) == QUAL_UNION_TYPE)
14171 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
14172 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
14173 break;
14174 default:
14175 break;
14176 }
14177 }
14178
14179 /* Helper function for outputting the checksum of a tree T. When
14180 debugging with gdb, you can "define mynext" to be "next" followed
14181 by "call debug_fold_checksum (op0)", then just trace down till the
14182 outputs differ. */
14183
14184 DEBUG_FUNCTION void
14185 debug_fold_checksum (const_tree t)
14186 {
14187 int i;
14188 unsigned char checksum[16];
14189 struct md5_ctx ctx;
14190 hash_table<pointer_hash<const tree_node> > ht (32);
14191
14192 md5_init_ctx (&ctx);
14193 fold_checksum_tree (t, &ctx, &ht);
14194 md5_finish_ctx (&ctx, checksum);
14195 ht.empty ();
14196
14197 for (i = 0; i < 16; i++)
14198 fprintf (stderr, "%d ", checksum[i]);
14199
14200 fprintf (stderr, "\n");
14201 }
14202
14203 #endif
14204
14205 /* Fold a unary tree expression with code CODE of type TYPE with an
14206 operand OP0. LOC is the location of the resulting expression.
14207 Return a folded expression if successful. Otherwise, return a tree
14208 expression with code CODE of type TYPE with an operand OP0. */
14209
14210 tree
14211 fold_build1_stat_loc (location_t loc,
14212 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
14213 {
14214 tree tem;
14215 #ifdef ENABLE_FOLD_CHECKING
14216 unsigned char checksum_before[16], checksum_after[16];
14217 struct md5_ctx ctx;
14218 hash_table<pointer_hash<const tree_node> > ht (32);
14219
14220 md5_init_ctx (&ctx);
14221 fold_checksum_tree (op0, &ctx, &ht);
14222 md5_finish_ctx (&ctx, checksum_before);
14223 ht.empty ();
14224 #endif
14225
14226 tem = fold_unary_loc (loc, code, type, op0);
14227 if (!tem)
14228 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
14229
14230 #ifdef ENABLE_FOLD_CHECKING
14231 md5_init_ctx (&ctx);
14232 fold_checksum_tree (op0, &ctx, &ht);
14233 md5_finish_ctx (&ctx, checksum_after);
14234
14235 if (memcmp (checksum_before, checksum_after, 16))
14236 fold_check_failed (op0, tem);
14237 #endif
14238 return tem;
14239 }
14240
14241 /* Fold a binary tree expression with code CODE of type TYPE with
14242 operands OP0 and OP1. LOC is the location of the resulting
14243 expression. Return a folded expression if successful. Otherwise,
14244 return a tree expression with code CODE of type TYPE with operands
14245 OP0 and OP1. */
14246
14247 tree
14248 fold_build2_stat_loc (location_t loc,
14249 enum tree_code code, tree type, tree op0, tree op1
14250 MEM_STAT_DECL)
14251 {
14252 tree tem;
14253 #ifdef ENABLE_FOLD_CHECKING
14254 unsigned char checksum_before_op0[16],
14255 checksum_before_op1[16],
14256 checksum_after_op0[16],
14257 checksum_after_op1[16];
14258 struct md5_ctx ctx;
14259 hash_table<pointer_hash<const tree_node> > ht (32);
14260
14261 md5_init_ctx (&ctx);
14262 fold_checksum_tree (op0, &ctx, &ht);
14263 md5_finish_ctx (&ctx, checksum_before_op0);
14264 ht.empty ();
14265
14266 md5_init_ctx (&ctx);
14267 fold_checksum_tree (op1, &ctx, &ht);
14268 md5_finish_ctx (&ctx, checksum_before_op1);
14269 ht.empty ();
14270 #endif
14271
14272 tem = fold_binary_loc (loc, code, type, op0, op1);
14273 if (!tem)
14274 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
14275
14276 #ifdef ENABLE_FOLD_CHECKING
14277 md5_init_ctx (&ctx);
14278 fold_checksum_tree (op0, &ctx, &ht);
14279 md5_finish_ctx (&ctx, checksum_after_op0);
14280 ht.empty ();
14281
14282 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14283 fold_check_failed (op0, tem);
14284
14285 md5_init_ctx (&ctx);
14286 fold_checksum_tree (op1, &ctx, &ht);
14287 md5_finish_ctx (&ctx, checksum_after_op1);
14288
14289 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14290 fold_check_failed (op1, tem);
14291 #endif
14292 return tem;
14293 }
14294
14295 /* Fold a ternary tree expression with code CODE of type TYPE with
14296 operands OP0, OP1, and OP2. Return a folded expression if
14297 successful. Otherwise, return a tree expression with code CODE of
14298 type TYPE with operands OP0, OP1, and OP2. */
14299
14300 tree
14301 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
14302 tree op0, tree op1, tree op2 MEM_STAT_DECL)
14303 {
14304 tree tem;
14305 #ifdef ENABLE_FOLD_CHECKING
14306 unsigned char checksum_before_op0[16],
14307 checksum_before_op1[16],
14308 checksum_before_op2[16],
14309 checksum_after_op0[16],
14310 checksum_after_op1[16],
14311 checksum_after_op2[16];
14312 struct md5_ctx ctx;
14313 hash_table<pointer_hash<const tree_node> > ht (32);
14314
14315 md5_init_ctx (&ctx);
14316 fold_checksum_tree (op0, &ctx, &ht);
14317 md5_finish_ctx (&ctx, checksum_before_op0);
14318 ht.empty ();
14319
14320 md5_init_ctx (&ctx);
14321 fold_checksum_tree (op1, &ctx, &ht);
14322 md5_finish_ctx (&ctx, checksum_before_op1);
14323 ht.empty ();
14324
14325 md5_init_ctx (&ctx);
14326 fold_checksum_tree (op2, &ctx, &ht);
14327 md5_finish_ctx (&ctx, checksum_before_op2);
14328 ht.empty ();
14329 #endif
14330
14331 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
14332 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
14333 if (!tem)
14334 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
14335
14336 #ifdef ENABLE_FOLD_CHECKING
14337 md5_init_ctx (&ctx);
14338 fold_checksum_tree (op0, &ctx, &ht);
14339 md5_finish_ctx (&ctx, checksum_after_op0);
14340 ht.empty ();
14341
14342 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
14343 fold_check_failed (op0, tem);
14344
14345 md5_init_ctx (&ctx);
14346 fold_checksum_tree (op1, &ctx, &ht);
14347 md5_finish_ctx (&ctx, checksum_after_op1);
14348 ht.empty ();
14349
14350 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
14351 fold_check_failed (op1, tem);
14352
14353 md5_init_ctx (&ctx);
14354 fold_checksum_tree (op2, &ctx, &ht);
14355 md5_finish_ctx (&ctx, checksum_after_op2);
14356
14357 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
14358 fold_check_failed (op2, tem);
14359 #endif
14360 return tem;
14361 }
14362
14363 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
14364 arguments in ARGARRAY, and a null static chain.
14365 Return a folded expression if successful. Otherwise, return a CALL_EXPR
14366 of type TYPE from the given operands as constructed by build_call_array. */
14367
14368 tree
14369 fold_build_call_array_loc (location_t loc, tree type, tree fn,
14370 int nargs, tree *argarray)
14371 {
14372 tree tem;
14373 #ifdef ENABLE_FOLD_CHECKING
14374 unsigned char checksum_before_fn[16],
14375 checksum_before_arglist[16],
14376 checksum_after_fn[16],
14377 checksum_after_arglist[16];
14378 struct md5_ctx ctx;
14379 hash_table<pointer_hash<const tree_node> > ht (32);
14380 int i;
14381
14382 md5_init_ctx (&ctx);
14383 fold_checksum_tree (fn, &ctx, &ht);
14384 md5_finish_ctx (&ctx, checksum_before_fn);
14385 ht.empty ();
14386
14387 md5_init_ctx (&ctx);
14388 for (i = 0; i < nargs; i++)
14389 fold_checksum_tree (argarray[i], &ctx, &ht);
14390 md5_finish_ctx (&ctx, checksum_before_arglist);
14391 ht.empty ();
14392 #endif
14393
14394 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
14395 if (!tem)
14396 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
14397
14398 #ifdef ENABLE_FOLD_CHECKING
14399 md5_init_ctx (&ctx);
14400 fold_checksum_tree (fn, &ctx, &ht);
14401 md5_finish_ctx (&ctx, checksum_after_fn);
14402 ht.empty ();
14403
14404 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
14405 fold_check_failed (fn, tem);
14406
14407 md5_init_ctx (&ctx);
14408 for (i = 0; i < nargs; i++)
14409 fold_checksum_tree (argarray[i], &ctx, &ht);
14410 md5_finish_ctx (&ctx, checksum_after_arglist);
14411
14412 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
14413 fold_check_failed (NULL_TREE, tem);
14414 #endif
14415 return tem;
14416 }
14417
14418 /* Perform constant folding and related simplification of an initializer
14419 expression. These entry points behave identically to "fold_buildN" but
14420 ignore potential run-time traps and exceptions that fold must preserve. */
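/* For example, under -frounding-math a run-time division such as
   1.0/3.0 must not be folded because the dynamic rounding mode is
   unknown, but a static initializer has no run-time context, so the
   *_initializer_loc variants fold it anyway.  */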
14421
14422 #define START_FOLD_INIT \
14423 int saved_signaling_nans = flag_signaling_nans;\
14424 int saved_trapping_math = flag_trapping_math;\
14425 int saved_rounding_math = flag_rounding_math;\
14426 int saved_trapv = flag_trapv;\
14427 int saved_folding_initializer = folding_initializer;\
14428 flag_signaling_nans = 0;\
14429 flag_trapping_math = 0;\
14430 flag_rounding_math = 0;\
14431 flag_trapv = 0;\
14432 folding_initializer = 1;
14433
14434 #define END_FOLD_INIT \
14435 flag_signaling_nans = saved_signaling_nans;\
14436 flag_trapping_math = saved_trapping_math;\
14437 flag_rounding_math = saved_rounding_math;\
14438 flag_trapv = saved_trapv;\
14439 folding_initializer = saved_folding_initializer;
14440
14441 tree
14442 fold_build1_initializer_loc (location_t loc, enum tree_code code,
14443 tree type, tree op)
14444 {
14445 tree result;
14446 START_FOLD_INIT;
14447
14448 result = fold_build1_loc (loc, code, type, op);
14449
14450 END_FOLD_INIT;
14451 return result;
14452 }
14453
14454 tree
14455 fold_build2_initializer_loc (location_t loc, enum tree_code code,
14456 tree type, tree op0, tree op1)
14457 {
14458 tree result;
14459 START_FOLD_INIT;
14460
14461 result = fold_build2_loc (loc, code, type, op0, op1);
14462
14463 END_FOLD_INIT;
14464 return result;
14465 }
14466
14467 tree
14468 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
14469 int nargs, tree *argarray)
14470 {
14471 tree result;
14472 START_FOLD_INIT;
14473
14474 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
14475
14476 END_FOLD_INIT;
14477 return result;
14478 }
14479
14480 #undef START_FOLD_INIT
14481 #undef END_FOLD_INIT
14482
14483 /* Determine whether the first argument is a multiple of the second argument.
14484 Return 0 if it is not, or if we cannot easily determine it to be.
14485
14486 An example of the sort of thing we care about (at this point; this routine
14487 could surely be made more general, and expanded to do what the *_DIV_EXPR's
14488 fold cases do now) is discovering that
14489
14490 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14491
14492 is a multiple of
14493
14494 SAVE_EXPR (J * 8)
14495
14496 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
14497
14498 This code also handles discovering that
14499
14500 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
14501
14502 is a multiple of 8, so we don't have to worry about a possible
14503 remainder.
14504
14505 Note that we *look* inside a SAVE_EXPR only to determine how it was
14506 calculated; it is not safe for fold to do much of anything else with the
14507 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
14508 at run time. For example, the latter example above *cannot* be implemented
14509 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
14510 evaluation time of the original SAVE_EXPR is not necessarily the same at
14511 the time the new expression is evaluated. The only optimization of this
14512 sort that would be valid is changing
14513
14514 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
14515
14516 divided by 8 to
14517
14518 SAVE_EXPR (I) * SAVE_EXPR (J)
14519
14520 (where the same SAVE_EXPR (J) is used in the original and the
14521 transformed version). */
14522
14523 int
14524 multiple_of_p (tree type, const_tree top, const_tree bottom)
14525 {
14526 if (operand_equal_p (top, bottom, 0))
14527 return 1;
14528
14529 if (TREE_CODE (type) != INTEGER_TYPE)
14530 return 0;
14531
14532 switch (TREE_CODE (top))
14533 {
14534 case BIT_AND_EXPR:
14535 /* Bitwise and provides a power of two multiple. If the mask is
14536 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
14537 if (!integer_pow2p (bottom))
14538 return 0;
14539 /* FALLTHRU */
14540
14541 case MULT_EXPR:
14542 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14543 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14544
14545 case PLUS_EXPR:
14546 case MINUS_EXPR:
14547 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
14548 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
14549
14550 case LSHIFT_EXPR:
14551 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
14552 {
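/* TOP is X << C. Since X << C == X * (1 << C), it is a multiple of
   BOTTOM whenever 1 << C is; e.g. I << 3 is a multiple of 8.  */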
14553 tree op1, t1;
14554
14555 op1 = TREE_OPERAND (top, 1);
14556 /* const_binop may not detect overflow correctly,
14557 so check for it explicitly here. */
14558 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
14559 && 0 != (t1 = fold_convert (type,
14560 const_binop (LSHIFT_EXPR,
14561 size_one_node,
14562 op1)))
14563 && !TREE_OVERFLOW (t1))
14564 return multiple_of_p (type, t1, bottom);
14565 }
14566 return 0;
14567
14568 case NOP_EXPR:
14569 /* Can't handle conversions from non-integral or wider integral type. */
14570 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
14571 || (TYPE_PRECISION (type)
14572 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
14573 return 0;
14574
14575 /* ... fall through ... */
14576
14577 case SAVE_EXPR:
14578 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
14579
14580 case COND_EXPR:
14581 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
14582 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
14583
14584 case INTEGER_CST:
14585 if (TREE_CODE (bottom) != INTEGER_CST
14586 || integer_zerop (bottom)
14587 || (TYPE_UNSIGNED (type)
14588 && (tree_int_cst_sgn (top) < 0
14589 || tree_int_cst_sgn (bottom) < 0)))
14590 return 0;
14591 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
14592 SIGNED);
14593
14594 default:
14595 return 0;
14596 }
14597 }
14598
14599 /* Return true if CODE or TYPE is known to be non-negative. */
14600
14601 static bool
14602 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
14603 {
14604 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
14605 && truth_value_p (code))
14606 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
14607 have a signed:1 type (where the values are -1 and 0). */
14608 return true;
14609 return false;
14610 }
14611
14612 /* Return true if (CODE OP0) is known to be non-negative. If the return
14613 value is based on the assumption that signed overflow is undefined,
14614 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14615 *STRICT_OVERFLOW_P. */
14616
14617 bool
14618 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14619 bool *strict_overflow_p)
14620 {
14621 if (TYPE_UNSIGNED (type))
14622 return true;
14623
14624 switch (code)
14625 {
14626 case ABS_EXPR:
14627 /* We can't return true when overflow wraps (e.g. with -fwrapv),
14628 because ABS_EXPR<INT_MIN> == INT_MIN. */
14629 if (!ANY_INTEGRAL_TYPE_P (type))
14630 return true;
14631 if (TYPE_OVERFLOW_UNDEFINED (type))
14632 {
14633 *strict_overflow_p = true;
14634 return true;
14635 }
14636 break;
14637
14638 case NON_LVALUE_EXPR:
14639 case FLOAT_EXPR:
14640 case FIX_TRUNC_EXPR:
14641 return tree_expr_nonnegative_warnv_p (op0,
14642 strict_overflow_p);
14643
14644 CASE_CONVERT:
14645 {
14646 tree inner_type = TREE_TYPE (op0);
14647 tree outer_type = type;
14648
14649 if (TREE_CODE (outer_type) == REAL_TYPE)
14650 {
14651 if (TREE_CODE (inner_type) == REAL_TYPE)
14652 return tree_expr_nonnegative_warnv_p (op0,
14653 strict_overflow_p);
14654 if (INTEGRAL_TYPE_P (inner_type))
14655 {
14656 if (TYPE_UNSIGNED (inner_type))
14657 return true;
14658 return tree_expr_nonnegative_warnv_p (op0,
14659 strict_overflow_p);
14660 }
14661 }
14662 else if (INTEGRAL_TYPE_P (outer_type))
14663 {
14664 if (TREE_CODE (inner_type) == REAL_TYPE)
14665 return tree_expr_nonnegative_warnv_p (op0,
14666 strict_overflow_p);
14667 if (INTEGRAL_TYPE_P (inner_type))
14668 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
14669 && TYPE_UNSIGNED (inner_type);
14670 }
14671 }
14672 break;
14673
14674 default:
14675 return tree_simple_nonnegative_warnv_p (code, type);
14676 }
14677
14678 /* We don't know the sign of `t', so be conservative and return false. */
14679 return false;
14680 }
14681
14682 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
14683 value is based on the assumption that signed overflow is undefined,
14684 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14685 *STRICT_OVERFLOW_P. */
14686
14687 bool
14688 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
14689 tree op1, bool *strict_overflow_p)
14690 {
14691 if (TYPE_UNSIGNED (type))
14692 return true;
14693
14694 switch (code)
14695 {
14696 case POINTER_PLUS_EXPR:
14697 case PLUS_EXPR:
14698 if (FLOAT_TYPE_P (type))
14699 return (tree_expr_nonnegative_warnv_p (op0,
14700 strict_overflow_p)
14701 && tree_expr_nonnegative_warnv_p (op1,
14702 strict_overflow_p));
14703
14704 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
14705 both unsigned and at least 2 bits narrower than the result type. */
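/* E.g. with 32-bit int, (int) (unsigned short) x + (int) (unsigned short) y
   is at most 2 * 65535 and hence still nonnegative:
   MAX (16, 16) + 1 == 17 < 32.  */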
14706 if (TREE_CODE (type) == INTEGER_TYPE
14707 && TREE_CODE (op0) == NOP_EXPR
14708 && TREE_CODE (op1) == NOP_EXPR)
14709 {
14710 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
14711 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
14712 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
14713 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
14714 {
14715 unsigned int prec = MAX (TYPE_PRECISION (inner1),
14716 TYPE_PRECISION (inner2)) + 1;
14717 return prec < TYPE_PRECISION (type);
14718 }
14719 }
14720 break;
14721
14722 case MULT_EXPR:
14723 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
14724 {
14725 /* x * x is always non-negative for floating point x
14726 or without overflow. */
14727 if (operand_equal_p (op0, op1, 0)
14728 || (tree_expr_nonnegative_warnv_p (op0, strict_overflow_p)
14729 && tree_expr_nonnegative_warnv_p (op1, strict_overflow_p)))
14730 {
14731 if (ANY_INTEGRAL_TYPE_P (type)
14732 && TYPE_OVERFLOW_UNDEFINED (type))
14733 *strict_overflow_p = true;
14734 return true;
14735 }
14736 }
14737
14738 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
14739 both unsigned and their combined precision is less than the result's. */
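/* E.g. with 32-bit int, (int) (unsigned char) x * (int) (unsigned char) y
   is at most 255 * 255 == 65025 and hence nonnegative: 8 + 8 == 16 < 32.  */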
14740 if (TREE_CODE (type) == INTEGER_TYPE
14741 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
14742 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
14743 {
14744 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
14745 ? TREE_TYPE (TREE_OPERAND (op0, 0))
14746 : TREE_TYPE (op0);
14747 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
14748 ? TREE_TYPE (TREE_OPERAND (op1, 0))
14749 : TREE_TYPE (op1);
14750
14751 bool unsigned0 = TYPE_UNSIGNED (inner0);
14752 bool unsigned1 = TYPE_UNSIGNED (inner1);
14753
14754 if (TREE_CODE (op0) == INTEGER_CST)
14755 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
14756
14757 if (TREE_CODE (op1) == INTEGER_CST)
14758 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
14759
14760 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
14761 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
14762 {
14763 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
14764 ? tree_int_cst_min_precision (op0, UNSIGNED)
14765 : TYPE_PRECISION (inner0);
14766
14767 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
14768 ? tree_int_cst_min_precision (op1, UNSIGNED)
14769 : TYPE_PRECISION (inner1);
14770
14771 return precision0 + precision1 < TYPE_PRECISION (type);
14772 }
14773 }
14774 return false;
14775
14776 case BIT_AND_EXPR:
14777 case MAX_EXPR:
14778 return (tree_expr_nonnegative_warnv_p (op0,
14779 strict_overflow_p)
14780 || tree_expr_nonnegative_warnv_p (op1,
14781 strict_overflow_p));
14782
14783 case BIT_IOR_EXPR:
14784 case BIT_XOR_EXPR:
14785 case MIN_EXPR:
14786 case RDIV_EXPR:
14787 case TRUNC_DIV_EXPR:
14788 case CEIL_DIV_EXPR:
14789 case FLOOR_DIV_EXPR:
14790 case ROUND_DIV_EXPR:
14791 return (tree_expr_nonnegative_warnv_p (op0,
14792 strict_overflow_p)
14793 && tree_expr_nonnegative_warnv_p (op1,
14794 strict_overflow_p));
14795
14796 case TRUNC_MOD_EXPR:
14797 case CEIL_MOD_EXPR:
14798 case FLOOR_MOD_EXPR:
14799 case ROUND_MOD_EXPR:
14800 return tree_expr_nonnegative_warnv_p (op0,
14801 strict_overflow_p);
14802 default:
14803 return tree_simple_nonnegative_warnv_p (code, type);
14804 }
14805
14806 /* We don't know the sign of `t', so be conservative and return false. */
14807 return false;
14808 }
14809
14810 /* Return true if T is known to be non-negative. If the return
14811 value is based on the assumption that signed overflow is undefined,
14812 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14813 *STRICT_OVERFLOW_P. */
14814
14815 bool
14816 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14817 {
14818 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14819 return true;
14820
14821 switch (TREE_CODE (t))
14822 {
14823 case INTEGER_CST:
14824 return tree_int_cst_sgn (t) >= 0;
14825
14826 case REAL_CST:
14827 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
14828
14829 case FIXED_CST:
14830 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
14831
14832 case COND_EXPR:
14833 return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
14834 strict_overflow_p)
14835 && tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 2),
14836 strict_overflow_p));
14837 default:
14838 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
14839 TREE_TYPE (t));
14840 }
14841 /* We don't know the sign of `t', so be conservative and return false. */
14842 return false;
14843 }
14844
14845 /* Return true if T is known to be non-negative. If the return
14846 value is based on the assumption that signed overflow is undefined,
14847 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14848 *STRICT_OVERFLOW_P. */
14849
14850 bool
14851 tree_call_nonnegative_warnv_p (tree type, tree fndecl,
14852 tree arg0, tree arg1, bool *strict_overflow_p)
14853 {
14854 if (fndecl && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL)
14855 switch (DECL_FUNCTION_CODE (fndecl))
14856 {
14857 CASE_FLT_FN (BUILT_IN_ACOS):
14858 CASE_FLT_FN (BUILT_IN_ACOSH):
14859 CASE_FLT_FN (BUILT_IN_CABS):
14860 CASE_FLT_FN (BUILT_IN_COSH):
14861 CASE_FLT_FN (BUILT_IN_ERFC):
14862 CASE_FLT_FN (BUILT_IN_EXP):
14863 CASE_FLT_FN (BUILT_IN_EXP10):
14864 CASE_FLT_FN (BUILT_IN_EXP2):
14865 CASE_FLT_FN (BUILT_IN_FABS):
14866 CASE_FLT_FN (BUILT_IN_FDIM):
14867 CASE_FLT_FN (BUILT_IN_HYPOT):
14868 CASE_FLT_FN (BUILT_IN_POW10):
14869 CASE_INT_FN (BUILT_IN_FFS):
14870 CASE_INT_FN (BUILT_IN_PARITY):
14871 CASE_INT_FN (BUILT_IN_POPCOUNT):
14872 CASE_INT_FN (BUILT_IN_CLZ):
14873 CASE_INT_FN (BUILT_IN_CLRSB):
14874 case BUILT_IN_BSWAP32:
14875 case BUILT_IN_BSWAP64:
14876 /* Always true. */
14877 return true;
14878
14879 CASE_FLT_FN (BUILT_IN_SQRT):
14880 /* sqrt(-0.0) is -0.0. */
14881 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
14882 return true;
14883 return tree_expr_nonnegative_warnv_p (arg0,
14884 strict_overflow_p);
14885
14886 CASE_FLT_FN (BUILT_IN_ASINH):
14887 CASE_FLT_FN (BUILT_IN_ATAN):
14888 CASE_FLT_FN (BUILT_IN_ATANH):
14889 CASE_FLT_FN (BUILT_IN_CBRT):
14890 CASE_FLT_FN (BUILT_IN_CEIL):
14891 CASE_FLT_FN (BUILT_IN_ERF):
14892 CASE_FLT_FN (BUILT_IN_EXPM1):
14893 CASE_FLT_FN (BUILT_IN_FLOOR):
14894 CASE_FLT_FN (BUILT_IN_FMOD):
14895 CASE_FLT_FN (BUILT_IN_FREXP):
14896 CASE_FLT_FN (BUILT_IN_ICEIL):
14897 CASE_FLT_FN (BUILT_IN_IFLOOR):
14898 CASE_FLT_FN (BUILT_IN_IRINT):
14899 CASE_FLT_FN (BUILT_IN_IROUND):
14900 CASE_FLT_FN (BUILT_IN_LCEIL):
14901 CASE_FLT_FN (BUILT_IN_LDEXP):
14902 CASE_FLT_FN (BUILT_IN_LFLOOR):
14903 CASE_FLT_FN (BUILT_IN_LLCEIL):
14904 CASE_FLT_FN (BUILT_IN_LLFLOOR):
14905 CASE_FLT_FN (BUILT_IN_LLRINT):
14906 CASE_FLT_FN (BUILT_IN_LLROUND):
14907 CASE_FLT_FN (BUILT_IN_LRINT):
14908 CASE_FLT_FN (BUILT_IN_LROUND):
14909 CASE_FLT_FN (BUILT_IN_MODF):
14910 CASE_FLT_FN (BUILT_IN_NEARBYINT):
14911 CASE_FLT_FN (BUILT_IN_RINT):
14912 CASE_FLT_FN (BUILT_IN_ROUND):
14913 CASE_FLT_FN (BUILT_IN_SCALB):
14914 CASE_FLT_FN (BUILT_IN_SCALBLN):
14915 CASE_FLT_FN (BUILT_IN_SCALBN):
14916 CASE_FLT_FN (BUILT_IN_SIGNBIT):
14917 CASE_FLT_FN (BUILT_IN_SIGNIFICAND):
14918 CASE_FLT_FN (BUILT_IN_SINH):
14919 CASE_FLT_FN (BUILT_IN_TANH):
14920 CASE_FLT_FN (BUILT_IN_TRUNC):
14921 /* True if the 1st argument is nonnegative. */
14922 return tree_expr_nonnegative_warnv_p (arg0,
14923 strict_overflow_p);
14924
14925 CASE_FLT_FN (BUILT_IN_FMAX):
14926 /* True if the 1st OR the 2nd argument is nonnegative. */
14927 return (tree_expr_nonnegative_warnv_p (arg0,
14928 strict_overflow_p)
14929 || (tree_expr_nonnegative_warnv_p (arg1,
14930 strict_overflow_p)));
14931
14932 CASE_FLT_FN (BUILT_IN_FMIN):
14933 /* True if the 1st AND 2nd arguments are nonnegative. */
14934 return (tree_expr_nonnegative_warnv_p (arg0,
14935 strict_overflow_p)
14936 && (tree_expr_nonnegative_warnv_p (arg1,
14937 strict_overflow_p)));
14938
14939 CASE_FLT_FN (BUILT_IN_COPYSIGN):
14940 /* True if the 2nd argument is nonnegative. */
14941 return tree_expr_nonnegative_warnv_p (arg1,
14942 strict_overflow_p);
14943
14944 CASE_FLT_FN (BUILT_IN_POWI):
14945 /* True if the 1st argument is nonnegative or the second
14946 argument is an even integer. */
14947 if (TREE_CODE (arg1) == INTEGER_CST
14948 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
14949 return true;
14950 return tree_expr_nonnegative_warnv_p (arg0,
14951 strict_overflow_p);
14952
14953 CASE_FLT_FN (BUILT_IN_POW):
14954 /* True if the 1st argument is nonnegative or the second
14955 argument is an even integer valued real. */
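/* E.g. pow (x, 2.0) is nonnegative for any x, whereas for
   pow (x, 2.5) we fall back to checking the first argument.  */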
14956 if (TREE_CODE (arg1) == REAL_CST)
14957 {
14958 REAL_VALUE_TYPE c;
14959 HOST_WIDE_INT n;
14960
14961 c = TREE_REAL_CST (arg1);
14962 n = real_to_integer (&c);
14963 if ((n & 1) == 0)
14964 {
14965 REAL_VALUE_TYPE cint;
14966 real_from_integer (&cint, VOIDmode, n, SIGNED);
14967 if (real_identical (&c, &cint))
14968 return true;
14969 }
14970 }
14971 return tree_expr_nonnegative_warnv_p (arg0,
14972 strict_overflow_p);
14973
14974 default:
14975 break;
14976 }
14977 return tree_simple_nonnegative_warnv_p (CALL_EXPR,
14978 type);
14979 }
14980
14981 /* Return true if T is known to be non-negative. If the return
14982 value is based on the assumption that signed overflow is undefined,
14983 set *STRICT_OVERFLOW_P to true; otherwise, don't change
14984 *STRICT_OVERFLOW_P. */
14985
14986 static bool
14987 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
14988 {
14989 enum tree_code code = TREE_CODE (t);
14990 if (TYPE_UNSIGNED (TREE_TYPE (t)))
14991 return true;
14992
14993 switch (code)
14994 {
14995 case TARGET_EXPR:
14996 {
14997 tree temp = TARGET_EXPR_SLOT (t);
14998 t = TARGET_EXPR_INITIAL (t);
14999
15000 /* If the initializer is non-void, then it's a normal expression
15001 that will be assigned to the slot. */
15002 if (!VOID_TYPE_P (t))
15003 return tree_expr_nonnegative_warnv_p (t, strict_overflow_p);
15004
15005 /* Otherwise, the initializer sets the slot in some way. One common
15006 way is an assignment statement at the end of the initializer. */
15007 while (1)
15008 {
15009 if (TREE_CODE (t) == BIND_EXPR)
15010 t = expr_last (BIND_EXPR_BODY (t));
15011 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
15012 || TREE_CODE (t) == TRY_CATCH_EXPR)
15013 t = expr_last (TREE_OPERAND (t, 0));
15014 else if (TREE_CODE (t) == STATEMENT_LIST)
15015 t = expr_last (t);
15016 else
15017 break;
15018 }
15019 if (TREE_CODE (t) == MODIFY_EXPR
15020 && TREE_OPERAND (t, 0) == temp)
15021 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15022 strict_overflow_p);
15023
15024 return false;
15025 }
15026
15027 case CALL_EXPR:
15028 {
15029 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
15030 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
15031
15032 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
15033 get_callee_fndecl (t),
15034 arg0,
15035 arg1,
15036 strict_overflow_p);
15037 }
15038 case COMPOUND_EXPR:
15039 case MODIFY_EXPR:
15040 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 1),
15041 strict_overflow_p);
15042 case BIND_EXPR:
15043 return tree_expr_nonnegative_warnv_p (expr_last (TREE_OPERAND (t, 1)),
15044 strict_overflow_p);
15045 case SAVE_EXPR:
15046 return tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
15047 strict_overflow_p);
15048
15049 default:
15050 return tree_simple_nonnegative_warnv_p (TREE_CODE (t),
15051 TREE_TYPE (t));
15052 }
15053
15054 /* We don't know the sign of `t', so be conservative and return false. */
15055 return false;
15056 }
15057
15058 /* Return true if T is known to be non-negative. If the return
15059 value is based on the assumption that signed overflow is undefined,
15060 set *STRICT_OVERFLOW_P to true; otherwise, don't change
15061 *STRICT_OVERFLOW_P. */
15062
15063 bool
15064 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p)
15065 {
15066 enum tree_code code;
15067 if (t == error_mark_node)
15068 return false;
15069
15070 code = TREE_CODE (t);
15071 switch (TREE_CODE_CLASS (code))
15072 {
15073 case tcc_binary:
15074 case tcc_comparison:
15075 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15076 TREE_TYPE (t),
15077 TREE_OPERAND (t, 0),
15078 TREE_OPERAND (t, 1),
15079 strict_overflow_p);
15080
15081 case tcc_unary:
15082 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15083 TREE_TYPE (t),
15084 TREE_OPERAND (t, 0),
15085 strict_overflow_p);
15086
15087 case tcc_constant:
15088 case tcc_declaration:
15089 case tcc_reference:
15090 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15091
15092 default:
15093 break;
15094 }
15095
15096 switch (code)
15097 {
15098 case TRUTH_AND_EXPR:
15099 case TRUTH_OR_EXPR:
15100 case TRUTH_XOR_EXPR:
15101 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
15102 TREE_TYPE (t),
15103 TREE_OPERAND (t, 0),
15104 TREE_OPERAND (t, 1),
15105 strict_overflow_p);
15106 case TRUTH_NOT_EXPR:
15107 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
15108 TREE_TYPE (t),
15109 TREE_OPERAND (t, 0),
15110 strict_overflow_p);
15111
15112 case COND_EXPR:
15113 case CONSTRUCTOR:
15114 case OBJ_TYPE_REF:
15115 case ASSERT_EXPR:
15116 case ADDR_EXPR:
15117 case WITH_SIZE_EXPR:
15118 case SSA_NAME:
15119 return tree_single_nonnegative_warnv_p (t, strict_overflow_p);
15120
15121 default:
15122 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p);
15123 }
15124 }
15125
15126 /* Return true if `t' is known to be non-negative. Handle warnings
15127 about undefined signed overflow. */
15128
15129 bool
15130 tree_expr_nonnegative_p (tree t)
15131 {
15132 bool ret, strict_overflow_p;
15133
15134 strict_overflow_p = false;
15135 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
15136 if (strict_overflow_p)
15137 fold_overflow_warning (("assuming signed overflow does not occur when "
15138 "determining that expression is always "
15139 "non-negative"),
15140 WARN_STRICT_OVERFLOW_MISC);
15141 return ret;
15142 }
15143
15144
15145 /* Return true when (CODE OP0) is an address and is known to be nonzero.
15146 For floating point we further ensure that T is not denormal.
15147 Similar logic is present in nonzero_address_p in rtlanal.c.
15148
15149 If the return value is based on the assumption that signed overflow
15150 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15151 change *STRICT_OVERFLOW_P. */
15152
15153 bool
15154 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
15155 bool *strict_overflow_p)
15156 {
15157 switch (code)
15158 {
15159 case ABS_EXPR:
15160 return tree_expr_nonzero_warnv_p (op0,
15161 strict_overflow_p);
15162
15163 case NOP_EXPR:
15164 {
15165 tree inner_type = TREE_TYPE (op0);
15166 tree outer_type = type;
15167
15168 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
15169 && tree_expr_nonzero_warnv_p (op0,
15170 strict_overflow_p));
15171 }
15172 break;
15173
15174 case NON_LVALUE_EXPR:
15175 return tree_expr_nonzero_warnv_p (op0,
15176 strict_overflow_p);
15177
15178 default:
15179 break;
15180 }
15181
15182 return false;
15183 }
15184
15185 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
15186 For floating point we further ensure that T is not denormal.
15187 Similar logic is present in nonzero_address_p in rtlanal.c.
15188
15189 If the return value is based on the assumption that signed overflow
15190 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15191 change *STRICT_OVERFLOW_P. */
15192
15193 bool
15194 tree_binary_nonzero_warnv_p (enum tree_code code,
15195 tree type,
15196 tree op0,
15197 tree op1, bool *strict_overflow_p)
15198 {
15199 bool sub_strict_overflow_p;
15200 switch (code)
15201 {
15202 case POINTER_PLUS_EXPR:
15203 case PLUS_EXPR:
15204 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
15205 {
15206 /* In the presence of negative values it is hard
15207 to say anything definite. */
15208 sub_strict_overflow_p = false;
15209 if (!tree_expr_nonnegative_warnv_p (op0,
15210 &sub_strict_overflow_p)
15211 || !tree_expr_nonnegative_warnv_p (op1,
15212 &sub_strict_overflow_p))
15213 return false;
15214 /* One of the operands must be positive and the other non-negative. */
15215 /* We don't set *STRICT_OVERFLOW_P here: even if this value
15216 overflows, on a two's-complement machine the sum of two
15217 nonnegative numbers can never be zero. */
15218 return (tree_expr_nonzero_warnv_p (op0,
15219 strict_overflow_p)
15220 || tree_expr_nonzero_warnv_p (op1,
15221 strict_overflow_p));
15222 }
15223 break;
15224
15225 case MULT_EXPR:
15226 if (TYPE_OVERFLOW_UNDEFINED (type))
15227 {
15228 if (tree_expr_nonzero_warnv_p (op0,
15229 strict_overflow_p)
15230 && tree_expr_nonzero_warnv_p (op1,
15231 strict_overflow_p))
15232 {
15233 *strict_overflow_p = true;
15234 return true;
15235 }
15236 }
15237 break;
15238
15239 case MIN_EXPR:
15240 sub_strict_overflow_p = false;
15241 if (tree_expr_nonzero_warnv_p (op0,
15242 &sub_strict_overflow_p)
15243 && tree_expr_nonzero_warnv_p (op1,
15244 &sub_strict_overflow_p))
15245 {
15246 if (sub_strict_overflow_p)
15247 *strict_overflow_p = true;
15248 }
15249 break;
15250
15251 case MAX_EXPR:
15252 sub_strict_overflow_p = false;
15253 if (tree_expr_nonzero_warnv_p (op0,
15254 &sub_strict_overflow_p))
15255 {
15256 if (sub_strict_overflow_p)
15257 *strict_overflow_p = true;
15258
15259 /* When both operands are nonzero, MAX must be too. */
15260 if (tree_expr_nonzero_warnv_p (op1,
15261 strict_overflow_p))
15262 return true;
15263
15264 /* MAX where operand 0 is positive is positive. */
15265 return tree_expr_nonnegative_warnv_p (op0,
15266 strict_overflow_p);
15267 }
15268 /* MAX where operand 1 is positive is positive. */
15269 else if (tree_expr_nonzero_warnv_p (op1,
15270 &sub_strict_overflow_p)
15271 && tree_expr_nonnegative_warnv_p (op1,
15272 &sub_strict_overflow_p))
15273 {
15274 if (sub_strict_overflow_p)
15275 *strict_overflow_p = true;
15276 return true;
15277 }
15278 break;
15279
15280 case BIT_IOR_EXPR:
15281 return (tree_expr_nonzero_warnv_p (op1,
15282 strict_overflow_p)
15283 || tree_expr_nonzero_warnv_p (op0,
15284 strict_overflow_p));
15285
15286 default:
15287 break;
15288 }
15289
15290 return false;
15291 }
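
/* A sketch of the PLUS_EXPR reasoning above, guarded on the target
   actually treating signed int overflow as undefined; illustrative
   only (#if 0).  */
#if 0
static void
example_plus_nonzero (void)
{
  bool strict_overflow_p = false;
  tree one = build_int_cst (integer_type_node, 1);
  tree two = build_int_cst (integer_type_node, 2);

  /* Both operands are nonnegative and at least one is nonzero, so the
     sum is known nonzero; the constants themselves carry the proof, so
     strict_overflow_p remains false.  */
  if (TYPE_OVERFLOW_UNDEFINED (integer_type_node))
    gcc_assert (tree_binary_nonzero_warnv_p (PLUS_EXPR, integer_type_node,
                                             one, two, &strict_overflow_p));
}
#endif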
15292
15293 /* Return true when T is known to be nonzero. For floating point we
15294 further ensure that T is not denormal. Similar logic for addresses
15295 is present in nonzero_address_p in rtlanal.c.
15296
15297 If the return value is based on the assumption that signed overflow
15298 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
15299 change *STRICT_OVERFLOW_P. */
15300
15301 bool
15302 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
15303 {
15304 bool sub_strict_overflow_p;
15305 switch (TREE_CODE (t))
15306 {
15307 case INTEGER_CST:
15308 return !integer_zerop (t);
15309
15310 case ADDR_EXPR:
15311 {
15312 tree base = TREE_OPERAND (t, 0);
15313
15314 if (!DECL_P (base))
15315 base = get_base_address (base);
15316
15317 if (!base)
15318 return false;
15319
15320 /* For objects in the symbol table, check whether we know their addresses
15321 are nonzero. Don't do anything for variables and functions before the
15322 symtab is built; it is quite possible that they will be declared weak later. */
15323 if (DECL_P (base) && decl_in_symtab_p (base))
15324 {
15325 struct symtab_node *symbol;
15326
15327 symbol = symtab_node::get_create (base);
15328 if (symbol)
15329 return symbol->nonzero_address ();
15330 else
15331 return false;
15332 }
15333
15334 /* The address of a function-local object is never NULL. */
15335 if (DECL_P (base)
15336 && (DECL_CONTEXT (base)
15337 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
15338 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
15339 return true;
15340
15341 /* Constants are never weak. */
15342 if (CONSTANT_CLASS_P (base))
15343 return true;
15344
15345 return false;
15346 }
15347
15348 case COND_EXPR:
15349 sub_strict_overflow_p = false;
15350 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
15351 &sub_strict_overflow_p)
15352 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
15353 &sub_strict_overflow_p))
15354 {
15355 if (sub_strict_overflow_p)
15356 *strict_overflow_p = true;
15357 return true;
15358 }
15359 break;
15360
15361 default:
15362 break;
15363 }
15364 return false;
15365 }
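
/* A sketch for the INTEGER_CST case above; the example_* name is
   illustrative only, hence #if 0.  */
#if 0
static void
example_single_nonzero (void)
{
  bool strict_overflow_p = false;
  tree zero = build_int_cst (integer_type_node, 0);
  tree seven = build_int_cst (integer_type_node, 7);

  gcc_assert (!tree_single_nonzero_warnv_p (zero, &strict_overflow_p));
  gcc_assert (tree_single_nonzero_warnv_p (seven, &strict_overflow_p));
  /* For an ADDR_EXPR the answer depends on the base: function-local
     automatics and constants are nonzero, symbols go through symtab.  */
}
#endif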
15366
15367 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
15368 attempt to fold the expression to a constant without modifying TYPE,
15369 OP0 or OP1.
15370
15371 If the expression could be simplified to a constant, then return
15372 the constant. If the expression would not be simplified to a
15373 constant, then return NULL_TREE. */
15374
15375 tree
15376 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
15377 {
15378 tree tem = fold_binary (code, type, op0, op1);
15379 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15380 }
15381
15382 /* Given the components of a unary expression CODE, TYPE and OP0,
15383 attempt to fold the expression to a constant without modifying
15384 TYPE or OP0.
15385
15386 If the expression could be simplified to a constant, then return
15387 the constant. If the expression would not be simplified to a
15388 constant, then return NULL_TREE. */
15389
15390 tree
15391 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
15392 {
15393 tree tem = fold_unary (code, type, op0);
15394 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
15395 }
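
/* A sketch of both *_to_constant helpers on integer constants;
   illustrative only (#if 0).  */
#if 0
static void
example_fold_to_constant (void)
{
  tree three = build_int_cst (integer_type_node, 3);
  tree four = build_int_cst (integer_type_node, 4);

  /* 3 + 4 folds to the constant 7.  */
  tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node,
                                      three, four);
  gcc_assert (sum
              && tree_int_cst_equal (sum,
                                     build_int_cst (integer_type_node, 7)));

  /* -3 likewise folds through the unary entry point.  */
  tree neg = fold_unary_to_constant (NEGATE_EXPR, integer_type_node, three);
  gcc_assert (neg && TREE_CODE (neg) == INTEGER_CST);
}
#endif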
15396
15397 /* If EXP represents referencing an element in a constant string
15398 (either via pointer arithmetic or array indexing), return the
15399 tree representing the value accessed, otherwise return NULL. */
15400
15401 tree
15402 fold_read_from_constant_string (tree exp)
15403 {
15404 if ((TREE_CODE (exp) == INDIRECT_REF
15405 || TREE_CODE (exp) == ARRAY_REF)
15406 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
15407 {
15408 tree exp1 = TREE_OPERAND (exp, 0);
15409 tree index;
15410 tree string;
15411 location_t loc = EXPR_LOCATION (exp);
15412
15413 if (TREE_CODE (exp) == INDIRECT_REF)
15414 string = string_constant (exp1, &index);
15415 else
15416 {
15417 tree low_bound = array_ref_low_bound (exp);
15418 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
15419
15420 /* Optimize the special case of a zero lower bound.
15421
15422 We convert the low_bound to sizetype to avoid some problems
15423 with constant folding. (E.g. suppose the lower bound is 1,
15424 and its mode is QI. Without the conversion, (ARRAY
15425 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
15426 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
15427 if (! integer_zerop (low_bound))
15428 index = size_diffop_loc (loc, index,
15429 fold_convert_loc (loc, sizetype, low_bound));
15430
15431 string = exp1;
15432 }
15433
15434 if (string
15435 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
15436 && TREE_CODE (string) == STRING_CST
15437 && TREE_CODE (index) == INTEGER_CST
15438 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
15439 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
15440 == MODE_INT)
15441 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
15442 return build_int_cst_type (TREE_TYPE (exp),
15443 (TREE_STRING_POINTER (string)
15444 [TREE_INT_CST_LOW (index)]));
15445 }
15446 return NULL;
15447 }
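
/* A sketch of the shape of input this folder recognizes: "abc"[1]
   folds to the character constant 'b'.  Building trees by hand like
   this is normally the front end's job; illustrative only (#if 0).  */
#if 0
static void
example_read_from_string (void)
{
  tree str = build_string (4, "abc");
  TREE_TYPE (str) = build_array_type (char_type_node,
                                      build_index_type (size_int (3)));
  tree ref = build4 (ARRAY_REF, char_type_node, str, size_int (1),
                     NULL_TREE, NULL_TREE);
  tree c = fold_read_from_constant_string (ref);
  /* C, if nonnull, is the INTEGER_CST 'b'.  */
}
#endif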
15448
15449 /* Return the tree for neg (ARG0) when ARG0 is known to be either
15450 an integer, real, or fixed-point constant.
15451
15452 TYPE is the type of the result. */
15453
15454 static tree
15455 fold_negate_const (tree arg0, tree type)
15456 {
15457 tree t = NULL_TREE;
15458
15459 switch (TREE_CODE (arg0))
15460 {
15461 case INTEGER_CST:
15462 {
15463 bool overflow;
15464 wide_int val = wi::neg (arg0, &overflow);
15465 t = force_fit_type (type, val, 1,
15466 (overflow | TREE_OVERFLOW (arg0))
15467 && !TYPE_UNSIGNED (type));
15468 break;
15469 }
15470
15471 case REAL_CST:
15472 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15473 break;
15474
15475 case FIXED_CST:
15476 {
15477 FIXED_VALUE_TYPE f;
15478 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
15479 &(TREE_FIXED_CST (arg0)), NULL,
15480 TYPE_SATURATING (type));
15481 t = build_fixed (type, f);
15482 /* Propagate overflow flags. */
15483 if (overflow_p | TREE_OVERFLOW (arg0))
15484 TREE_OVERFLOW (t) = 1;
15485 break;
15486 }
15487
15488 default:
15489 gcc_unreachable ();
15490 }
15491
15492 return t;
15493 }
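
/* A sketch of the overflow handling above: negating the most negative
   int wraps back to itself, so the folded constant carries
   TREE_OVERFLOW.  Illustrative only (#if 0).  */
#if 0
static void
example_negate_overflow (void)
{
  tree int_min = TYPE_MIN_VALUE (integer_type_node);
  tree neg = fold_negate_const (int_min, integer_type_node);
  gcc_assert (TREE_OVERFLOW (neg));
}
#endif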
15494
15495 /* Return the tree for abs (ARG0) when ARG0 is known to be either
15496 an integer constant or real constant.
15497
15498 TYPE is the type of the result. */
15499
15500 tree
15501 fold_abs_const (tree arg0, tree type)
15502 {
15503 tree t = NULL_TREE;
15504
15505 switch (TREE_CODE (arg0))
15506 {
15507 case INTEGER_CST:
15508 {
15509 /* If the value is unsigned or non-negative, then the absolute value
15510 is the same as the ordinary value. */
15511 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
15512 t = arg0;
15513
15514 /* If the value is negative, then the absolute value is
15515 its negation. */
15516 else
15517 {
15518 bool overflow;
15519 wide_int val = wi::neg (arg0, &overflow);
15520 t = force_fit_type (type, val, -1,
15521 overflow | TREE_OVERFLOW (arg0));
15522 }
15523 }
15524 break;
15525
15526 case REAL_CST:
15527 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
15528 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
15529 else
15530 t = arg0;
15531 break;
15532
15533 default:
15534 gcc_unreachable ();
15535 }
15536
15537 return t;
15538 }
15539
15540 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
15541 constant. TYPE is the type of the result. */
15542
15543 static tree
15544 fold_not_const (const_tree arg0, tree type)
15545 {
15546 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
15547
15548 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
15549 }
15550
15551 /* Given CODE, a relational operator, the target type TYPE, and two
15552 constant operands OP0 and OP1, return the result of the
15553 relational operation. If the result is not a compile time
15554 constant, then return NULL_TREE. */
15555
15556 static tree
15557 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
15558 {
15559 int result, invert;
15560
15561 /* From here on, the only cases we handle are when the result is
15562 known to be a constant. */
15563
15564 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
15565 {
15566 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
15567 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
15568
15569 /* Handle the cases where either operand is a NaN. */
15570 if (real_isnan (c0) || real_isnan (c1))
15571 {
15572 switch (code)
15573 {
15574 case EQ_EXPR:
15575 case ORDERED_EXPR:
15576 result = 0;
15577 break;
15578
15579 case NE_EXPR:
15580 case UNORDERED_EXPR:
15581 case UNLT_EXPR:
15582 case UNLE_EXPR:
15583 case UNGT_EXPR:
15584 case UNGE_EXPR:
15585 case UNEQ_EXPR:
15586 result = 1;
15587 break;
15588
15589 case LT_EXPR:
15590 case LE_EXPR:
15591 case GT_EXPR:
15592 case GE_EXPR:
15593 case LTGT_EXPR:
15594 if (flag_trapping_math)
15595 return NULL_TREE;
15596 result = 0;
15597 break;
15598
15599 default:
15600 gcc_unreachable ();
15601 }
15602
15603 return constant_boolean_node (result, type);
15604 }
15605
15606 return constant_boolean_node (real_compare (code, c0, c1), type);
15607 }
15608
15609 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
15610 {
15611 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
15612 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
15613 return constant_boolean_node (fixed_compare (code, c0, c1), type);
15614 }
15615
15616 /* Handle equality/inequality of complex constants. */
15617 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
15618 {
15619 tree rcond = fold_relational_const (code, type,
15620 TREE_REALPART (op0),
15621 TREE_REALPART (op1));
15622 tree icond = fold_relational_const (code, type,
15623 TREE_IMAGPART (op0),
15624 TREE_IMAGPART (op1));
15625 if (code == EQ_EXPR)
15626 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
15627 else if (code == NE_EXPR)
15628 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
15629 else
15630 return NULL_TREE;
15631 }
15632
15633 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
15634 {
15635 unsigned count = VECTOR_CST_NELTS (op0);
15636 tree *elts = XALLOCAVEC (tree, count);
15637 gcc_assert (VECTOR_CST_NELTS (op1) == count
15638 && TYPE_VECTOR_SUBPARTS (type) == count);
15639
15640 for (unsigned i = 0; i < count; i++)
15641 {
15642 tree elem_type = TREE_TYPE (type);
15643 tree elem0 = VECTOR_CST_ELT (op0, i);
15644 tree elem1 = VECTOR_CST_ELT (op1, i);
15645
15646 tree tem = fold_relational_const (code, elem_type,
15647 elem0, elem1);
15648
15649 if (tem == NULL_TREE)
15650 return NULL_TREE;
15651
15652 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
15653 }
15654
15655 return build_vector (type, elts);
15656 }
15657
15658 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
15659
15660 To compute GT, swap the arguments and do LT.
15661 To compute GE, do LT and invert the result.
15662 To compute LE, swap the arguments, do LT and invert the result.
15663 To compute NE, do EQ and invert the result.
15664
15665 Therefore, the code below must handle only EQ and LT. */
15666
15667 if (code == LE_EXPR || code == GT_EXPR)
15668 {
15669 tree tem = op0;
15670 op0 = op1;
15671 op1 = tem;
15672 code = swap_tree_comparison (code);
15673 }
15674
15675 /* Note that it is safe to invert for real values here because we
15676 have already handled the one case where it matters (NaN operands). */
15677
15678 invert = 0;
15679 if (code == NE_EXPR || code == GE_EXPR)
15680 {
15681 invert = 1;
15682 code = invert_tree_comparison (code, false);
15683 }
15684
15685 /* Compute a result for LT or EQ if args permit;
15686 otherwise return NULL_TREE. */
15687 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
15688 {
15689 if (code == EQ_EXPR)
15690 result = tree_int_cst_equal (op0, op1);
15691 else
15692 result = tree_int_cst_lt (op0, op1);
15693 }
15694 else
15695 return NULL_TREE;
15696
15697 if (invert)
15698 result ^= 1;
15699 return constant_boolean_node (result, type);
15700 }
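
/* A sketch of the NaN handling above, assuming double NaNs built via
   real_nan from real.h; illustrative only (#if 0).  */
#if 0
static void
example_relational_nan (void)
{
  REAL_VALUE_TYPE r;
  real_nan (&r, "", 1, TYPE_MODE (double_type_node));
  tree nan = build_real (double_type_node, r);
  tree zero = build_real (double_type_node, dconst0);

  /* NaN compares unordered: EQ folds to false, NE to true.  Ordered
     comparisons such as LT_EXPR fold only when -ftrapping-math is off.  */
  tree eq = fold_relational_const (EQ_EXPR, boolean_type_node, nan, zero);
  tree ne = fold_relational_const (NE_EXPR, boolean_type_node, nan, zero);
  gcc_assert (integer_zerop (eq) && !integer_zerop (ne));
}
#endif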
15701
15702 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
15703 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
15704 itself. */
15705
15706 tree
15707 fold_build_cleanup_point_expr (tree type, tree expr)
15708 {
15709 /* If the expression does not have side effects then we don't have to wrap
15710 it with a cleanup point expression. */
15711 if (!TREE_SIDE_EFFECTS (expr))
15712 return expr;
15713
15714 /* If the expression is a return, look at the expression inside the
15715 return and at the right-hand side of the MODIFY_EXPR inside it.
15716 If either has no side effects, we don't need to wrap the expression
15717 in a cleanup point expression. Note we don't check the left-hand
15718 side of the MODIFY_EXPR because it should always be the return decl. */
15719 if (TREE_CODE (expr) == RETURN_EXPR)
15720 {
15721 tree op = TREE_OPERAND (expr, 0);
15722 if (!op || !TREE_SIDE_EFFECTS (op))
15723 return expr;
15724 op = TREE_OPERAND (op, 1);
15725 if (!TREE_SIDE_EFFECTS (op))
15726 return expr;
15727 }
15728
15729 return build1 (CLEANUP_POINT_EXPR, type, expr);
15730 }
15731
15732 /* Given a pointer value OP0 and a type TYPE, return a simplified version
15733 of an indirection through OP0, or NULL_TREE if no simplification is
15734 possible. */
15735
15736 tree
15737 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
15738 {
15739 tree sub = op0;
15740 tree subtype;
15741
15742 STRIP_NOPS (sub);
15743 subtype = TREE_TYPE (sub);
15744 if (!POINTER_TYPE_P (subtype))
15745 return NULL_TREE;
15746
15747 if (TREE_CODE (sub) == ADDR_EXPR)
15748 {
15749 tree op = TREE_OPERAND (sub, 0);
15750 tree optype = TREE_TYPE (op);
15751 /* *&CONST_DECL -> the value of the const decl. */
15752 if (TREE_CODE (op) == CONST_DECL)
15753 return DECL_INITIAL (op);
15754 /* *&p => p; make sure to handle *&"str"[cst] here. */
15755 if (type == optype)
15756 {
15757 tree fop = fold_read_from_constant_string (op);
15758 if (fop)
15759 return fop;
15760 else
15761 return op;
15762 }
15763 /* *(foo *)&fooarray => fooarray[0] */
15764 else if (TREE_CODE (optype) == ARRAY_TYPE
15765 && type == TREE_TYPE (optype)
15766 && (!in_gimple_form
15767 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15768 {
15769 tree type_domain = TYPE_DOMAIN (optype);
15770 tree min_val = size_zero_node;
15771 if (type_domain && TYPE_MIN_VALUE (type_domain))
15772 min_val = TYPE_MIN_VALUE (type_domain);
15773 if (in_gimple_form
15774 && TREE_CODE (min_val) != INTEGER_CST)
15775 return NULL_TREE;
15776 return build4_loc (loc, ARRAY_REF, type, op, min_val,
15777 NULL_TREE, NULL_TREE);
15778 }
15779 /* *(foo *)&complexfoo => __real__ complexfoo */
15780 else if (TREE_CODE (optype) == COMPLEX_TYPE
15781 && type == TREE_TYPE (optype))
15782 return fold_build1_loc (loc, REALPART_EXPR, type, op);
15783 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
15784 else if (TREE_CODE (optype) == VECTOR_TYPE
15785 && type == TREE_TYPE (optype))
15786 {
15787 tree part_width = TYPE_SIZE (type);
15788 tree index = bitsize_int (0);
15789 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
15790 }
15791 }
15792
15793 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
15794 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
15795 {
15796 tree op00 = TREE_OPERAND (sub, 0);
15797 tree op01 = TREE_OPERAND (sub, 1);
15798
15799 STRIP_NOPS (op00);
15800 if (TREE_CODE (op00) == ADDR_EXPR)
15801 {
15802 tree op00type;
15803 op00 = TREE_OPERAND (op00, 0);
15804 op00type = TREE_TYPE (op00);
15805
15806 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
15807 if (TREE_CODE (op00type) == VECTOR_TYPE
15808 && type == TREE_TYPE (op00type))
15809 {
15810 HOST_WIDE_INT offset = tree_to_shwi (op01);
15811 tree part_width = TYPE_SIZE (type);
15812 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
15813 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
15814 tree index = bitsize_int (indexi);
15815
15816 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
15817 return fold_build3_loc (loc,
15818 BIT_FIELD_REF, type, op00,
15819 part_width, index);
15820
15821 }
15822 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
15823 else if (TREE_CODE (op00type) == COMPLEX_TYPE
15824 && type == TREE_TYPE (op00type))
15825 {
15826 tree size = TYPE_SIZE_UNIT (type);
15827 if (tree_int_cst_equal (size, op01))
15828 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
15829 }
15830 /* ((foo *)&fooarray)[1] => fooarray[1] */
15831 else if (TREE_CODE (op00type) == ARRAY_TYPE
15832 && type == TREE_TYPE (op00type))
15833 {
15834 tree type_domain = TYPE_DOMAIN (op00type);
15835 tree min_val = size_zero_node;
15836 if (type_domain && TYPE_MIN_VALUE (type_domain))
15837 min_val = TYPE_MIN_VALUE (type_domain);
15838 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
15839 TYPE_SIZE_UNIT (type));
15840 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
15841 return build4_loc (loc, ARRAY_REF, type, op00, op01,
15842 NULL_TREE, NULL_TREE);
15843 }
15844 }
15845 }
15846
15847 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
15848 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
15849 && type == TREE_TYPE (TREE_TYPE (subtype))
15850 && (!in_gimple_form
15851 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
15852 {
15853 tree type_domain;
15854 tree min_val = size_zero_node;
15855 sub = build_fold_indirect_ref_loc (loc, sub);
15856 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
15857 if (type_domain && TYPE_MIN_VALUE (type_domain))
15858 min_val = TYPE_MIN_VALUE (type_domain);
15859 if (in_gimple_form
15860 && TREE_CODE (min_val) != INTEGER_CST)
15861 return NULL_TREE;
15862 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
15863 NULL_TREE);
15864 }
15865
15866 return NULL_TREE;
15867 }
15868
15869 /* Builds an expression for an indirection through T, simplifying some
15870 cases. */
15871
15872 tree
15873 build_fold_indirect_ref_loc (location_t loc, tree t)
15874 {
15875 tree type = TREE_TYPE (TREE_TYPE (t));
15876 tree sub = fold_indirect_ref_1 (loc, type, t);
15877
15878 if (sub)
15879 return sub;
15880
15881 return build1_loc (loc, INDIRECT_REF, type, t);
15882 }
15883
15884 /* Given an INDIRECT_REF T, return either T or a simplified version. */
15885
15886 tree
15887 fold_indirect_ref_loc (location_t loc, tree t)
15888 {
15889 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
15890
15891 if (sub)
15892 return sub;
15893 else
15894 return t;
15895 }
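
/* A sketch of the *& cancellation performed above, assuming X is an
   int-typed VAR_DECL built elsewhere; illustrative only (#if 0).  */
#if 0
static void
example_indirect_ref (tree x)
{
  tree addr = build_fold_addr_expr (x);
  tree deref = fold_indirect_ref_loc (UNKNOWN_LOCATION,
                                      build1 (INDIRECT_REF,
                                              integer_type_node, addr));
  /* DEREF is X again: fold_indirect_ref_1 strips the *& pair.  */
  gcc_assert (deref == x);
}
#endif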
15896
15897 /* Strip non-trapping, non-side-effecting tree nodes from an expression
15898 whose result is ignored. The type of the returned tree need not be
15899 the same as that of the original expression. */
15900
15901 tree
15902 fold_ignored_result (tree t)
15903 {
15904 if (!TREE_SIDE_EFFECTS (t))
15905 return integer_zero_node;
15906
15907 for (;;)
15908 switch (TREE_CODE_CLASS (TREE_CODE (t)))
15909 {
15910 case tcc_unary:
15911 t = TREE_OPERAND (t, 0);
15912 break;
15913
15914 case tcc_binary:
15915 case tcc_comparison:
15916 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15917 t = TREE_OPERAND (t, 0);
15918 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
15919 t = TREE_OPERAND (t, 1);
15920 else
15921 return t;
15922 break;
15923
15924 case tcc_expression:
15925 switch (TREE_CODE (t))
15926 {
15927 case COMPOUND_EXPR:
15928 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
15929 return t;
15930 t = TREE_OPERAND (t, 0);
15931 break;
15932
15933 case COND_EXPR:
15934 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
15935 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
15936 return t;
15937 t = TREE_OPERAND (t, 0);
15938 break;
15939
15940 default:
15941 return t;
15942 }
15943 break;
15944
15945 default:
15946 return t;
15947 }
15948 }
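
/* A sketch: an ignored expression with no side effects is dropped in
   favor of integer_zero_node.  Illustrative only (#if 0).  */
#if 0
static void
example_ignored_result (void)
{
  tree sum = build2 (PLUS_EXPR, integer_type_node,
                     build_int_cst (integer_type_node, 1),
                     build_int_cst (integer_type_node, 2));
  gcc_assert (fold_ignored_result (sum) == integer_zero_node);
}
#endif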
15949
15950 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
15951
15952 tree
15953 round_up_loc (location_t loc, tree value, unsigned int divisor)
15954 {
15955 tree div = NULL_TREE;
15956
15957 if (divisor == 1)
15958 return value;
15959
15960 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
15961 have to do anything. Only do this check when VALUE is not a
15962 constant, because for a constant the check is more expensive than
15963 simply doing the rounding. */
15964 if (TREE_CODE (value) != INTEGER_CST)
15965 {
15966 div = build_int_cst (TREE_TYPE (value), divisor);
15967
15968 if (multiple_of_p (TREE_TYPE (value), value, div))
15969 return value;
15970 }
15971
15972 /* If divisor is a power of two, simplify this to bit manipulation. */
15973 if (divisor == (divisor & -divisor))
15974 {
15975 if (TREE_CODE (value) == INTEGER_CST)
15976 {
15977 wide_int val = value;
15978 bool overflow_p;
15979
15980 if ((val & (divisor - 1)) == 0)
15981 return value;
15982
15983 overflow_p = TREE_OVERFLOW (value);
15984 val += divisor - 1;
15985 val &= - (int) divisor;
15986 if (val == 0)
15987 overflow_p = true;
15988
15989 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
15990 }
15991 else
15992 {
15993 tree t;
15994
15995 t = build_int_cst (TREE_TYPE (value), divisor - 1);
15996 value = size_binop_loc (loc, PLUS_EXPR, value, t);
15997 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
15998 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
15999 }
16000 }
16001 else
16002 {
16003 if (!div)
16004 div = build_int_cst (TREE_TYPE (value), divisor);
16005 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
16006 value = size_binop_loc (loc, MULT_EXPR, value, div);
16007 }
16008
16009 return value;
16010 }
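
/* A sketch of the power-of-two fast path above: rounding 13 up to a
   multiple of 8 is (13 + 7) & -8 == 16.  Illustrative only (#if 0).  */
#if 0
static void
example_round_up (void)
{
  tree r = round_up_loc (UNKNOWN_LOCATION,
                         build_int_cst (sizetype, 13), 8);
  gcc_assert (tree_to_shwi (r) == 16);
}
#endif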
16011
16012 /* Likewise, but round down. */
16013
16014 tree
16015 round_down_loc (location_t loc, tree value, int divisor)
16016 {
16017 tree div = NULL_TREE;
16018
16019 gcc_assert (divisor > 0);
16020 if (divisor == 1)
16021 return value;
16022
16023 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
16024 have to do anything. Only do this check when VALUE is not a
16025 constant, because for a constant the check is more expensive than
16026 simply doing the rounding. */
16027 if (TREE_CODE (value) != INTEGER_CST)
16028 {
16029 div = build_int_cst (TREE_TYPE (value), divisor);
16030
16031 if (multiple_of_p (TREE_TYPE (value), value, div))
16032 return value;
16033 }
16034
16035 /* If divisor is a power of two, simplify this to bit manipulation. */
16036 if (divisor == (divisor & -divisor))
16037 {
16038 tree t;
16039
16040 t = build_int_cst (TREE_TYPE (value), -divisor);
16041 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
16042 }
16043 else
16044 {
16045 if (!div)
16046 div = build_int_cst (TREE_TYPE (value), divisor);
16047 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
16048 value = size_binop_loc (loc, MULT_EXPR, value, div);
16049 }
16050
16051 return value;
16052 }
16053
16054 /* Return a pointer to the base of the object addressed by EXP and
16055 extract the information about the offset of the access, storing it
16056 in *PBITPOS and *POFFSET. */
16057
16058 static tree
16059 split_address_to_core_and_offset (tree exp,
16060 HOST_WIDE_INT *pbitpos, tree *poffset)
16061 {
16062 tree core;
16063 machine_mode mode;
16064 int unsignedp, volatilep;
16065 HOST_WIDE_INT bitsize;
16066 location_t loc = EXPR_LOCATION (exp);
16067
16068 if (TREE_CODE (exp) == ADDR_EXPR)
16069 {
16070 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
16071 poffset, &mode, &unsignedp, &volatilep,
16072 false);
16073 core = build_fold_addr_expr_loc (loc, core);
16074 }
16075 else
16076 {
16077 core = exp;
16078 *pbitpos = 0;
16079 *poffset = NULL_TREE;
16080 }
16081
16082 return core;
16083 }
16084
16085 /* Returns true if the addresses of E1 and E2 differ by a constant, false
16086 otherwise. If they do, E1 - E2 is stored in *DIFF. */
16087
16088 bool
16089 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
16090 {
16091 tree core1, core2;
16092 HOST_WIDE_INT bitpos1, bitpos2;
16093 tree toffset1, toffset2, tdiff, type;
16094
16095 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
16096 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
16097
16098 if (bitpos1 % BITS_PER_UNIT != 0
16099 || bitpos2 % BITS_PER_UNIT != 0
16100 || !operand_equal_p (core1, core2, 0))
16101 return false;
16102
16103 if (toffset1 && toffset2)
16104 {
16105 type = TREE_TYPE (toffset1);
16106 if (type != TREE_TYPE (toffset2))
16107 toffset2 = fold_convert (type, toffset2);
16108
16109 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
16110 if (!cst_and_fits_in_hwi (tdiff))
16111 return false;
16112
16113 *diff = int_cst_value (tdiff);
16114 }
16115 else if (toffset1 || toffset2)
16116 {
16117 /* If only one of the offsets is non-constant, the difference cannot
16118 be a constant. */
16119 return false;
16120 }
16121 else
16122 *diff = 0;
16123
16124 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
16125 return true;
16126 }
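
/* A sketch, assuming ARR is a VAR_DECL of array-of-int type: &ARR[3]
   and &ARR[1] differ by exactly two elements' worth of bytes.
   Illustrative only (#if 0).  */
#if 0
static void
example_ptr_difference (tree arr)
{
  HOST_WIDE_INT diff;
  tree e1 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node,
                                          arr, size_int (3),
                                          NULL_TREE, NULL_TREE));
  tree e2 = build_fold_addr_expr (build4 (ARRAY_REF, integer_type_node,
                                          arr, size_int (1),
                                          NULL_TREE, NULL_TREE));
  if (ptr_difference_const (e1, e2, &diff))
    gcc_assert (diff == 2 * int_size_in_bytes (integer_type_node));
}
#endif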
16127
16128 /* Simplify the floating point expression EXP when the sign of the
16129 result is not significant. Return NULL_TREE if no simplification
16130 is possible. */
16131
16132 tree
16133 fold_strip_sign_ops (tree exp)
16134 {
16135 tree arg0, arg1;
16136 location_t loc = EXPR_LOCATION (exp);
16137
16138 switch (TREE_CODE (exp))
16139 {
16140 case ABS_EXPR:
16141 case NEGATE_EXPR:
16142 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16143 return arg0 ? arg0 : TREE_OPERAND (exp, 0);
16144
16145 case MULT_EXPR:
16146 case RDIV_EXPR:
16147 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (exp)))
16148 return NULL_TREE;
16149 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 0));
16150 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16151 if (arg0 != NULL_TREE || arg1 != NULL_TREE)
16152 return fold_build2_loc (loc, TREE_CODE (exp), TREE_TYPE (exp),
16153 arg0 ? arg0 : TREE_OPERAND (exp, 0),
16154 arg1 ? arg1 : TREE_OPERAND (exp, 1));
16155 break;
16156
16157 case COMPOUND_EXPR:
16158 arg0 = TREE_OPERAND (exp, 0);
16159 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16160 if (arg1)
16161 return fold_build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (exp), arg0, arg1);
16162 break;
16163
16164 case COND_EXPR:
16165 arg0 = fold_strip_sign_ops (TREE_OPERAND (exp, 1));
16166 arg1 = fold_strip_sign_ops (TREE_OPERAND (exp, 2));
16167 if (arg0 || arg1)
16168 return fold_build3_loc (loc,
16169 COND_EXPR, TREE_TYPE (exp), TREE_OPERAND (exp, 0),
16170 arg0 ? arg0 : TREE_OPERAND (exp, 1),
16171 arg1 ? arg1 : TREE_OPERAND (exp, 2));
16172 break;
16173
16174 case CALL_EXPR:
16175 {
16176 const enum built_in_function fcode = builtin_mathfn_code (exp);
16177 switch (fcode)
16178 {
16179 CASE_FLT_FN (BUILT_IN_COPYSIGN):
16180 /* Strip copysign function call, return the 1st argument. */
16181 arg0 = CALL_EXPR_ARG (exp, 0);
16182 arg1 = CALL_EXPR_ARG (exp, 1);
16183 return omit_one_operand_loc (loc, TREE_TYPE (exp), arg0, arg1);
16184
16185 default:
16186 /* Strip sign ops from the argument of "odd" math functions. */
16187 if (negate_mathfn_p (fcode))
16188 {
16189 arg0 = fold_strip_sign_ops (CALL_EXPR_ARG (exp, 0));
16190 if (arg0)
16191 return build_call_expr_loc (loc, get_callee_fndecl (exp), 1, arg0);
16192 }
16193 break;
16194 }
16195 }
16196 break;
16197
16198 default:
16199 break;
16200 }
16201 return NULL_TREE;
16202 }
16203
16204 /* Return OFF converted to a pointer offset type suitable as an offset for
16205 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
16206 tree
16207 convert_to_ptrofftype_loc (location_t loc, tree off)
16208 {
16209 return fold_convert_loc (loc, sizetype, off);
16210 }
16211
16212 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16213 tree
16214 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
16215 {
16216 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16217 ptr, convert_to_ptrofftype_loc (loc, off));
16218 }
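
/* A sketch of offsetting a pointer by four bytes, assuming PTR is a
   pointer-typed tree built elsewhere; the _hwi variant just below does
   the same for a bare HOST_WIDE_INT.  Illustrative only (#if 0).  */
#if 0
static tree
example_pointer_plus (location_t loc, tree ptr)
{
  /* The offset is converted to sizetype, as POINTER_PLUS_EXPR
     requires.  */
  return fold_build_pointer_plus_loc (loc, ptr,
                                      build_int_cst (integer_type_node, 4));
}
#endif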
16219
16220 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
16221 tree
16222 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
16223 {
16224 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
16225 ptr, size_int (off));
16226 }