re PR middle-end/70843 (ICE in add_expr, at tree.c:7913)
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
83
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
87
88 /* The following constants represent a bit based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
108 };
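
/* Illustrative note (not in the original source): the encoding uses one
   bit per possible outcome -- bit 0 for LT, bit 1 for EQ, bit 2 for GT
   and bit 3 for UNORDERED -- so for instance
     COMPCODE_LE == (COMPCODE_LT | COMPCODE_EQ)
     COMPCODE_NE == (COMPCODE_LT | COMPCODE_GT | COMPCODE_UNORD)
   and OR-ing two comparisons of the same operands reduces to a bitwise
   OR of their codes: (a < b) || (a == b) has code COMPCODE_LE.  */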
109
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (location_t, tree, tree, enum tree_code,
113 tree *, tree *, tree *, int);
114 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
115 static enum comparison_code comparison_to_compcode (enum tree_code);
116 static enum tree_code compcode_to_comparison (enum comparison_code);
117 static int operand_equal_for_comparison_p (tree, tree, tree);
118 static int twoval_comparison_p (tree, tree *, tree *, int *);
119 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
120 static tree make_bit_field_ref (location_t, tree, tree,
121 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
122 static tree optimize_bit_field_compare (location_t, enum tree_code,
123 tree, tree, tree);
124 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
125 HOST_WIDE_INT *,
126 machine_mode *, int *, int *, int *,
127 tree *, tree *);
128 static int simple_operand_p (const_tree);
129 static bool simple_operand_p_2 (tree);
130 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
131 static tree range_predecessor (tree);
132 static tree range_successor (tree);
133 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
134 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
135 static tree unextend (tree, int, int, tree);
136 static tree optimize_minmax_comparison (location_t, enum tree_code,
137 tree, tree, tree);
138 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
139 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
140 static tree fold_binary_op_with_conditional_arg (location_t,
141 enum tree_code, tree,
142 tree, tree,
143 tree, tree, int);
144 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
145 static bool reorder_operands_p (const_tree, const_tree);
146 static tree fold_negate_const (tree, tree);
147 static tree fold_not_const (const_tree, tree);
148 static tree fold_relational_const (enum tree_code, tree, tree, tree);
149 static tree fold_convert_const (enum tree_code, tree, tree);
150 static tree fold_view_convert_expr (tree, tree);
151 static bool vec_cst_ctor_to_array (tree, tree *);
152
153
154 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
155 Otherwise, return LOC. */
156
157 static location_t
158 expr_location_or (tree t, location_t loc)
159 {
160 location_t tloc = EXPR_LOCATION (t);
161 return tloc == UNKNOWN_LOCATION ? loc : tloc;
162 }
163
164 /* Similar to protected_set_expr_location, but never modify x in place;
165 if the location can and needs to be set, unshare it. */
166
167 static inline tree
168 protected_set_expr_location_unshare (tree x, location_t loc)
169 {
170 if (CAN_HAVE_LOCATION_P (x)
171 && EXPR_LOCATION (x) != loc
172 && !(TREE_CODE (x) == SAVE_EXPR
173 || TREE_CODE (x) == TARGET_EXPR
174 || TREE_CODE (x) == BIND_EXPR))
175 {
176 x = copy_node (x);
177 SET_EXPR_LOCATION (x, loc);
178 }
179 return x;
180 }
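
/* (Illustrative note, an assumption not stated in the original: the
   three codes above are excluded because their node identity matters --
   copying a SAVE_EXPR, for example, would break its evaluate-only-once
   semantics -- so such nodes are returned unmodified instead.)  */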
181 \f
182 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
183 division and returns the quotient. Otherwise returns
184 NULL_TREE. */
185
186 tree
187 div_if_zero_remainder (const_tree arg1, const_tree arg2)
188 {
189 widest_int quo;
190
191 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
192 SIGNED, &quo))
193 return wide_int_to_tree (TREE_TYPE (arg1), quo);
194
195 return NULL_TREE;
196 }
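
/* Example (illustration only): for INTEGER_CSTs 12 and 4 this returns
   the constant 3; for 13 and 4 it returns NULL_TREE, since the
   division is not exact.  */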
197 \f
198 /* This is nonzero if we should defer warnings about undefined
199 overflow. This facility exists because these warnings are a
200 special case. The code to estimate loop iterations does not want
201 to issue any warnings, since it works with expressions which do not
202 occur in user code. Various bits of cleanup code call fold(), but
203 only use the result if it has certain characteristics (e.g., is a
204 constant); that code only wants to issue a warning if the result is
205 used. */
206
207 static int fold_deferring_overflow_warnings;
208
209 /* If a warning about undefined overflow is deferred, this is the
210 warning. Note that this may cause us to turn two warnings into
211 one, but that is fine since it is sufficient to only give one
212 warning per expression. */
213
214 static const char* fold_deferred_overflow_warning;
215
216 /* If a warning about undefined overflow is deferred, this is the
217 level at which the warning should be emitted. */
218
219 static enum warn_strict_overflow_code fold_deferred_overflow_code;
220
221 /* Start deferring overflow warnings. We could use a stack here to
222 permit nested calls, but at present it is not necessary. */
223
224 void
225 fold_defer_overflow_warnings (void)
226 {
227 ++fold_deferring_overflow_warnings;
228 }
229
230 /* Stop deferring overflow warnings. If there is a pending warning,
231 and ISSUE is true, then issue the warning if appropriate. STMT is
232 the statement with which the warning should be associated (used for
233 location information); STMT may be NULL. CODE is the level of the
234 warning--a warn_strict_overflow_code value. This function will use
235 the smaller of CODE and the deferred code when deciding whether to
236 issue the warning. CODE may be zero to mean to always use the
237 deferred code. */
238
239 void
240 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
241 {
242 const char *warnmsg;
243 location_t locus;
244
245 gcc_assert (fold_deferring_overflow_warnings > 0);
246 --fold_deferring_overflow_warnings;
247 if (fold_deferring_overflow_warnings > 0)
248 {
249 if (fold_deferred_overflow_warning != NULL
250 && code != 0
251 && code < (int) fold_deferred_overflow_code)
252 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
253 return;
254 }
255
256 warnmsg = fold_deferred_overflow_warning;
257 fold_deferred_overflow_warning = NULL;
258
259 if (!issue || warnmsg == NULL)
260 return;
261
262 if (gimple_no_warning_p (stmt))
263 return;
264
265 /* Use the smallest code level when deciding to issue the
266 warning. */
267 if (code == 0 || code > (int) fold_deferred_overflow_code)
268 code = fold_deferred_overflow_code;
269
270 if (!issue_strict_overflow_warning (code))
271 return;
272
273 if (stmt == NULL)
274 locus = input_location;
275 else
276 locus = gimple_location (stmt);
277 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
278 }
279
280 /* Stop deferring overflow warnings, ignoring any deferred
281 warnings. */
282
283 void
284 fold_undefer_and_ignore_overflow_warnings (void)
285 {
286 fold_undefer_overflow_warnings (false, NULL, 0);
287 }
288
289 /* Whether we are deferring overflow warnings. */
290
291 bool
292 fold_deferring_overflow_warnings_p (void)
293 {
294 return fold_deferring_overflow_warnings > 0;
295 }
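
/* A minimal usage sketch of the deferral API (hypothetical caller, not
   part of this file):

     fold_defer_overflow_warnings ();
     tree val = fold_binary (PLUS_EXPR, type, op0, op1);
     fold_undefer_overflow_warnings (val && TREE_CONSTANT (val), stmt, 0);

   i.e. the deferred warning is only emitted when the caller actually
   uses the folded result.  */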
296
297 /* This is called when we fold something based on the fact that signed
298 overflow is undefined. */
299
300 static void
301 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
302 {
303 if (fold_deferring_overflow_warnings > 0)
304 {
305 if (fold_deferred_overflow_warning == NULL
306 || wc < fold_deferred_overflow_code)
307 {
308 fold_deferred_overflow_warning = gmsgid;
309 fold_deferred_overflow_code = wc;
310 }
311 }
312 else if (issue_strict_overflow_warning (wc))
313 warning (OPT_Wstrict_overflow, gmsgid);
314 }
315 \f
316 /* Return true if the built-in mathematical function specified by FN
317 is odd, i.e. -f(x) == f(-x). */
318
319 bool
320 negate_mathfn_p (combined_fn fn)
321 {
322 switch (fn)
323 {
324 CASE_CFN_ASIN:
325 CASE_CFN_ASINH:
326 CASE_CFN_ATAN:
327 CASE_CFN_ATANH:
328 CASE_CFN_CASIN:
329 CASE_CFN_CASINH:
330 CASE_CFN_CATAN:
331 CASE_CFN_CATANH:
332 CASE_CFN_CBRT:
333 CASE_CFN_CPROJ:
334 CASE_CFN_CSIN:
335 CASE_CFN_CSINH:
336 CASE_CFN_CTAN:
337 CASE_CFN_CTANH:
338 CASE_CFN_ERF:
339 CASE_CFN_LLROUND:
340 CASE_CFN_LROUND:
341 CASE_CFN_ROUND:
342 CASE_CFN_SIN:
343 CASE_CFN_SINH:
344 CASE_CFN_TAN:
345 CASE_CFN_TANH:
346 CASE_CFN_TRUNC:
347 return true;
348
349 CASE_CFN_LLRINT:
350 CASE_CFN_LRINT:
351 CASE_CFN_NEARBYINT:
352 CASE_CFN_RINT:
353 return !flag_rounding_math;
354
355 default:
356 break;
357 }
358 return false;
359 }
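
/* Example (illustration only): sin is odd, so negate_mathfn_p returns
   true for it and fold_negate_expr below may rewrite -sin (x) as
   sin (-x); rint is only treated as odd when -frounding-math is not in
   effect, since under a directed rounding mode the identity
   -f(x) == f(-x) can fail.  */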
360
361 /* Check whether we may negate an integer constant T without causing
362 overflow. */
363
364 bool
365 may_negate_without_overflow_p (const_tree t)
366 {
367 tree type;
368
369 gcc_assert (TREE_CODE (t) == INTEGER_CST);
370
371 type = TREE_TYPE (t);
372 if (TYPE_UNSIGNED (type))
373 return false;
374
375 return !wi::only_sign_bit_p (t);
376 }
377
378 /* Determine whether an expression T can be cheaply negated using
379 the function negate_expr without introducing undefined overflow. */
380
381 static bool
382 negate_expr_p (tree t)
383 {
384 tree type;
385
386 if (t == 0)
387 return false;
388
389 type = TREE_TYPE (t);
390
391 STRIP_SIGN_NOPS (t);
392 switch (TREE_CODE (t))
393 {
394 case INTEGER_CST:
395 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
396 return true;
397
398 /* Check that -CST will not overflow type. */
399 return may_negate_without_overflow_p (t);
400 case BIT_NOT_EXPR:
401 return (INTEGRAL_TYPE_P (type)
402 && TYPE_OVERFLOW_WRAPS (type));
403
404 case FIXED_CST:
405 return true;
406
407 case NEGATE_EXPR:
408 return !TYPE_OVERFLOW_SANITIZED (type);
409
410 case REAL_CST:
411 /* We want to canonicalize to positive real constants. Pretend
412 that only negative ones can be easily negated. */
413 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
414
415 case COMPLEX_CST:
416 return negate_expr_p (TREE_REALPART (t))
417 && negate_expr_p (TREE_IMAGPART (t));
418
419 case VECTOR_CST:
420 {
421 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
422 return true;
423
424 int count = TYPE_VECTOR_SUBPARTS (type), i;
425
426 for (i = 0; i < count; i++)
427 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
428 return false;
429
430 return true;
431 }
432
433 case COMPLEX_EXPR:
434 return negate_expr_p (TREE_OPERAND (t, 0))
435 && negate_expr_p (TREE_OPERAND (t, 1));
436
437 case CONJ_EXPR:
438 return negate_expr_p (TREE_OPERAND (t, 0));
439
440 case PLUS_EXPR:
441 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
442 || HONOR_SIGNED_ZEROS (element_mode (type))
443 || (INTEGRAL_TYPE_P (type)
444 && ! TYPE_OVERFLOW_WRAPS (type)))
445 return false;
446 /* -(A + B) -> (-B) - A. */
447 if (negate_expr_p (TREE_OPERAND (t, 1))
448 && reorder_operands_p (TREE_OPERAND (t, 0),
449 TREE_OPERAND (t, 1)))
450 return true;
451 /* -(A + B) -> (-A) - B. */
452 return negate_expr_p (TREE_OPERAND (t, 0));
453
454 case MINUS_EXPR:
455 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
456 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
457 && !HONOR_SIGNED_ZEROS (element_mode (type))
458 && (! INTEGRAL_TYPE_P (type)
459 || TYPE_OVERFLOW_WRAPS (type))
460 && reorder_operands_p (TREE_OPERAND (t, 0),
461 TREE_OPERAND (t, 1));
462
463 case MULT_EXPR:
464 if (TYPE_UNSIGNED (type))
465 break;
466 /* INT_MIN/n * n doesn't overflow, but negating one of its operands
467 does if n is a power of two. */
468 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
469 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
470 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
471 && ! integer_pow2p (TREE_OPERAND (t, 0)))
472 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
473 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
474 break;
475
476 /* Fall through. */
477
478 case RDIV_EXPR:
479 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
480 return negate_expr_p (TREE_OPERAND (t, 1))
481 || negate_expr_p (TREE_OPERAND (t, 0));
482 break;
483
484 case TRUNC_DIV_EXPR:
485 case ROUND_DIV_EXPR:
486 case EXACT_DIV_EXPR:
487 if (TYPE_UNSIGNED (type))
488 break;
489 if (negate_expr_p (TREE_OPERAND (t, 0)))
490 return true;
491 /* In general we can't negate B in A / B, because if A is INT_MIN and
492 B is 1, we may turn this into INT_MIN / -1 which is undefined
493 and actually traps on some architectures. */
494 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
495 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
496 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
497 && ! integer_onep (TREE_OPERAND (t, 1))))
498 return negate_expr_p (TREE_OPERAND (t, 1));
499 break;
500
501 case NOP_EXPR:
502 /* Negate -((double)float) as (double)(-float). */
503 if (TREE_CODE (type) == REAL_TYPE)
504 {
505 tree tem = strip_float_extensions (t);
506 if (tem != t)
507 return negate_expr_p (tem);
508 }
509 break;
510
511 case CALL_EXPR:
512 /* Negate -f(x) as f(-x). */
513 if (negate_mathfn_p (get_call_combined_fn (t)))
514 return negate_expr_p (CALL_EXPR_ARG (t, 0));
515 break;
516
517 case RSHIFT_EXPR:
518 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
519 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
520 {
521 tree op1 = TREE_OPERAND (t, 1);
522 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
523 return true;
524 }
525 break;
526
527 default:
528 break;
529 }
530 return false;
531 }
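
/* Example (illustration only): for T = a + 1 in a signed type where
   overflow is undefined this returns false, conservatively refusing to
   rewrite -(a + 1); with -fwrapv it returns true, since -(a + 1) can
   then be rewritten as -1 - a without changing any wrapping
   behaviour.  */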
532
533 /* Given T, an expression, return a folded tree for -T, or NULL_TREE if no
534 simplification is possible.
535 If negate_expr_p would return true for T, NULL_TREE will never be
536 returned. */
537
538 static tree
539 fold_negate_expr (location_t loc, tree t)
540 {
541 tree type = TREE_TYPE (t);
542 tree tem;
543
544 switch (TREE_CODE (t))
545 {
546 /* Convert - (~A) to A + 1. */
547 case BIT_NOT_EXPR:
548 if (INTEGRAL_TYPE_P (type))
549 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
550 build_one_cst (type));
551 break;
552
553 case INTEGER_CST:
554 tem = fold_negate_const (t, type);
555 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
556 || (ANY_INTEGRAL_TYPE_P (type)
557 && !TYPE_OVERFLOW_TRAPS (type)
558 && TYPE_OVERFLOW_WRAPS (type))
559 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
560 return tem;
561 break;
562
563 case REAL_CST:
564 tem = fold_negate_const (t, type);
565 return tem;
566
567 case FIXED_CST:
568 tem = fold_negate_const (t, type);
569 return tem;
570
571 case COMPLEX_CST:
572 {
573 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
574 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
575 if (rpart && ipart)
576 return build_complex (type, rpart, ipart);
577 }
578 break;
579
580 case VECTOR_CST:
581 {
582 int count = TYPE_VECTOR_SUBPARTS (type), i;
583 tree *elts = XALLOCAVEC (tree, count);
584
585 for (i = 0; i < count; i++)
586 {
587 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
588 if (elts[i] == NULL_TREE)
589 return NULL_TREE;
590 }
591
592 return build_vector (type, elts);
593 }
594
595 case COMPLEX_EXPR:
596 if (negate_expr_p (t))
597 return fold_build2_loc (loc, COMPLEX_EXPR, type,
598 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
599 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
600 break;
601
602 case CONJ_EXPR:
603 if (negate_expr_p (t))
604 return fold_build1_loc (loc, CONJ_EXPR, type,
605 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
606 break;
607
608 case NEGATE_EXPR:
609 if (!TYPE_OVERFLOW_SANITIZED (type))
610 return TREE_OPERAND (t, 0);
611 break;
612
613 case PLUS_EXPR:
614 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
615 && !HONOR_SIGNED_ZEROS (element_mode (type)))
616 {
617 /* -(A + B) -> (-B) - A. */
618 if (negate_expr_p (TREE_OPERAND (t, 1))
619 && reorder_operands_p (TREE_OPERAND (t, 0),
620 TREE_OPERAND (t, 1)))
621 {
622 tem = negate_expr (TREE_OPERAND (t, 1));
623 return fold_build2_loc (loc, MINUS_EXPR, type,
624 tem, TREE_OPERAND (t, 0));
625 }
626
627 /* -(A + B) -> (-A) - B. */
628 if (negate_expr_p (TREE_OPERAND (t, 0)))
629 {
630 tem = negate_expr (TREE_OPERAND (t, 0));
631 return fold_build2_loc (loc, MINUS_EXPR, type,
632 tem, TREE_OPERAND (t, 1));
633 }
634 }
635 break;
636
637 case MINUS_EXPR:
638 /* - (A - B) -> B - A */
639 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
640 && !HONOR_SIGNED_ZEROS (element_mode (type))
641 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
642 return fold_build2_loc (loc, MINUS_EXPR, type,
643 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
644 break;
645
646 case MULT_EXPR:
647 if (TYPE_UNSIGNED (type))
648 break;
649
650 /* Fall through. */
651
652 case RDIV_EXPR:
653 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
654 {
655 tem = TREE_OPERAND (t, 1);
656 if (negate_expr_p (tem))
657 return fold_build2_loc (loc, TREE_CODE (t), type,
658 TREE_OPERAND (t, 0), negate_expr (tem));
659 tem = TREE_OPERAND (t, 0);
660 if (negate_expr_p (tem))
661 return fold_build2_loc (loc, TREE_CODE (t), type,
662 negate_expr (tem), TREE_OPERAND (t, 1));
663 }
664 break;
665
666 case TRUNC_DIV_EXPR:
667 case ROUND_DIV_EXPR:
668 case EXACT_DIV_EXPR:
669 if (TYPE_UNSIGNED (type))
670 break;
671 if (negate_expr_p (TREE_OPERAND (t, 0)))
672 return fold_build2_loc (loc, TREE_CODE (t), type,
673 negate_expr (TREE_OPERAND (t, 0)),
674 TREE_OPERAND (t, 1));
675 /* In general we can't negate B in A / B, because if A is INT_MIN and
676 B is 1, we may turn this into INT_MIN / -1 which is undefined
677 and actually traps on some architectures. */
678 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
679 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
680 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
681 && ! integer_onep (TREE_OPERAND (t, 1))))
682 && negate_expr_p (TREE_OPERAND (t, 1)))
683 return fold_build2_loc (loc, TREE_CODE (t), type,
684 TREE_OPERAND (t, 0),
685 negate_expr (TREE_OPERAND (t, 1)));
686 break;
687
688 case NOP_EXPR:
689 /* Convert -((double)float) into (double)(-float). */
690 if (TREE_CODE (type) == REAL_TYPE)
691 {
692 tem = strip_float_extensions (t);
693 if (tem != t && negate_expr_p (tem))
694 return fold_convert_loc (loc, type, negate_expr (tem));
695 }
696 break;
697
698 case CALL_EXPR:
699 /* Negate -f(x) as f(-x). */
700 if (negate_mathfn_p (get_call_combined_fn (t))
701 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
702 {
703 tree fndecl, arg;
704
705 fndecl = get_callee_fndecl (t);
706 arg = negate_expr (CALL_EXPR_ARG (t, 0));
707 return build_call_expr_loc (loc, fndecl, 1, arg);
708 }
709 break;
710
711 case RSHIFT_EXPR:
712 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
713 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
714 {
715 tree op1 = TREE_OPERAND (t, 1);
716 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
717 {
718 tree ntype = TYPE_UNSIGNED (type)
719 ? signed_type_for (type)
720 : unsigned_type_for (type);
721 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
722 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
723 return fold_convert_loc (loc, type, temp);
724 }
725 }
726 break;
727
728 default:
729 break;
730 }
731
732 return NULL_TREE;
733 }
734
735 /* Like fold_negate_expr, but return a NEGATE_EXPR tree if T cannot be
736 negated in a simpler way. Also allow T to be NULL_TREE, in which case
737 return NULL_TREE. */
738
739 static tree
740 negate_expr (tree t)
741 {
742 tree type, tem;
743 location_t loc;
744
745 if (t == NULL_TREE)
746 return NULL_TREE;
747
748 loc = EXPR_LOCATION (t);
749 type = TREE_TYPE (t);
750 STRIP_SIGN_NOPS (t);
751
752 tem = fold_negate_expr (loc, t);
753 if (!tem)
754 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
755 return fold_convert_loc (loc, type, tem);
756 }
757 \f
758 /* Split a tree IN into constant, literal and variable parts that could be
759 combined with CODE to make IN. "constant" means an expression with
760 TREE_CONSTANT but that isn't an actual constant. CODE must be a
761 commutative arithmetic operation. Store the constant part into *CONP,
762 the literal in *LITP and return the variable part. If a part isn't
763 present, set it to null. If the tree does not decompose in this way,
764 return the entire tree as the variable part and the other parts as null.
765
766 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
767 case, we negate an operand that was subtracted. Except if it is a
768 literal for which we use *MINUS_LITP instead.
769
770 If NEGATE_P is true, we are negating all of IN, again except a literal
771 for which we use *MINUS_LITP instead. If a variable part is of pointer
772 type, it is negated after converting to TYPE. This prevents us from
773 generating an illegal MINUS pointer expression. LOC is the location of
774 the converted variable part.
775
776 If IN is itself a literal or constant, return it as appropriate.
777
778 Note that we do not guarantee that any of the three values will be the
779 same type as IN, but they will have the same signedness and mode. */
780
781 static tree
782 split_tree (location_t loc, tree in, tree type, enum tree_code code,
783 tree *conp, tree *litp, tree *minus_litp, int negate_p)
784 {
785 tree var = 0;
786
787 *conp = 0;
788 *litp = 0;
789 *minus_litp = 0;
790
791 /* Strip any conversions that don't change the machine mode or signedness. */
792 STRIP_SIGN_NOPS (in);
793
794 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
795 || TREE_CODE (in) == FIXED_CST)
796 *litp = in;
797 else if (TREE_CODE (in) == code
798 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
799 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
800 /* We can associate addition and subtraction together (even
801 though the C standard doesn't say so) for integers because
802 the value is not affected. For reals, the value might be
803 affected, so we can't. */
804 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
805 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
806 {
807 tree op0 = TREE_OPERAND (in, 0);
808 tree op1 = TREE_OPERAND (in, 1);
809 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
810 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
811
812 /* First see if either of the operands is a literal, then a constant. */
813 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
814 || TREE_CODE (op0) == FIXED_CST)
815 *litp = op0, op0 = 0;
816 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
817 || TREE_CODE (op1) == FIXED_CST)
818 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
819
820 if (op0 != 0 && TREE_CONSTANT (op0))
821 *conp = op0, op0 = 0;
822 else if (op1 != 0 && TREE_CONSTANT (op1))
823 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
824
825 /* If we haven't dealt with either operand, this is not a case we can
826 decompose. Otherwise, VAR is either of the ones remaining, if any. */
827 if (op0 != 0 && op1 != 0)
828 var = in;
829 else if (op0 != 0)
830 var = op0;
831 else
832 var = op1, neg_var_p = neg1_p;
833
834 /* Now do any needed negations. */
835 if (neg_litp_p)
836 *minus_litp = *litp, *litp = 0;
837 if (neg_conp_p)
838 *conp = negate_expr (*conp);
839 if (neg_var_p)
840 {
841 /* Convert to TYPE before negating a pointer type expr. */
842 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
843 var = fold_convert_loc (loc, type, var);
844 var = negate_expr (var);
845 }
846 }
847 else if (TREE_CODE (in) == BIT_NOT_EXPR
848 && code == PLUS_EXPR)
849 {
850 /* -X - 1 is folded to ~X, undo that here. */
851 *minus_litp = build_one_cst (TREE_TYPE (in));
852 var = negate_expr (TREE_OPERAND (in, 0));
853 }
854 else if (TREE_CONSTANT (in))
855 *conp = in;
856 else
857 var = in;
858
859 if (negate_p)
860 {
861 if (*litp)
862 *minus_litp = *litp, *litp = 0;
863 else if (*minus_litp)
864 *litp = *minus_litp, *minus_litp = 0;
865 *conp = negate_expr (*conp);
866 /* Convert to TYPE before negating a pointer type expr. */
867 if (var && POINTER_TYPE_P (TREE_TYPE (var)))
868 var = fold_convert_loc (loc, type, var);
869 var = negate_expr (var);
870 }
871
872 return var;
873 }
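
/* Worked examples (illustration only), all with CODE == PLUS_EXPR:
   IN = x + 3 sets *litp = 3 and returns x; IN = x - 3 sets
   *minus_litp = 3 and returns x; IN = ~x (the folded form of -x - 1)
   sets *minus_litp = 1 and returns -x.  */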
874
875 /* Re-associate trees split by the above function. T1 and T2 are
876 either expressions to associate or null. Return the new
877 expression, if any. LOC is the location of the new expression. If
878 we build an operation, do it in TYPE and with CODE. */
879
880 static tree
881 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
882 {
883 if (t1 == 0)
884 return t2;
885 else if (t2 == 0)
886 return t1;
887
888 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
889 try to fold this since we will have infinite recursion. But do
890 deal with any NEGATE_EXPRs. */
891 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
892 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
893 {
894 if (code == PLUS_EXPR)
895 {
896 if (TREE_CODE (t1) == NEGATE_EXPR)
897 return build2_loc (loc, MINUS_EXPR, type,
898 fold_convert_loc (loc, type, t2),
899 fold_convert_loc (loc, type,
900 TREE_OPERAND (t1, 0)));
901 else if (TREE_CODE (t2) == NEGATE_EXPR)
902 return build2_loc (loc, MINUS_EXPR, type,
903 fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type,
905 TREE_OPERAND (t2, 0)));
906 else if (integer_zerop (t2))
907 return fold_convert_loc (loc, type, t1);
908 }
909 else if (code == MINUS_EXPR)
910 {
911 if (integer_zerop (t2))
912 return fold_convert_loc (loc, type, t1);
913 }
914
915 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
916 fold_convert_loc (loc, type, t2));
917 }
918
919 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
920 fold_convert_loc (loc, type, t2));
921 }
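
/* Example (illustration only): with CODE == PLUS_EXPR, T1 = a - b and
   T2 = -c, the code above builds (a - b) - c directly instead of
   (a - b) + (-c), without calling back into fold.  */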
922 \f
923 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
924 for use in int_const_binop, size_binop and size_diffop. */
925
926 static bool
927 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
928 {
929 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
930 return false;
931 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
932 return false;
933
934 switch (code)
935 {
936 case LSHIFT_EXPR:
937 case RSHIFT_EXPR:
938 case LROTATE_EXPR:
939 case RROTATE_EXPR:
940 return true;
941
942 default:
943 break;
944 }
945
946 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
947 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
948 && TYPE_MODE (type1) == TYPE_MODE (type2);
949 }
950
951
952 /* Combine two integer constants ARG1 and ARG2 under operation CODE
953 to produce a new constant. Return NULL_TREE if we don't know how
954 to evaluate CODE at compile-time. */
955
956 static tree
957 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
958 int overflowable)
959 {
960 wide_int res;
961 tree t;
962 tree type = TREE_TYPE (arg1);
963 signop sign = TYPE_SIGN (type);
964 bool overflow = false;
965
966 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
967 TYPE_SIGN (TREE_TYPE (parg2)));
968
969 switch (code)
970 {
971 case BIT_IOR_EXPR:
972 res = wi::bit_or (arg1, arg2);
973 break;
974
975 case BIT_XOR_EXPR:
976 res = wi::bit_xor (arg1, arg2);
977 break;
978
979 case BIT_AND_EXPR:
980 res = wi::bit_and (arg1, arg2);
981 break;
982
983 case RSHIFT_EXPR:
984 case LSHIFT_EXPR:
985 if (wi::neg_p (arg2))
986 {
987 arg2 = -arg2;
988 if (code == RSHIFT_EXPR)
989 code = LSHIFT_EXPR;
990 else
991 code = RSHIFT_EXPR;
992 }
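 /* (Illustrative note: the block above canonicalizes a negative shift
    count into a shift in the opposite direction, so e.g. x >> -2 is
    evaluated as x << 2.)  */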
993
994 if (code == RSHIFT_EXPR)
995 /* It's unclear from the C standard whether shifts can overflow.
996 The following code ignores overflow; perhaps a C standard
997 interpretation ruling is needed. */
998 res = wi::rshift (arg1, arg2, sign);
999 else
1000 res = wi::lshift (arg1, arg2);
1001 break;
1002
1003 case RROTATE_EXPR:
1004 case LROTATE_EXPR:
1005 if (wi::neg_p (arg2))
1006 {
1007 arg2 = -arg2;
1008 if (code == RROTATE_EXPR)
1009 code = LROTATE_EXPR;
1010 else
1011 code = RROTATE_EXPR;
1012 }
1013
1014 if (code == RROTATE_EXPR)
1015 res = wi::rrotate (arg1, arg2);
1016 else
1017 res = wi::lrotate (arg1, arg2);
1018 break;
1019
1020 case PLUS_EXPR:
1021 res = wi::add (arg1, arg2, sign, &overflow);
1022 break;
1023
1024 case MINUS_EXPR:
1025 res = wi::sub (arg1, arg2, sign, &overflow);
1026 break;
1027
1028 case MULT_EXPR:
1029 res = wi::mul (arg1, arg2, sign, &overflow);
1030 break;
1031
1032 case MULT_HIGHPART_EXPR:
1033 res = wi::mul_high (arg1, arg2, sign);
1034 break;
1035
1036 case TRUNC_DIV_EXPR:
1037 case EXACT_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case FLOOR_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_floor (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case CEIL_DIV_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case ROUND_DIV_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::div_round (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case TRUNC_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case FLOOR_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case CEIL_MOD_EXPR:
1074 if (arg2 == 0)
1075 return NULL_TREE;
1076 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1077 break;
1078
1079 case ROUND_MOD_EXPR:
1080 if (arg2 == 0)
1081 return NULL_TREE;
1082 res = wi::mod_round (arg1, arg2, sign, &overflow);
1083 break;
1084
1085 case MIN_EXPR:
1086 res = wi::min (arg1, arg2, sign);
1087 break;
1088
1089 case MAX_EXPR:
1090 res = wi::max (arg1, arg2, sign);
1091 break;
1092
1093 default:
1094 return NULL_TREE;
1095 }
1096
1097 t = force_fit_type (type, res, overflowable,
1098 (((sign == SIGNED || overflowable == -1)
1099 && overflow)
1100 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1101
1102 return t;
1103 }
1104
1105 tree
1106 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1107 {
1108 return int_const_binop_1 (code, arg1, arg2, 1);
1109 }
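
/* Example (illustration only):

     tree a = build_int_cst (integer_type_node, 6);
     tree b = build_int_cst (integer_type_node, 7);
     tree c = int_const_binop (MULT_EXPR, a, b);

   yields an INTEGER_CST of value 42 with TREE_OVERFLOW clear; INT_MAX
   plus 1 in the same signed type would instead come back with
   TREE_OVERFLOW set via force_fit_type.  */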
1110
1111 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1112 constant. We assume ARG1 and ARG2 have the same data type, or at least
1113 are the same kind of constant and the same machine mode. Return zero if
1114 combining the constants is not allowed in the current operating mode. */
1115
1116 static tree
1117 const_binop (enum tree_code code, tree arg1, tree arg2)
1118 {
1119 /* Sanity check for the recursive cases. */
1120 if (!arg1 || !arg2)
1121 return NULL_TREE;
1122
1123 STRIP_NOPS (arg1);
1124 STRIP_NOPS (arg2);
1125
1126 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1127 {
1128 if (code == POINTER_PLUS_EXPR)
1129 return int_const_binop (PLUS_EXPR,
1130 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1131
1132 return int_const_binop (code, arg1, arg2);
1133 }
1134
1135 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1136 {
1137 machine_mode mode;
1138 REAL_VALUE_TYPE d1;
1139 REAL_VALUE_TYPE d2;
1140 REAL_VALUE_TYPE value;
1141 REAL_VALUE_TYPE result;
1142 bool inexact;
1143 tree t, type;
1144
1145 /* The following codes are handled by real_arithmetic. */
1146 switch (code)
1147 {
1148 case PLUS_EXPR:
1149 case MINUS_EXPR:
1150 case MULT_EXPR:
1151 case RDIV_EXPR:
1152 case MIN_EXPR:
1153 case MAX_EXPR:
1154 break;
1155
1156 default:
1157 return NULL_TREE;
1158 }
1159
1160 d1 = TREE_REAL_CST (arg1);
1161 d2 = TREE_REAL_CST (arg2);
1162
1163 type = TREE_TYPE (arg1);
1164 mode = TYPE_MODE (type);
1165
1166 /* Don't perform the operation if we honor signaling NaNs and
1167 either operand is a signaling NaN. */
1168 if (HONOR_SNANS (mode)
1169 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1170 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1171 return NULL_TREE;
1172
1173 /* Don't perform the operation if it would raise a division
1174 by zero exception. */
1175 if (code == RDIV_EXPR
1176 && real_equal (&d2, &dconst0)
1177 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1178 return NULL_TREE;
1179
1180 /* If either operand is a NaN, just return it. Otherwise, set up
1181 for floating-point trap; we return an overflow. */
1182 if (REAL_VALUE_ISNAN (d1))
1183 {
1184 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1185 is off. */
1186 d1.signalling = 0;
1187 t = build_real (type, d1);
1188 return t;
1189 }
1190 else if (REAL_VALUE_ISNAN (d2))
1191 {
1192 /* Make the resulting NaN value a qNaN when flag_signaling_nans
1193 is off. */
1194 d2.signalling = 0;
1195 t = build_real (type, d2);
1196 return t;
1197 }
1198
1199 inexact = real_arithmetic (&value, code, &d1, &d2);
1200 real_convert (&result, mode, &value);
1201
1202 /* Don't constant fold this floating point operation if
1203 the result has overflowed and flag_trapping_math is set. */
1204 if (flag_trapping_math
1205 && MODE_HAS_INFINITIES (mode)
1206 && REAL_VALUE_ISINF (result)
1207 && !REAL_VALUE_ISINF (d1)
1208 && !REAL_VALUE_ISINF (d2))
1209 return NULL_TREE;
1210
1211 /* Don't constant fold this floating point operation if the
1212 result may depend upon the run-time rounding mode and
1213 flag_rounding_math is set, or if GCC's software emulation
1214 is unable to accurately represent the result. */
1215 if ((flag_rounding_math
1216 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1217 && (inexact || !real_identical (&result, &value)))
1218 return NULL_TREE;
1219
1220 t = build_real (type, result);
1221
1222 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1223 return t;
1224 }
1225
1226 if (TREE_CODE (arg1) == FIXED_CST)
1227 {
1228 FIXED_VALUE_TYPE f1;
1229 FIXED_VALUE_TYPE f2;
1230 FIXED_VALUE_TYPE result;
1231 tree t, type;
1232 int sat_p;
1233 bool overflow_p;
1234
1235 /* The following codes are handled by fixed_arithmetic. */
1236 switch (code)
1237 {
1238 case PLUS_EXPR:
1239 case MINUS_EXPR:
1240 case MULT_EXPR:
1241 case TRUNC_DIV_EXPR:
1242 if (TREE_CODE (arg2) != FIXED_CST)
1243 return NULL_TREE;
1244 f2 = TREE_FIXED_CST (arg2);
1245 break;
1246
1247 case LSHIFT_EXPR:
1248 case RSHIFT_EXPR:
1249 {
1250 if (TREE_CODE (arg2) != INTEGER_CST)
1251 return NULL_TREE;
1252 wide_int w2 = arg2;
1253 f2.data.high = w2.elt (1);
1254 f2.data.low = w2.elt (0);
1255 f2.mode = SImode;
1256 }
1257 break;
1258
1259 default:
1260 return NULL_TREE;
1261 }
1262
1263 f1 = TREE_FIXED_CST (arg1);
1264 type = TREE_TYPE (arg1);
1265 sat_p = TYPE_SATURATING (type);
1266 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1267 t = build_fixed (type, result);
1268 /* Propagate overflow flags. */
1269 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1270 TREE_OVERFLOW (t) = 1;
1271 return t;
1272 }
1273
1274 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1275 {
1276 tree type = TREE_TYPE (arg1);
1277 tree r1 = TREE_REALPART (arg1);
1278 tree i1 = TREE_IMAGPART (arg1);
1279 tree r2 = TREE_REALPART (arg2);
1280 tree i2 = TREE_IMAGPART (arg2);
1281 tree real, imag;
1282
1283 switch (code)
1284 {
1285 case PLUS_EXPR:
1286 case MINUS_EXPR:
1287 real = const_binop (code, r1, r2);
1288 imag = const_binop (code, i1, i2);
1289 break;
1290
1291 case MULT_EXPR:
1292 if (COMPLEX_FLOAT_TYPE_P (type))
1293 return do_mpc_arg2 (arg1, arg2, type,
1294 /* do_nonfinite= */ folding_initializer,
1295 mpc_mul);
1296
1297 real = const_binop (MINUS_EXPR,
1298 const_binop (MULT_EXPR, r1, r2),
1299 const_binop (MULT_EXPR, i1, i2));
1300 imag = const_binop (PLUS_EXPR,
1301 const_binop (MULT_EXPR, r1, i2),
1302 const_binop (MULT_EXPR, i1, r2));
1303 break;
1304
1305 case RDIV_EXPR:
1306 if (COMPLEX_FLOAT_TYPE_P (type))
1307 return do_mpc_arg2 (arg1, arg2, type,
1308 /* do_nonfinite= */ folding_initializer,
1309 mpc_div);
1310 /* Fallthru ... */
1311 case TRUNC_DIV_EXPR:
1312 case CEIL_DIV_EXPR:
1313 case FLOOR_DIV_EXPR:
1314 case ROUND_DIV_EXPR:
1315 if (flag_complex_method == 0)
1316 {
1317 /* Keep this algorithm in sync with
1318 tree-complex.c:expand_complex_div_straight().
1319
1320 Expand complex division to scalars, straightforward algorithm.
1321 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1322 t = br*br + bi*bi
1323 */
1324 tree magsquared
1325 = const_binop (PLUS_EXPR,
1326 const_binop (MULT_EXPR, r2, r2),
1327 const_binop (MULT_EXPR, i2, i2));
1328 tree t1
1329 = const_binop (PLUS_EXPR,
1330 const_binop (MULT_EXPR, r1, r2),
1331 const_binop (MULT_EXPR, i1, i2));
1332 tree t2
1333 = const_binop (MINUS_EXPR,
1334 const_binop (MULT_EXPR, i1, r2),
1335 const_binop (MULT_EXPR, r1, i2));
1336
1337 real = const_binop (code, t1, magsquared);
1338 imag = const_binop (code, t2, magsquared);
1339 }
1340 else
1341 {
1342 /* Keep this algorithm in sync with
1343 tree-complex.c:expand_complex_div_wide().
1344
1345 Expand complex division to scalars, modified algorithm to minimize
1346 overflow with wide input ranges. */
1347 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1348 fold_abs_const (r2, TREE_TYPE (type)),
1349 fold_abs_const (i2, TREE_TYPE (type)));
1350
1351 if (integer_nonzerop (compare))
1352 {
1353 /* In the TRUE branch, we compute
1354 ratio = br/bi;
1355 div = (br * ratio) + bi;
1356 tr = (ar * ratio) + ai;
1357 ti = (ai * ratio) - ar;
1358 tr = tr / div;
1359 ti = ti / div; */
1360 tree ratio = const_binop (code, r2, i2);
1361 tree div = const_binop (PLUS_EXPR, i2,
1362 const_binop (MULT_EXPR, r2, ratio));
1363 real = const_binop (MULT_EXPR, r1, ratio);
1364 real = const_binop (PLUS_EXPR, real, i1);
1365 real = const_binop (code, real, div);
1366
1367 imag = const_binop (MULT_EXPR, i1, ratio);
1368 imag = const_binop (MINUS_EXPR, imag, r1);
1369 imag = const_binop (code, imag, div);
1370 }
1371 else
1372 {
1373 /* In the FALSE branch, we compute
1374 ratio = bi/br;
1375 div = (bi * ratio) + br;
1376 tr = (ai * ratio) + ar;
1377 ti = ai - (ar * ratio);
1378 tr = tr / div;
1379 ti = ti / div; */
1380 tree ratio = const_binop (code, i2, r2);
1381 tree div = const_binop (PLUS_EXPR, r2,
1382 const_binop (MULT_EXPR, i2, ratio));
1383
1384 real = const_binop (MULT_EXPR, i1, ratio);
1385 real = const_binop (PLUS_EXPR, real, r1);
1386 real = const_binop (code, real, div);
1387
1388 imag = const_binop (MULT_EXPR, r1, ratio);
1389 imag = const_binop (MINUS_EXPR, i1, imag);
1390 imag = const_binop (code, imag, div);
1391 }
1392 }
1393 break;
1394
1395 default:
1396 return NULL_TREE;
1397 }
1398
1399 if (real && imag)
1400 return build_complex (type, real, imag);
1401 }
1402
1403 if (TREE_CODE (arg1) == VECTOR_CST
1404 && TREE_CODE (arg2) == VECTOR_CST)
1405 {
1406 tree type = TREE_TYPE (arg1);
1407 int count = TYPE_VECTOR_SUBPARTS (type), i;
1408 tree *elts = XALLOCAVEC (tree, count);
1409
1410 for (i = 0; i < count; i++)
1411 {
1412 tree elem1 = VECTOR_CST_ELT (arg1, i);
1413 tree elem2 = VECTOR_CST_ELT (arg2, i);
1414
1415 elts[i] = const_binop (code, elem1, elem2);
1416
1417 /* It is possible that const_binop cannot handle the given
1418 code and returns NULL_TREE. */
1419 if (elts[i] == NULL_TREE)
1420 return NULL_TREE;
1421 }
1422
1423 return build_vector (type, elts);
1424 }
1425
1426 /* Shifts allow a scalar shift amount for a vector. */
1427 if (TREE_CODE (arg1) == VECTOR_CST
1428 && TREE_CODE (arg2) == INTEGER_CST)
1429 {
1430 tree type = TREE_TYPE (arg1);
1431 int count = TYPE_VECTOR_SUBPARTS (type), i;
1432 tree *elts = XALLOCAVEC (tree, count);
1433
1434 for (i = 0; i < count; i++)
1435 {
1436 tree elem1 = VECTOR_CST_ELT (arg1, i);
1437
1438 elts[i] = const_binop (code, elem1, arg2);
1439
1440 /* It is possible that const_binop cannot handle the given
1441 code and returns NULL_TREE. */
1442 if (elts[i] == NULL_TREE)
1443 return NULL_TREE;
1444 }
1445
1446 return build_vector (type, elts);
1447 }
1448 return NULL_TREE;
1449 }
1450
1451 /* Overload that adds a TYPE parameter to be able to dispatch
1452 to fold_relational_const. */
1453
1454 tree
1455 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1456 {
1457 if (TREE_CODE_CLASS (code) == tcc_comparison)
1458 return fold_relational_const (code, type, arg1, arg2);
1459
1460 /* ??? Until we make the const_binop worker take the type of the
1461 result as argument, put those cases that need it here. */
1462 switch (code)
1463 {
1464 case COMPLEX_EXPR:
1465 if ((TREE_CODE (arg1) == REAL_CST
1466 && TREE_CODE (arg2) == REAL_CST)
1467 || (TREE_CODE (arg1) == INTEGER_CST
1468 && TREE_CODE (arg2) == INTEGER_CST))
1469 return build_complex (type, arg1, arg2);
1470 return NULL_TREE;
1471
1472 case VEC_PACK_TRUNC_EXPR:
1473 case VEC_PACK_FIX_TRUNC_EXPR:
1474 {
1475 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1476 tree *elts;
1477
1478 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1479 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1480 if (TREE_CODE (arg1) != VECTOR_CST
1481 || TREE_CODE (arg2) != VECTOR_CST)
1482 return NULL_TREE;
1483
1484 elts = XALLOCAVEC (tree, nelts);
1485 if (!vec_cst_ctor_to_array (arg1, elts)
1486 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1487 return NULL_TREE;
1488
1489 for (i = 0; i < nelts; i++)
1490 {
1491 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1492 ? NOP_EXPR : FIX_TRUNC_EXPR,
1493 TREE_TYPE (type), elts[i]);
1494 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1495 return NULL_TREE;
1496 }
1497
1498 return build_vector (type, elts);
1499 }
1500
1501 case VEC_WIDEN_MULT_LO_EXPR:
1502 case VEC_WIDEN_MULT_HI_EXPR:
1503 case VEC_WIDEN_MULT_EVEN_EXPR:
1504 case VEC_WIDEN_MULT_ODD_EXPR:
1505 {
1506 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1507 unsigned int out, ofs, scale;
1508 tree *elts;
1509
1510 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1511 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1512 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1513 return NULL_TREE;
1514
1515 elts = XALLOCAVEC (tree, nelts * 4);
1516 if (!vec_cst_ctor_to_array (arg1, elts)
1517 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1518 return NULL_TREE;
1519
1520 if (code == VEC_WIDEN_MULT_LO_EXPR)
1521 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1522 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1523 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1524 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1525 scale = 1, ofs = 0;
1526 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1527 scale = 1, ofs = 1;
1528
1529 for (out = 0; out < nelts; out++)
1530 {
1531 unsigned int in1 = (out << scale) + ofs;
1532 unsigned int in2 = in1 + nelts * 2;
1533 tree t1, t2;
1534
1535 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1536 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1537
1538 if (t1 == NULL_TREE || t2 == NULL_TREE)
1539 return NULL_TREE;
1540 elts[out] = const_binop (MULT_EXPR, t1, t2);
1541 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1542 return NULL_TREE;
1543 }
1544
1545 return build_vector (type, elts);
1546 }
1547
1548 default:;
1549 }
1550
1551 if (TREE_CODE_CLASS (code) != tcc_binary)
1552 return NULL_TREE;
1553
1554 /* Make sure type and arg0 have the same saturating flag. */
1555 gcc_checking_assert (TYPE_SATURATING (type)
1556 == TYPE_SATURATING (TREE_TYPE (arg1)));
1557
1558 return const_binop (code, arg1, arg2);
1559 }
1560
1561 /* Compute CODE ARG0 with resulting type TYPE, ARG0 being constant.
1562 Return zero if computing the constant is not possible. */
1563
1564 tree
1565 const_unop (enum tree_code code, tree type, tree arg0)
1566 {
1567 /* Don't perform the operation, other than NEGATE and ABS, if
1568 flag_signaling_nans is on and the operand is a signaling NaN. */
1569 if (TREE_CODE (arg0) == REAL_CST
1570 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1571 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1572 && code != NEGATE_EXPR
1573 && code != ABS_EXPR)
1574 return NULL_TREE;
1575
1576 switch (code)
1577 {
1578 CASE_CONVERT:
1579 case FLOAT_EXPR:
1580 case FIX_TRUNC_EXPR:
1581 case FIXED_CONVERT_EXPR:
1582 return fold_convert_const (code, type, arg0);
1583
1584 case ADDR_SPACE_CONVERT_EXPR:
1585 /* If the source address is 0, and the source address space
1586 cannot have a valid object at 0, fold to dest type null. */
1587 if (integer_zerop (arg0)
1588 && !(targetm.addr_space.zero_address_valid
1589 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1590 return fold_convert_const (code, type, arg0);
1591 break;
1592
1593 case VIEW_CONVERT_EXPR:
1594 return fold_view_convert_expr (type, arg0);
1595
1596 case NEGATE_EXPR:
1597 {
1598 /* Can't call fold_negate_const directly here as that doesn't
1599 handle all cases and we might not be able to negate some
1600 constants. */
1601 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1602 if (tem && CONSTANT_CLASS_P (tem))
1603 return tem;
1604 break;
1605 }
1606
1607 case ABS_EXPR:
1608 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1609 return fold_abs_const (arg0, type);
1610 break;
1611
1612 case CONJ_EXPR:
1613 if (TREE_CODE (arg0) == COMPLEX_CST)
1614 {
1615 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1616 TREE_TYPE (type));
1617 return build_complex (type, TREE_REALPART (arg0), ipart);
1618 }
1619 break;
1620
1621 case BIT_NOT_EXPR:
1622 if (TREE_CODE (arg0) == INTEGER_CST)
1623 return fold_not_const (arg0, type);
1624 /* Perform BIT_NOT_EXPR on each element individually. */
1625 else if (TREE_CODE (arg0) == VECTOR_CST)
1626 {
1627 tree *elements;
1628 tree elem;
1629 unsigned count = VECTOR_CST_NELTS (arg0), i;
1630
1631 elements = XALLOCAVEC (tree, count);
1632 for (i = 0; i < count; i++)
1633 {
1634 elem = VECTOR_CST_ELT (arg0, i);
1635 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1636 if (elem == NULL_TREE)
1637 break;
1638 elements[i] = elem;
1639 }
1640 if (i == count)
1641 return build_vector (type, elements);
1642 }
1643 break;
1644
1645 case TRUTH_NOT_EXPR:
1646 if (TREE_CODE (arg0) == INTEGER_CST)
1647 return constant_boolean_node (integer_zerop (arg0), type);
1648 break;
1649
1650 case REALPART_EXPR:
1651 if (TREE_CODE (arg0) == COMPLEX_CST)
1652 return fold_convert (type, TREE_REALPART (arg0));
1653 break;
1654
1655 case IMAGPART_EXPR:
1656 if (TREE_CODE (arg0) == COMPLEX_CST)
1657 return fold_convert (type, TREE_IMAGPART (arg0));
1658 break;
1659
1660 case VEC_UNPACK_LO_EXPR:
1661 case VEC_UNPACK_HI_EXPR:
1662 case VEC_UNPACK_FLOAT_LO_EXPR:
1663 case VEC_UNPACK_FLOAT_HI_EXPR:
1664 {
1665 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1666 tree *elts;
1667 enum tree_code subcode;
1668
1669 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1670 if (TREE_CODE (arg0) != VECTOR_CST)
1671 return NULL_TREE;
1672
1673 elts = XALLOCAVEC (tree, nelts * 2);
1674 if (!vec_cst_ctor_to_array (arg0, elts))
1675 return NULL_TREE;
1676
1677 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1678 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1679 elts += nelts;
1680
1681 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1682 subcode = NOP_EXPR;
1683 else
1684 subcode = FLOAT_EXPR;
1685
1686 for (i = 0; i < nelts; i++)
1687 {
1688 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1689 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1690 return NULL_TREE;
1691 }
1692
1693 return build_vector (type, elts);
1694 }
1695
1696 case REDUC_MIN_EXPR:
1697 case REDUC_MAX_EXPR:
1698 case REDUC_PLUS_EXPR:
1699 {
1700 unsigned int nelts, i;
1701 tree *elts;
1702 enum tree_code subcode;
1703
1704 if (TREE_CODE (arg0) != VECTOR_CST)
1705 return NULL_TREE;
1706 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1707
1708 elts = XALLOCAVEC (tree, nelts);
1709 if (!vec_cst_ctor_to_array (arg0, elts))
1710 return NULL_TREE;
1711
1712 switch (code)
1713 {
1714 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1715 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1716 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1717 default: gcc_unreachable ();
1718 }
1719
1720 for (i = 1; i < nelts; i++)
1721 {
1722 elts[0] = const_binop (subcode, elts[0], elts[i]);
1723 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1724 return NULL_TREE;
1725 }
1726
1727 return elts[0];
1728 }
1729
1730 default:
1731 break;
1732 }
1733
1734 return NULL_TREE;
1735 }
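
/* Example (illustration only): const_unop (REDUC_PLUS_EXPR, ...) applied
   to a constant vector { 1, 2, 3, 4 } folds the elements together with
   PLUS_EXPR in the loop above and returns the scalar constant 10.  */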
1736
1737 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1738 indicates which particular sizetype to create. */
1739
1740 tree
1741 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1742 {
1743 return build_int_cst (sizetype_tab[(int) kind], number);
1744 }
1745 \f
1746 /* Combine operands ARG0 and ARG1 with arithmetic operation CODE. CODE
1747 is a tree code. The type of the result is taken from the operands.
1748 Both must be equivalent integer types, a la int_binop_types_match_p.
1749 If the operands are constant, so is the result. */
1750
1751 tree
1752 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1753 {
1754 tree type = TREE_TYPE (arg0);
1755
1756 if (arg0 == error_mark_node || arg1 == error_mark_node)
1757 return error_mark_node;
1758
1759 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1760 TREE_TYPE (arg1)));
1761
1762 /* Handle the special case of two integer constants faster. */
1763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1764 {
1765 /* And some specific cases even faster than that. */
1766 if (code == PLUS_EXPR)
1767 {
1768 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1771 return arg0;
1772 }
1773 else if (code == MINUS_EXPR)
1774 {
1775 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1776 return arg0;
1777 }
1778 else if (code == MULT_EXPR)
1779 {
1780 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1781 return arg1;
1782 }
1783
1784 /* Handle general case of two integer constants. For sizetype
1785 constant calculations we always want to know about overflow,
1786 even in the unsigned case. */
1787 return int_const_binop_1 (code, arg0, arg1, -1);
1788 }
1789
1790 return fold_build2_loc (loc, code, type, arg0, arg1);
1791 }
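
/* Example (illustration only): size_binop (PLUS_EXPR, size_int (4),
   size_int (8)) takes the fast INTEGER_CST path above and yields
   size_int (12); with a non-constant operand it falls back to
   fold_build2_loc.  */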
1792
1793 /* Given two values, either both of sizetype or both of bitsizetype,
1794 compute the difference between the two values. Return the value
1795 in signed type corresponding to the type of the operands. */
1796
1797 tree
1798 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1799 {
1800 tree type = TREE_TYPE (arg0);
1801 tree ctype;
1802
1803 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1804 TREE_TYPE (arg1)));
1805
1806 /* If the type is already signed, just do the simple thing. */
1807 if (!TYPE_UNSIGNED (type))
1808 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1809
1810 if (type == sizetype)
1811 ctype = ssizetype;
1812 else if (type == bitsizetype)
1813 ctype = sbitsizetype;
1814 else
1815 ctype = signed_type_for (type);
1816
1817 /* If either operand is not a constant, do the conversions to the signed
1818 type and subtract. The hardware will do the right thing with any
1819 overflow in the subtraction. */
1820 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1821 return size_binop_loc (loc, MINUS_EXPR,
1822 fold_convert_loc (loc, ctype, arg0),
1823 fold_convert_loc (loc, ctype, arg1));
1824
1825 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1826 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1827 overflow) and negate (which can't either). Special-case a result
1828 of zero while we're here. */
1829 if (tree_int_cst_equal (arg0, arg1))
1830 return build_int_cst (ctype, 0);
1831 else if (tree_int_cst_lt (arg1, arg0))
1832 return fold_convert_loc (loc, ctype,
1833 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1834 else
1835 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1836 fold_convert_loc (loc, ctype,
1837 size_binop_loc (loc,
1838 MINUS_EXPR,
1839 arg1, arg0)));
1840 }
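
/* Example (illustration only): sizetype is unsigned, so
   size_diffop (size_int (2), size_int (5)) is computed as
   -(5 - 2) in the signed ssizetype, yielding -3 instead of a huge
   wrapped-around unsigned value.  */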
1841 \f
1842 /* A subroutine of fold_convert_const handling conversions of an
1843 INTEGER_CST to another integer type. */
1844
1845 static tree
1846 fold_convert_const_int_from_int (tree type, const_tree arg1)
1847 {
1848 /* Given an integer constant, make new constant with new type,
1849 appropriately sign-extended or truncated. Use widest_int
1850 so that any extension is done according to ARG1's type. */
1851 return force_fit_type (type, wi::to_widest (arg1),
1852 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1853 TREE_OVERFLOW (arg1));
1854 }
1855
1856 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1857 to an integer type. */
1858
1859 static tree
1860 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1861 {
1862 bool overflow = false;
1863 tree t;
1864
1865 /* The following code implements the floating point to integer
1866 conversion rules required by the Java Language Specification,
1867 namely that IEEE NaNs are mapped to zero and values that overflow
1868 the target precision saturate, i.e. values greater than
1869 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1870 are mapped to INT_MIN. These semantics are allowed by the
1871 C and C++ standards that simply state that the behavior of
1872 FP-to-integer conversion is unspecified upon overflow. */
1873
1874 wide_int val;
1875 REAL_VALUE_TYPE r;
1876 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1877
1878 switch (code)
1879 {
1880 case FIX_TRUNC_EXPR:
1881 real_trunc (&r, VOIDmode, &x);
1882 break;
1883
1884 default:
1885 gcc_unreachable ();
1886 }
1887
1888 /* If R is NaN, return zero and show we have an overflow. */
1889 if (REAL_VALUE_ISNAN (r))
1890 {
1891 overflow = true;
1892 val = wi::zero (TYPE_PRECISION (type));
1893 }
1894
1895 /* See if R is less than the lower bound or greater than the
1896 upper bound. */
1897
1898 if (! overflow)
1899 {
1900 tree lt = TYPE_MIN_VALUE (type);
1901 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1902 if (real_less (&r, &l))
1903 {
1904 overflow = true;
1905 val = lt;
1906 }
1907 }
1908
1909 if (! overflow)
1910 {
1911 tree ut = TYPE_MAX_VALUE (type);
1912 if (ut)
1913 {
1914 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1915 if (real_less (&u, &r))
1916 {
1917 overflow = true;
1918 val = ut;
1919 }
1920 }
1921 }
1922
1923 if (! overflow)
1924 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1925
1926 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1927 return t;
1928 }
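
/* A worked example of the saturating semantics (editorial sketch,
   assuming a 32-bit int):

     REAL_VALUE_TYPE big;
     real_from_string (&big, "1.0e30");
     tree t = fold_convert_const_int_from_real
	       (FIX_TRUNC_EXPR, integer_type_node,
		build_real (double_type_node, big));

   T is INT_MAX with TREE_OVERFLOW set; a NaN input would instead yield
   zero, again with TREE_OVERFLOW set.  */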
1929
1930 /* A subroutine of fold_convert_const handling conversions of a
1931 FIXED_CST to an integer type. */
1932
1933 static tree
1934 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1935 {
1936 tree t;
1937 double_int temp, temp_trunc;
1938 unsigned int mode;
1939
1940 /* Right shift FIXED_CST to temp by fbit. */
1941 temp = TREE_FIXED_CST (arg1).data;
1942 mode = TREE_FIXED_CST (arg1).mode;
1943 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1944 {
1945 temp = temp.rshift (GET_MODE_FBIT (mode),
1946 HOST_BITS_PER_DOUBLE_INT,
1947 SIGNED_FIXED_POINT_MODE_P (mode));
1948
1949 /* Left shift temp to temp_trunc by fbit. */
1950 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1951 HOST_BITS_PER_DOUBLE_INT,
1952 SIGNED_FIXED_POINT_MODE_P (mode));
1953 }
1954 else
1955 {
1956 temp = double_int_zero;
1957 temp_trunc = double_int_zero;
1958 }
1959
1960 /* If FIXED_CST is negative, we need to round the value toward 0:
1961 if any fractional bits were shifted out, add 1 to TEMP. */
1962 if (SIGNED_FIXED_POINT_MODE_P (mode)
1963 && temp_trunc.is_negative ()
1964 && TREE_FIXED_CST (arg1).data != temp_trunc)
1965 temp += double_int_one;
1966
1967 /* Given a fixed-point constant, make new constant with new type,
1968 appropriately sign-extended or truncated. */
1969 t = force_fit_type (type, temp, -1,
1970 (temp.is_negative ()
1971 && (TYPE_UNSIGNED (type)
1972 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1973 | TREE_OVERFLOW (arg1));
1974
1975 return t;
1976 }
1977
1978 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
1979 to another floating point type. */
1980
1981 static tree
1982 fold_convert_const_real_from_real (tree type, const_tree arg1)
1983 {
1984 REAL_VALUE_TYPE value;
1985 tree t;
1986
1987 /* Don't perform the operation if flag_signaling_nans is on
1988 and the operand is a signaling NaN. */
1989 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1990 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1991 return NULL_TREE;
1992
1993 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1994 t = build_real (type, value);
1995
1996 /* If converting an infinity or NAN to a representation that doesn't
1997 have one, set the overflow bit so that we can produce some kind of
1998 error message at the appropriate point if necessary. It's not the
1999 most user-friendly message, but it's better than nothing. */
2000 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
2001 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
2002 TREE_OVERFLOW (t) = 1;
2003 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
2004 && !MODE_HAS_NANS (TYPE_MODE (type)))
2005 TREE_OVERFLOW (t) = 1;
2006 /* Regular overflow, conversion produced an infinity in a mode that
2007 can't represent them. */
2008 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
2009 && REAL_VALUE_ISINF (value)
2010 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
2011 TREE_OVERFLOW (t) = 1;
2012 else
2013 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2014 return t;
2015 }
2016
2017 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2018 to a floating point type. */
2019
2020 static tree
2021 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2022 {
2023 REAL_VALUE_TYPE value;
2024 tree t;
2025
2026 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2027 t = build_real (type, value);
2028
2029 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2030 return t;
2031 }
2032
2033 /* A subroutine of fold_convert_const handling conversions of a FIXED_CST
2034 to another fixed-point type. */
2035
2036 static tree
2037 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2038 {
2039 FIXED_VALUE_TYPE value;
2040 tree t;
2041 bool overflow_p;
2042
2043 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2044 TYPE_SATURATING (type));
2045 t = build_fixed (type, value);
2046
2047 /* Propagate overflow flags. */
2048 if (overflow_p | TREE_OVERFLOW (arg1))
2049 TREE_OVERFLOW (t) = 1;
2050 return t;
2051 }
2052
2053 /* A subroutine of fold_convert_const handling conversions of an INTEGER_CST
2054 to a fixed-point type. */
2055
2056 static tree
2057 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2058 {
2059 FIXED_VALUE_TYPE value;
2060 tree t;
2061 bool overflow_p;
2062 double_int di;
2063
2064 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2065
2066 di.low = TREE_INT_CST_ELT (arg1, 0);
2067 if (TREE_INT_CST_NUNITS (arg1) == 1)
2068 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2069 else
2070 di.high = TREE_INT_CST_ELT (arg1, 1);
2071
2072 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2073 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2074 TYPE_SATURATING (type));
2075 t = build_fixed (type, value);
2076
2077 /* Propagate overflow flags. */
2078 if (overflow_p | TREE_OVERFLOW (arg1))
2079 TREE_OVERFLOW (t) = 1;
2080 return t;
2081 }
2082
2083 /* A subroutine of fold_convert_const handling conversions of a REAL_CST
2084 to a fixed-point type. */
2085
2086 static tree
2087 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2088 {
2089 FIXED_VALUE_TYPE value;
2090 tree t;
2091 bool overflow_p;
2092
2093 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2094 &TREE_REAL_CST (arg1),
2095 TYPE_SATURATING (type));
2096 t = build_fixed (type, value);
2097
2098 /* Propagate overflow flags. */
2099 if (overflow_p | TREE_OVERFLOW (arg1))
2100 TREE_OVERFLOW (t) = 1;
2101 return t;
2102 }
2103
2104 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2105 type TYPE. If no simplification can be done return NULL_TREE. */
2106
2107 static tree
2108 fold_convert_const (enum tree_code code, tree type, tree arg1)
2109 {
2110 if (TREE_TYPE (arg1) == type)
2111 return arg1;
2112
2113 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2114 || TREE_CODE (type) == OFFSET_TYPE)
2115 {
2116 if (TREE_CODE (arg1) == INTEGER_CST)
2117 return fold_convert_const_int_from_int (type, arg1);
2118 else if (TREE_CODE (arg1) == REAL_CST)
2119 return fold_convert_const_int_from_real (code, type, arg1);
2120 else if (TREE_CODE (arg1) == FIXED_CST)
2121 return fold_convert_const_int_from_fixed (type, arg1);
2122 }
2123 else if (TREE_CODE (type) == REAL_TYPE)
2124 {
2125 if (TREE_CODE (arg1) == INTEGER_CST)
2126 return build_real_from_int_cst (type, arg1);
2127 else if (TREE_CODE (arg1) == REAL_CST)
2128 return fold_convert_const_real_from_real (type, arg1);
2129 else if (TREE_CODE (arg1) == FIXED_CST)
2130 return fold_convert_const_real_from_fixed (type, arg1);
2131 }
2132 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2133 {
2134 if (TREE_CODE (arg1) == FIXED_CST)
2135 return fold_convert_const_fixed_from_fixed (type, arg1);
2136 else if (TREE_CODE (arg1) == INTEGER_CST)
2137 return fold_convert_const_fixed_from_int (type, arg1);
2138 else if (TREE_CODE (arg1) == REAL_CST)
2139 return fold_convert_const_fixed_from_real (type, arg1);
2140 }
2141 else if (TREE_CODE (type) == VECTOR_TYPE)
2142 {
2143 if (TREE_CODE (arg1) == VECTOR_CST
2144 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2145 {
2146 int len = TYPE_VECTOR_SUBPARTS (type);
2147 tree elttype = TREE_TYPE (type);
2148 tree *v = XALLOCAVEC (tree, len);
2149 for (int i = 0; i < len; ++i)
2150 {
2151 tree elt = VECTOR_CST_ELT (arg1, i);
2152 tree cvt = fold_convert_const (code, elttype, elt);
2153 if (cvt == NULL_TREE)
2154 return NULL_TREE;
2155 v[i] = cvt;
2156 }
2157 return build_vector (type, v);
2158 }
2159 }
2160 return NULL_TREE;
2161 }
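
/* Example use (editorial illustration):

     tree c = build_int_cst (integer_type_node, 300);
     tree t = fold_convert_const (NOP_EXPR, unsigned_char_type_node, c);

   T is the unsigned char constant 44 (300 truncated to 8 bits).  A
   NULL_TREE result signals that no folding was possible, e.g. for a
   VECTOR_CST whose element count does not match TYPE.  */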
2162
2163 /* Construct a vector of zero elements of vector type TYPE. */
2164
2165 static tree
2166 build_zero_vector (tree type)
2167 {
2168 tree t;
2169
2170 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2171 return build_vector_from_val (type, t);
2172 }
2173
2174 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2175
2176 bool
2177 fold_convertible_p (const_tree type, const_tree arg)
2178 {
2179 tree orig = TREE_TYPE (arg);
2180
2181 if (type == orig)
2182 return true;
2183
2184 if (TREE_CODE (arg) == ERROR_MARK
2185 || TREE_CODE (type) == ERROR_MARK
2186 || TREE_CODE (orig) == ERROR_MARK)
2187 return false;
2188
2189 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2190 return true;
2191
2192 switch (TREE_CODE (type))
2193 {
2194 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2195 case POINTER_TYPE: case REFERENCE_TYPE:
2196 case OFFSET_TYPE:
2197 return (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2198 || TREE_CODE (orig) == OFFSET_TYPE);
2199
2200 case REAL_TYPE:
2201 case FIXED_POINT_TYPE:
2202 case COMPLEX_TYPE:
2203 case VECTOR_TYPE:
2204 case VOID_TYPE:
2205 return TREE_CODE (type) == TREE_CODE (orig);
2206
2207 default:
2208 return false;
2209 }
2210 }
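
/* E.g. (editorial note), for PTR an expression of some pointer type:

     fold_convertible_p (integer_type_node, ptr)   -> true
     fold_convertible_p (double_type_node, ptr)    -> false

   since pointers convert to integers with a plain NOP_EXPR, while a
   REAL_TYPE target requires a REAL_TYPE source.  */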
2211
2212 /* Convert expression ARG to type TYPE. Used by the middle-end for
2213 simple conversions in preference to calling the front-end's convert. */
2214
2215 tree
2216 fold_convert_loc (location_t loc, tree type, tree arg)
2217 {
2218 tree orig = TREE_TYPE (arg);
2219 tree tem;
2220
2221 if (type == orig)
2222 return arg;
2223
2224 if (TREE_CODE (arg) == ERROR_MARK
2225 || TREE_CODE (type) == ERROR_MARK
2226 || TREE_CODE (orig) == ERROR_MARK)
2227 return error_mark_node;
2228
2229 switch (TREE_CODE (type))
2230 {
2231 case POINTER_TYPE:
2232 case REFERENCE_TYPE:
2233 /* Handle conversions between pointers to different address spaces. */
2234 if (POINTER_TYPE_P (orig)
2235 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2236 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2237 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2238 /* fall through */
2239
2240 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2241 case OFFSET_TYPE:
2242 if (TREE_CODE (arg) == INTEGER_CST)
2243 {
2244 tem = fold_convert_const (NOP_EXPR, type, arg);
2245 if (tem != NULL_TREE)
2246 return tem;
2247 }
2248 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2249 || TREE_CODE (orig) == OFFSET_TYPE)
2250 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2251 if (TREE_CODE (orig) == COMPLEX_TYPE)
2252 return fold_convert_loc (loc, type,
2253 fold_build1_loc (loc, REALPART_EXPR,
2254 TREE_TYPE (orig), arg));
2255 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2256 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2257 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2258
2259 case REAL_TYPE:
2260 if (TREE_CODE (arg) == INTEGER_CST)
2261 {
2262 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2263 if (tem != NULL_TREE)
2264 return tem;
2265 }
2266 else if (TREE_CODE (arg) == REAL_CST)
2267 {
2268 tem = fold_convert_const (NOP_EXPR, type, arg);
2269 if (tem != NULL_TREE)
2270 return tem;
2271 }
2272 else if (TREE_CODE (arg) == FIXED_CST)
2273 {
2274 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2275 if (tem != NULL_TREE)
2276 return tem;
2277 }
2278
2279 switch (TREE_CODE (orig))
2280 {
2281 case INTEGER_TYPE:
2282 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2283 case POINTER_TYPE: case REFERENCE_TYPE:
2284 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2285
2286 case REAL_TYPE:
2287 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2288
2289 case FIXED_POINT_TYPE:
2290 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2291
2292 case COMPLEX_TYPE:
2293 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2294 return fold_convert_loc (loc, type, tem);
2295
2296 default:
2297 gcc_unreachable ();
2298 }
2299
2300 case FIXED_POINT_TYPE:
2301 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2302 || TREE_CODE (arg) == REAL_CST)
2303 {
2304 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2305 if (tem != NULL_TREE)
2306 goto fold_convert_exit;
2307 }
2308
2309 switch (TREE_CODE (orig))
2310 {
2311 case FIXED_POINT_TYPE:
2312 case INTEGER_TYPE:
2313 case ENUMERAL_TYPE:
2314 case BOOLEAN_TYPE:
2315 case REAL_TYPE:
2316 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2317
2318 case COMPLEX_TYPE:
2319 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2320 return fold_convert_loc (loc, type, tem);
2321
2322 default:
2323 gcc_unreachable ();
2324 }
2325
2326 case COMPLEX_TYPE:
2327 switch (TREE_CODE (orig))
2328 {
2329 case INTEGER_TYPE:
2330 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2331 case POINTER_TYPE: case REFERENCE_TYPE:
2332 case REAL_TYPE:
2333 case FIXED_POINT_TYPE:
2334 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2335 fold_convert_loc (loc, TREE_TYPE (type), arg),
2336 fold_convert_loc (loc, TREE_TYPE (type),
2337 integer_zero_node));
2338 case COMPLEX_TYPE:
2339 {
2340 tree rpart, ipart;
2341
2342 if (TREE_CODE (arg) == COMPLEX_EXPR)
2343 {
2344 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2345 TREE_OPERAND (arg, 0));
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2347 TREE_OPERAND (arg, 1));
2348 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2349 }
2350
2351 arg = save_expr (arg);
2352 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2353 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2354 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2355 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2356 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2357 }
2358
2359 default:
2360 gcc_unreachable ();
2361 }
2362
2363 case VECTOR_TYPE:
2364 if (integer_zerop (arg))
2365 return build_zero_vector (type);
2366 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2367 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2368 || TREE_CODE (orig) == VECTOR_TYPE);
2369 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2370
2371 case VOID_TYPE:
2372 tem = fold_ignored_result (arg);
2373 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2374
2375 default:
2376 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2377 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2378 gcc_unreachable ();
2379 }
2380 fold_convert_exit:
2381 protected_set_expr_location_unshare (tem, loc);
2382 return tem;
2383 }
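
/* Two representative folds (editorial illustration):

     fold_convert_loc (loc, double_type_node, z)

   for Z of COMPLEX_TYPE recurses on REALPART_EXPR <Z>, and

     fold_convert_loc (loc, vectype, integer_zero_node)

   for VECTYPE a vector type (a hypothetical name here) produces the
   all-zeros vector via build_zero_vector.  */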
2384 \f
2385 /* Return false if expr can be assumed not to be an lvalue, true
2386 otherwise. */
2387
2388 static bool
2389 maybe_lvalue_p (const_tree x)
2390 {
2391 /* We only need to wrap lvalue tree codes. */
2392 switch (TREE_CODE (x))
2393 {
2394 case VAR_DECL:
2395 case PARM_DECL:
2396 case RESULT_DECL:
2397 case LABEL_DECL:
2398 case FUNCTION_DECL:
2399 case SSA_NAME:
2400
2401 case COMPONENT_REF:
2402 case MEM_REF:
2403 case INDIRECT_REF:
2404 case ARRAY_REF:
2405 case ARRAY_RANGE_REF:
2406 case BIT_FIELD_REF:
2407 case OBJ_TYPE_REF:
2408
2409 case REALPART_EXPR:
2410 case IMAGPART_EXPR:
2411 case PREINCREMENT_EXPR:
2412 case PREDECREMENT_EXPR:
2413 case SAVE_EXPR:
2414 case TRY_CATCH_EXPR:
2415 case WITH_CLEANUP_EXPR:
2416 case COMPOUND_EXPR:
2417 case MODIFY_EXPR:
2418 case TARGET_EXPR:
2419 case COND_EXPR:
2420 case BIND_EXPR:
2421 break;
2422
2423 default:
2424 /* Assume the worst for front-end tree codes. */
2425 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2426 break;
2427 return false;
2428 }
2429
2430 return true;
2431 }
2432
2433 /* Return an expr equal to X but certainly not valid as an lvalue. */
2434
2435 tree
2436 non_lvalue_loc (location_t loc, tree x)
2437 {
2438 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2439 us. */
2440 if (in_gimple_form)
2441 return x;
2442
2443 if (! maybe_lvalue_p (x))
2444 return x;
2445 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2446 }
2447
2448 /* Return X with its location set to LOC (unsharing if needed). Despite
2449 the name, this no longer makes X invalid as a pedantic lvalue. */
2450
2451 static tree
2452 pedantic_non_lvalue_loc (location_t loc, tree x)
2453 {
2454 return protected_set_expr_location_unshare (x, loc);
2455 }
2456 \f
2457 /* Given a tree comparison code, return the code that is the logical inverse.
2458 It is generally not safe to do this for floating-point comparisons, except
2459 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2460 ERROR_MARK in this case. */
2461
2462 enum tree_code
2463 invert_tree_comparison (enum tree_code code, bool honor_nans)
2464 {
2465 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2466 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2467 return ERROR_MARK;
2468
2469 switch (code)
2470 {
2471 case EQ_EXPR:
2472 return NE_EXPR;
2473 case NE_EXPR:
2474 return EQ_EXPR;
2475 case GT_EXPR:
2476 return honor_nans ? UNLE_EXPR : LE_EXPR;
2477 case GE_EXPR:
2478 return honor_nans ? UNLT_EXPR : LT_EXPR;
2479 case LT_EXPR:
2480 return honor_nans ? UNGE_EXPR : GE_EXPR;
2481 case LE_EXPR:
2482 return honor_nans ? UNGT_EXPR : GT_EXPR;
2483 case LTGT_EXPR:
2484 return UNEQ_EXPR;
2485 case UNEQ_EXPR:
2486 return LTGT_EXPR;
2487 case UNGT_EXPR:
2488 return LE_EXPR;
2489 case UNGE_EXPR:
2490 return LT_EXPR;
2491 case UNLT_EXPR:
2492 return GE_EXPR;
2493 case UNLE_EXPR:
2494 return GT_EXPR;
2495 case ORDERED_EXPR:
2496 return UNORDERED_EXPR;
2497 case UNORDERED_EXPR:
2498 return ORDERED_EXPR;
2499 default:
2500 gcc_unreachable ();
2501 }
2502 }
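
/* For example (editorial note):

     invert_tree_comparison (LT_EXPR, false)  -> GE_EXPR
     invert_tree_comparison (LT_EXPR, true)   -> UNGE_EXPR

   except under -ftrapping-math, where the second call returns
   ERROR_MARK: x < y may trap on unordered operands while x unge y
   must not, so inverting would change the trapping behavior.  */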
2503
2504 /* Similar, but return the comparison that results if the operands are
2505 swapped. This is safe for floating-point. */
2506
2507 enum tree_code
2508 swap_tree_comparison (enum tree_code code)
2509 {
2510 switch (code)
2511 {
2512 case EQ_EXPR:
2513 case NE_EXPR:
2514 case ORDERED_EXPR:
2515 case UNORDERED_EXPR:
2516 case LTGT_EXPR:
2517 case UNEQ_EXPR:
2518 return code;
2519 case GT_EXPR:
2520 return LT_EXPR;
2521 case GE_EXPR:
2522 return LE_EXPR;
2523 case LT_EXPR:
2524 return GT_EXPR;
2525 case LE_EXPR:
2526 return GE_EXPR;
2527 case UNGT_EXPR:
2528 return UNLT_EXPR;
2529 case UNGE_EXPR:
2530 return UNLE_EXPR;
2531 case UNLT_EXPR:
2532 return UNGT_EXPR;
2533 case UNLE_EXPR:
2534 return UNGE_EXPR;
2535 default:
2536 gcc_unreachable ();
2537 }
2538 }
2539
2540
2541 /* Convert a comparison tree code from an enum tree_code representation
2542 into a compcode bit-based encoding. This function is the inverse of
2543 compcode_to_comparison. */
2544
2545 static enum comparison_code
2546 comparison_to_compcode (enum tree_code code)
2547 {
2548 switch (code)
2549 {
2550 case LT_EXPR:
2551 return COMPCODE_LT;
2552 case EQ_EXPR:
2553 return COMPCODE_EQ;
2554 case LE_EXPR:
2555 return COMPCODE_LE;
2556 case GT_EXPR:
2557 return COMPCODE_GT;
2558 case NE_EXPR:
2559 return COMPCODE_NE;
2560 case GE_EXPR:
2561 return COMPCODE_GE;
2562 case ORDERED_EXPR:
2563 return COMPCODE_ORD;
2564 case UNORDERED_EXPR:
2565 return COMPCODE_UNORD;
2566 case UNLT_EXPR:
2567 return COMPCODE_UNLT;
2568 case UNEQ_EXPR:
2569 return COMPCODE_UNEQ;
2570 case UNLE_EXPR:
2571 return COMPCODE_UNLE;
2572 case UNGT_EXPR:
2573 return COMPCODE_UNGT;
2574 case LTGT_EXPR:
2575 return COMPCODE_LTGT;
2576 case UNGE_EXPR:
2577 return COMPCODE_UNGE;
2578 default:
2579 gcc_unreachable ();
2580 }
2581 }
2582
2583 /* Convert a compcode bit-based encoding of a comparison operator back
2584 to GCC's enum tree_code representation. This function is the
2585 inverse of comparison_to_compcode. */
2586
2587 static enum tree_code
2588 compcode_to_comparison (enum comparison_code code)
2589 {
2590 switch (code)
2591 {
2592 case COMPCODE_LT:
2593 return LT_EXPR;
2594 case COMPCODE_EQ:
2595 return EQ_EXPR;
2596 case COMPCODE_LE:
2597 return LE_EXPR;
2598 case COMPCODE_GT:
2599 return GT_EXPR;
2600 case COMPCODE_NE:
2601 return NE_EXPR;
2602 case COMPCODE_GE:
2603 return GE_EXPR;
2604 case COMPCODE_ORD:
2605 return ORDERED_EXPR;
2606 case COMPCODE_UNORD:
2607 return UNORDERED_EXPR;
2608 case COMPCODE_UNLT:
2609 return UNLT_EXPR;
2610 case COMPCODE_UNEQ:
2611 return UNEQ_EXPR;
2612 case COMPCODE_UNLE:
2613 return UNLE_EXPR;
2614 case COMPCODE_UNGT:
2615 return UNGT_EXPR;
2616 case COMPCODE_LTGT:
2617 return LTGT_EXPR;
2618 case COMPCODE_UNGE:
2619 return UNGE_EXPR;
2620 default:
2621 gcc_unreachable ();
2622 }
2623 }
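
/* The encoding dedicates one bit each to LT, EQ, GT and UNORD, so
   combining two predicates on the same operands is plain bit
   arithmetic (editorial note).  For instance

     (COMPCODE_LE & COMPCODE_NE) == COMPCODE_LT, since 3 & 13 == 1,

   i.e. (a <= b) && (a != b) is exactly a < b.  */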
2624
2625 /* Return a tree for the comparison which is the combination of
2626 doing the AND or OR (depending on CODE) of the two operations LCODE
2627 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2628 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2629 if this makes the transformation invalid. */
2630
2631 tree
2632 combine_comparisons (location_t loc,
2633 enum tree_code code, enum tree_code lcode,
2634 enum tree_code rcode, tree truth_type,
2635 tree ll_arg, tree lr_arg)
2636 {
2637 bool honor_nans = HONOR_NANS (ll_arg);
2638 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2639 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2640 int compcode;
2641
2642 switch (code)
2643 {
2644 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2645 compcode = lcompcode & rcompcode;
2646 break;
2647
2648 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2649 compcode = lcompcode | rcompcode;
2650 break;
2651
2652 default:
2653 return NULL_TREE;
2654 }
2655
2656 if (!honor_nans)
2657 {
2658 /* Eliminate unordered comparisons, as well as LTGT and ORD
2659 which are not used unless the mode has NaNs. */
2660 compcode &= ~COMPCODE_UNORD;
2661 if (compcode == COMPCODE_LTGT)
2662 compcode = COMPCODE_NE;
2663 else if (compcode == COMPCODE_ORD)
2664 compcode = COMPCODE_TRUE;
2665 }
2666 else if (flag_trapping_math)
2667 {
2668 /* Check that the original operation and the optimized ones will trap
2669 under the same condition. */
2670 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2671 && (lcompcode != COMPCODE_EQ)
2672 && (lcompcode != COMPCODE_ORD);
2673 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2674 && (rcompcode != COMPCODE_EQ)
2675 && (rcompcode != COMPCODE_ORD);
2676 bool trap = (compcode & COMPCODE_UNORD) == 0
2677 && (compcode != COMPCODE_EQ)
2678 && (compcode != COMPCODE_ORD);
2679
2680 /* In a short-circuited boolean expression the LHS might be
2681 such that the RHS, if evaluated, will never trap. For
2682 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2683 if neither x nor y is NaN. (This is a mixed blessing: for
2684 example, the expression above will never trap, hence
2685 optimizing it to x < y would be invalid). */
2686 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2687 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2688 rtrap = false;
2689
2690 /* If the comparison was short-circuited, and only the RHS
2691 trapped, we may now generate a spurious trap. */
2692 if (rtrap && !ltrap
2693 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2694 return NULL_TREE;
2695
2696 /* If we changed the conditions that cause a trap, we lose. */
2697 if ((ltrap || rtrap) != trap)
2698 return NULL_TREE;
2699 }
2700
2701 if (compcode == COMPCODE_TRUE)
2702 return constant_boolean_node (true, truth_type);
2703 else if (compcode == COMPCODE_FALSE)
2704 return constant_boolean_node (false, truth_type);
2705 else
2706 {
2707 enum tree_code tcode;
2708
2709 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2710 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2711 }
2712 }
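
/* Example (editorial illustration): for integral A and B,

     combine_comparisons (loc, TRUTH_ANDIF_EXPR, LE_EXPR, NE_EXPR,
			  boolean_type_node, a, b)

   returns the tree A < B, by the compcode arithmetic shown above.
   With NaNs honored and -ftrapping-math it may return NULL_TREE
   instead, whenever the fold would add or remove a potential trap.  */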
2713 \f
2714 /* Return nonzero if two operands (typically of the same tree node)
2715 are necessarily equal. FLAGS modifies behavior as follows:
2716
2717 If OEP_ONLY_CONST is set, only return nonzero for constants.
2718 This function tests whether the operands are indistinguishable;
2719 it does not test whether they are equal using C's == operation.
2720 The distinction is important for IEEE floating point, because
2721 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2722 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2723
2724 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2725 even though it may hold multiple values during a function.
2726 This is because a GCC tree node guarantees that nothing else is
2727 executed between the evaluation of its "operands" (which may often
2728 be evaluated in arbitrary order). Hence if the operands themselves
2729 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2730 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2731 unset means assuming isochronic (or instantaneous) tree equivalence.
2732 Unless comparing arbitrary expression trees, such as from different
2733 statements, this flag can usually be left unset.
2734
2735 If OEP_PURE_SAME is set, then pure functions with identical arguments
2736 are considered the same. It is used when the caller has other ways
2737 to ensure that global memory is unchanged in between.
2738
2739 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2740 not values of expressions.
2741
2742 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2743 any operand with side effects. This is unnecessarily conservative in the
2744 case where we know that arg0 and arg1 are in disjoint code paths (such as
2745 in the ?: operator). In addition, OEP_MATCH_SIDE_EFFECTS is used when comparing
2746 addresses with TREE_CONSTANT flag set so we know that &var == &var
2747 even if var is volatile. */
2748
2749 int
2750 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2751 {
2752 /* When checking, verify at the outermost operand_equal_p call that
2753 if operand_equal_p returns non-zero then ARG0 and ARG1 have the same
2754 hash value. */
2755 if (flag_checking && !(flags & OEP_NO_HASH_CHECK))
2756 {
2757 if (operand_equal_p (arg0, arg1, flags | OEP_NO_HASH_CHECK))
2758 {
2759 if (arg0 != arg1)
2760 {
2761 inchash::hash hstate0 (0), hstate1 (0);
2762 inchash::add_expr (arg0, hstate0, flags);
2763 inchash::add_expr (arg1, hstate1, flags);
2764 hashval_t h0 = hstate0.end ();
2765 hashval_t h1 = hstate1.end ();
2766 gcc_assert (h0 == h1);
2767 }
2768 return 1;
2769 }
2770 else
2771 return 0;
2772 }
2773
2774 /* If either is ERROR_MARK, they aren't equal. */
2775 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2776 || TREE_TYPE (arg0) == error_mark_node
2777 || TREE_TYPE (arg1) == error_mark_node)
2778 return 0;
2779
2780 /* Similar, if either does not have a type (like a released SSA name),
2781 they aren't equal. */
2782 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2783 return 0;
2784
2785 /* We cannot consider pointers to different address spaces equal. */
2786 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2787 && POINTER_TYPE_P (TREE_TYPE (arg1))
2788 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2789 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2790 return 0;
2791
2792 /* Check equality of integer constants before bailing out due to
2793 precision differences. */
2794 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2795 {
2796 /* Address of INTEGER_CST is not defined; check that we did not forget
2797 to drop the OEP_ADDRESS_OF flag. */
2798 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2799 return tree_int_cst_equal (arg0, arg1);
2800 }
2801
2802 if (!(flags & OEP_ADDRESS_OF))
2803 {
2804 /* If both types don't have the same signedness, then we can't consider
2805 them equal. We must check this before the STRIP_NOPS calls
2806 because they may change the signedness of the arguments. As pointers
2807 strictly don't have a signedness, require either two pointers or
2808 two non-pointers as well. */
2809 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2810 || POINTER_TYPE_P (TREE_TYPE (arg0))
2811 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2812 return 0;
2813
2814 /* If both types don't have the same precision, then it is not safe
2815 to strip NOPs. */
2816 if (element_precision (TREE_TYPE (arg0))
2817 != element_precision (TREE_TYPE (arg1)))
2818 return 0;
2819
2820 STRIP_NOPS (arg0);
2821 STRIP_NOPS (arg1);
2822 }
2823 #if 0
2824 /* FIXME: Fortran FE currently produces ADDR_EXPR of NOP_EXPR. Enable the
2825 sanity check once the issue is solved. */
2826 else
2827 /* Addresses of conversions and SSA_NAMEs (and many other things)
2828 are not defined. Check that we did not forget to drop the
2829 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2830 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2831 && TREE_CODE (arg0) != SSA_NAME);
2832 #endif
2833
2834 /* In case both args are comparisons but with different comparison
2835 codes, try to swap the comparison operands of one arg to produce
2836 a match and compare that variant. */
2837 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2838 && COMPARISON_CLASS_P (arg0)
2839 && COMPARISON_CLASS_P (arg1))
2840 {
2841 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2842
2843 if (TREE_CODE (arg0) == swap_code)
2844 return operand_equal_p (TREE_OPERAND (arg0, 0),
2845 TREE_OPERAND (arg1, 1), flags)
2846 && operand_equal_p (TREE_OPERAND (arg0, 1),
2847 TREE_OPERAND (arg1, 0), flags);
2848 }
2849
2850 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2851 {
2852 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2853 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2854 ;
2855 else if (flags & OEP_ADDRESS_OF)
2856 {
2857 /* If we are interested in comparing addresses ignore
2858 MEM_REF wrappings of the base that can appear just for
2859 TBAA reasons. */
2860 if (TREE_CODE (arg0) == MEM_REF
2861 && DECL_P (arg1)
2862 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2863 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2864 && integer_zerop (TREE_OPERAND (arg0, 1)))
2865 return 1;
2866 else if (TREE_CODE (arg1) == MEM_REF
2867 && DECL_P (arg0)
2868 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2869 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2870 && integer_zerop (TREE_OPERAND (arg1, 1)))
2871 return 1;
2872 return 0;
2873 }
2874 else
2875 return 0;
2876 }
2877
2878 /* When not checking addresses, this is needed for conversions and for
2879 COMPONENT_REF. Might as well play it safe and always test this. */
2880 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2881 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2882 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2883 && !(flags & OEP_ADDRESS_OF)))
2884 return 0;
2885
2886 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2887 We don't care about side effects in that case because the SAVE_EXPR
2888 takes care of that for us. In all other cases, two expressions are
2889 equal if they have no side effects. If we have two identical
2890 expressions with side effects that should be treated the same due
2891 to the only side effects being identical SAVE_EXPR's, that will
2892 be detected in the recursive calls below.
2893 If we are taking an invariant address of two identical objects
2894 they are necessarily equal as well. */
2895 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2896 && (TREE_CODE (arg0) == SAVE_EXPR
2897 || (flags & OEP_MATCH_SIDE_EFFECTS)
2898 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2899 return 1;
2900
2901 /* Next handle constant cases, those for which we can return 1 even
2902 if ONLY_CONST is set. */
2903 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2904 switch (TREE_CODE (arg0))
2905 {
2906 case INTEGER_CST:
2907 return tree_int_cst_equal (arg0, arg1);
2908
2909 case FIXED_CST:
2910 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2911 TREE_FIXED_CST (arg1));
2912
2913 case REAL_CST:
2914 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2915 return 1;
2916
2917
2918 if (!HONOR_SIGNED_ZEROS (arg0))
2919 {
2920 /* If we do not distinguish between signed and unsigned zero,
2921 consider them equal. */
2922 if (real_zerop (arg0) && real_zerop (arg1))
2923 return 1;
2924 }
2925 return 0;
2926
2927 case VECTOR_CST:
2928 {
2929 unsigned i;
2930
2931 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2932 return 0;
2933
2934 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2935 {
2936 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2937 VECTOR_CST_ELT (arg1, i), flags))
2938 return 0;
2939 }
2940 return 1;
2941 }
2942
2943 case COMPLEX_CST:
2944 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2945 flags)
2946 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2947 flags));
2948
2949 case STRING_CST:
2950 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2951 && ! memcmp (TREE_STRING_POINTER (arg0),
2952 TREE_STRING_POINTER (arg1),
2953 TREE_STRING_LENGTH (arg0)));
2954
2955 case ADDR_EXPR:
2956 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2957 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2958 flags | OEP_ADDRESS_OF
2959 | OEP_MATCH_SIDE_EFFECTS);
2960 case CONSTRUCTOR:
2961 /* In GIMPLE empty constructors are allowed in initializers of
2962 aggregates. */
2963 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2964 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2965 default:
2966 break;
2967 }
2968
2969 if (flags & OEP_ONLY_CONST)
2970 return 0;
2971
2972 /* Define macros to test an operand from arg0 and arg1 for equality and a
2973 variant that allows null and views null as being different from any
2974 non-null value. In the latter case, if either is null, then both
2975 must be; otherwise, do the normal comparison. */
2976 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2977 TREE_OPERAND (arg1, N), flags)
2978
2979 #define OP_SAME_WITH_NULL(N) \
2980 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2981 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2982
2983 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2984 {
2985 case tcc_unary:
2986 /* Two conversions are equal only if signedness and modes match. */
2987 switch (TREE_CODE (arg0))
2988 {
2989 CASE_CONVERT:
2990 case FIX_TRUNC_EXPR:
2991 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2992 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2993 return 0;
2994 break;
2995 default:
2996 break;
2997 }
2998
2999 return OP_SAME (0);
3000
3001
3002 case tcc_comparison:
3003 case tcc_binary:
3004 if (OP_SAME (0) && OP_SAME (1))
3005 return 1;
3006
3007 /* For commutative ops, allow the other order. */
3008 return (commutative_tree_code (TREE_CODE (arg0))
3009 && operand_equal_p (TREE_OPERAND (arg0, 0),
3010 TREE_OPERAND (arg1, 1), flags)
3011 && operand_equal_p (TREE_OPERAND (arg0, 1),
3012 TREE_OPERAND (arg1, 0), flags));
3013
3014 case tcc_reference:
3015 /* If either of the pointer (or reference) expressions we are
3016 dereferencing contains a side effect, they cannot be equal,
3017 but their addresses can be. */
3018 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
3019 && (TREE_SIDE_EFFECTS (arg0)
3020 || TREE_SIDE_EFFECTS (arg1)))
3021 return 0;
3022
3023 switch (TREE_CODE (arg0))
3024 {
3025 case INDIRECT_REF:
3026 if (!(flags & OEP_ADDRESS_OF)
3027 && (TYPE_ALIGN (TREE_TYPE (arg0))
3028 != TYPE_ALIGN (TREE_TYPE (arg1))))
3029 return 0;
3030 flags &= ~OEP_ADDRESS_OF;
3031 return OP_SAME (0);
3032
3033 case IMAGPART_EXPR:
3034 /* Require the same offset. */
3035 if (!operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3036 TYPE_SIZE (TREE_TYPE (arg1)),
3037 flags & ~OEP_ADDRESS_OF))
3038 return 0;
3039
3040 /* Fallthru. */
3041 case REALPART_EXPR:
3042 case VIEW_CONVERT_EXPR:
3043 return OP_SAME (0);
3044
3045 case TARGET_MEM_REF:
3046 case MEM_REF:
3047 if (!(flags & OEP_ADDRESS_OF))
3048 {
3049 /* Require equal access sizes. */
3050 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3051 && (!TYPE_SIZE (TREE_TYPE (arg0))
3052 || !TYPE_SIZE (TREE_TYPE (arg1))
3053 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3054 TYPE_SIZE (TREE_TYPE (arg1)),
3055 flags)))
3056 return 0;
3057 /* Verify that access happens in similar types. */
3058 if (!types_compatible_p (TREE_TYPE (arg0), TREE_TYPE (arg1)))
3059 return 0;
3060 /* Verify that accesses are TBAA compatible. */
3061 if (!alias_ptr_types_compatible_p
3062 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3063 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3064 || (MR_DEPENDENCE_CLIQUE (arg0)
3065 != MR_DEPENDENCE_CLIQUE (arg1))
3066 || (MR_DEPENDENCE_BASE (arg0)
3067 != MR_DEPENDENCE_BASE (arg1)))
3068 return 0;
3069 /* Verify that alignment is compatible. */
3070 if (TYPE_ALIGN (TREE_TYPE (arg0))
3071 != TYPE_ALIGN (TREE_TYPE (arg1)))
3072 return 0;
3073 }
3074 flags &= ~OEP_ADDRESS_OF;
3075 return (OP_SAME (0) && OP_SAME (1)
3076 /* TARGET_MEM_REF requires equal extra operands. */
3077 && (TREE_CODE (arg0) != TARGET_MEM_REF
3078 || (OP_SAME_WITH_NULL (2)
3079 && OP_SAME_WITH_NULL (3)
3080 && OP_SAME_WITH_NULL (4))));
3081
3082 case ARRAY_REF:
3083 case ARRAY_RANGE_REF:
3084 if (!OP_SAME (0))
3085 return 0;
3086 flags &= ~OEP_ADDRESS_OF;
3087 /* First compare the array index by value if it is constant, as we
3088 may have different types but the same value here. */
3089 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3090 TREE_OPERAND (arg1, 1))
3091 || OP_SAME (1))
3092 && OP_SAME_WITH_NULL (2)
3093 && OP_SAME_WITH_NULL (3)
3094 /* Compare low bound and element size as with OEP_ADDRESS_OF
3095 we have to account for the offset of the ref. */
3096 && (TREE_TYPE (TREE_OPERAND (arg0, 0))
3097 == TREE_TYPE (TREE_OPERAND (arg1, 0))
3098 || (operand_equal_p (array_ref_low_bound
3099 (CONST_CAST_TREE (arg0)),
3100 array_ref_low_bound
3101 (CONST_CAST_TREE (arg1)), flags)
3102 && operand_equal_p (array_ref_element_size
3103 (CONST_CAST_TREE (arg0)),
3104 array_ref_element_size
3105 (CONST_CAST_TREE (arg1)),
3106 flags))));
3107
3108 case COMPONENT_REF:
3109 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3110 may be NULL when we're called to compare MEM_EXPRs. */
3111 if (!OP_SAME_WITH_NULL (0)
3112 || !OP_SAME (1))
3113 return 0;
3114 flags &= ~OEP_ADDRESS_OF;
3115 return OP_SAME_WITH_NULL (2);
3116
3117 case BIT_FIELD_REF:
3118 if (!OP_SAME (0))
3119 return 0;
3120 flags &= ~OEP_ADDRESS_OF;
3121 return OP_SAME (1) && OP_SAME (2);
3122
3123 default:
3124 return 0;
3125 }
3126
3127 case tcc_expression:
3128 switch (TREE_CODE (arg0))
3129 {
3130 case ADDR_EXPR:
3131 /* Be sure we pass the right ADDRESS_OF flag. */
3132 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3133 return operand_equal_p (TREE_OPERAND (arg0, 0),
3134 TREE_OPERAND (arg1, 0),
3135 flags | OEP_ADDRESS_OF);
3136
3137 case TRUTH_NOT_EXPR:
3138 return OP_SAME (0);
3139
3140 case TRUTH_ANDIF_EXPR:
3141 case TRUTH_ORIF_EXPR:
3142 return OP_SAME (0) && OP_SAME (1);
3143
3144 case FMA_EXPR:
3145 case WIDEN_MULT_PLUS_EXPR:
3146 case WIDEN_MULT_MINUS_EXPR:
3147 if (!OP_SAME (2))
3148 return 0;
3149 /* The multiplication operands are commutative. */
3150 /* FALLTHRU */
3151
3152 case TRUTH_AND_EXPR:
3153 case TRUTH_OR_EXPR:
3154 case TRUTH_XOR_EXPR:
3155 if (OP_SAME (0) && OP_SAME (1))
3156 return 1;
3157
3158 /* Otherwise take into account this is a commutative operation. */
3159 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3160 TREE_OPERAND (arg1, 1), flags)
3161 && operand_equal_p (TREE_OPERAND (arg0, 1),
3162 TREE_OPERAND (arg1, 0), flags));
3163
3164 case COND_EXPR:
3165 if (! OP_SAME (1) || ! OP_SAME (2))
3166 return 0;
3167 flags &= ~OEP_ADDRESS_OF;
3168 return OP_SAME (0);
3169
3170 case VEC_COND_EXPR:
3171 case DOT_PROD_EXPR:
3172 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3173
3174 default:
3175 return 0;
3176 }
3177
3178 case tcc_vl_exp:
3179 switch (TREE_CODE (arg0))
3180 {
3181 case CALL_EXPR:
3182 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3183 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3184 /* If the CALL_EXPRs are not both internal or both normal function
3185 calls, then they are not equal. */
3186 return 0;
3187 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3188 {
3189 /* If the CALL_EXPRs call different internal functions, then they
3190 are not equal. */
3191 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3192 return 0;
3193 }
3194 else
3195 {
3196 /* If the CALL_EXPRs call different functions, then they are not
3197 equal. */
3198 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3199 flags))
3200 return 0;
3201 }
3202
3203 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3204 {
3205 unsigned int cef = call_expr_flags (arg0);
3206 if (flags & OEP_PURE_SAME)
3207 cef &= ECF_CONST | ECF_PURE;
3208 else
3209 cef &= ECF_CONST;
3210 if (!cef)
3211 return 0;
3212 }
3213
3214 /* Now see if all the arguments are the same. */
3215 {
3216 const_call_expr_arg_iterator iter0, iter1;
3217 const_tree a0, a1;
3218 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3219 a1 = first_const_call_expr_arg (arg1, &iter1);
3220 a0 && a1;
3221 a0 = next_const_call_expr_arg (&iter0),
3222 a1 = next_const_call_expr_arg (&iter1))
3223 if (! operand_equal_p (a0, a1, flags))
3224 return 0;
3225
3226 /* If we get here and both argument lists are exhausted
3227 then the CALL_EXPRs are equal. */
3228 return ! (a0 || a1);
3229 }
3230 default:
3231 return 0;
3232 }
3233
3234 case tcc_declaration:
3235 /* Consider __builtin_sqrt equal to sqrt. */
3236 return (TREE_CODE (arg0) == FUNCTION_DECL
3237 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3238 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3239 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3240
3241 case tcc_exceptional:
3242 if (TREE_CODE (arg0) == CONSTRUCTOR)
3243 {
3244 /* In GIMPLE constructors are used only to build vectors from
3245 elements. Individual elements in the constructor must be
3246 indexed in increasing order and form an initial sequence.
3247
3248 We make no effort to compare constructors in GENERIC.
3249 (See sem_variable::equals in ipa-icf, which can do so for
3250 constants.) */
3251 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3252 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3253 return 0;
3254
3255 /* Be sure that the vectors constructed have the same representation.
3256 So far we have only checked that element precisions and modes match.
3257 Vectors may be BLKmode, so also check that the numbers of
3258 parts match. */
3259 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3260 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3261 return 0;
3262
3263 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3264 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3265 unsigned int len = vec_safe_length (v0);
3266
3267 if (len != vec_safe_length (v1))
3268 return 0;
3269
3270 for (unsigned int i = 0; i < len; i++)
3271 {
3272 constructor_elt *c0 = &(*v0)[i];
3273 constructor_elt *c1 = &(*v1)[i];
3274
3275 if (!operand_equal_p (c0->value, c1->value, flags)
3276 /* In GIMPLE the indexes can be either NULL or matching i.
3277 Double check this so we won't get false
3278 positives for GENERIC. */
3279 || (c0->index
3280 && (TREE_CODE (c0->index) != INTEGER_CST
3281 || !compare_tree_int (c0->index, i)))
3282 || (c1->index
3283 && (TREE_CODE (c1->index) != INTEGER_CST
3284 || !compare_tree_int (c1->index, i))))
3285 return 0;
3286 }
3287 return 1;
3288 }
3289 return 0;
3290
3291 default:
3292 return 0;
3293 }
3294
3295 #undef OP_SAME
3296 #undef OP_SAME_WITH_NULL
3297 }
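
/* A sample verdict (editorial note; A and B are equal-typed operands
   without side effects, e.g. decls):

     operand_equal_p (build2 (PLUS_EXPR, type, a, b),
		      build2 (PLUS_EXPR, type, b, a), 0)   -> 1

   because PLUS_EXPR is commutative; by contrast, two REAL_CST zeros of
   opposite sign compare unequal whenever signed zeros are honored.  */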
3298 \f
3299 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3300 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3301
3302 When in doubt, return 0. */
3303
3304 static int
3305 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3306 {
3307 int unsignedp1, unsignedpo;
3308 tree primarg0, primarg1, primother;
3309 unsigned int correct_width;
3310
3311 if (operand_equal_p (arg0, arg1, 0))
3312 return 1;
3313
3314 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3315 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3316 return 0;
3317
3318 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3319 and see if the inner values are the same. This removes any
3320 signedness comparison, which doesn't matter here. */
3321 primarg0 = arg0, primarg1 = arg1;
3322 STRIP_NOPS (primarg0);
3323 STRIP_NOPS (primarg1);
3324 if (operand_equal_p (primarg0, primarg1, 0))
3325 return 1;
3326
3327 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3328 actual comparison operand, ARG0.
3329
3330 First throw away any conversions to wider types
3331 already present in the operands. */
3332
3333 primarg1 = get_narrower (arg1, &unsignedp1);
3334 primother = get_narrower (other, &unsignedpo);
3335
3336 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3337 if (unsignedp1 == unsignedpo
3338 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3339 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3340 {
3341 tree type = TREE_TYPE (arg0);
3342
3343 /* Make sure the shorter operand is extended the right way
3344 to match the longer operand. */
3345 primarg1 = fold_convert (signed_or_unsigned_type_for
3346 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3347
3348 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3349 return 1;
3350 }
3351
3352 return 0;
3353 }
3354 \f
3355 /* See if ARG is an expression that is either a comparison or is performing
3356 arithmetic on comparisons. The comparisons must only be comparing
3357 two different values, which will be stored in *CVAL1 and *CVAL2; if
3358 they are nonzero it means that some operands have already been found.
3359 No variables may be used anywhere else in the expression except in the
3360 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3361 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3362
3363 If this is true, return 1. Otherwise, return zero. */
3364
3365 static int
3366 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3367 {
3368 enum tree_code code = TREE_CODE (arg);
3369 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3370
3371 /* We can handle some of the tcc_expression cases here. */
3372 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3373 tclass = tcc_unary;
3374 else if (tclass == tcc_expression
3375 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3376 || code == COMPOUND_EXPR))
3377 tclass = tcc_binary;
3378
3379 else if (tclass == tcc_expression && code == SAVE_EXPR
3380 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3381 {
3382 /* If we've already found a CVAL1 or CVAL2, this expression is
3383 too complex to handle. */
3384 if (*cval1 || *cval2)
3385 return 0;
3386
3387 tclass = tcc_unary;
3388 *save_p = 1;
3389 }
3390
3391 switch (tclass)
3392 {
3393 case tcc_unary:
3394 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3395
3396 case tcc_binary:
3397 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3398 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3399 cval1, cval2, save_p));
3400
3401 case tcc_constant:
3402 return 1;
3403
3404 case tcc_expression:
3405 if (code == COND_EXPR)
3406 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3407 cval1, cval2, save_p)
3408 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3409 cval1, cval2, save_p)
3410 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3411 cval1, cval2, save_p));
3412 return 0;
3413
3414 case tcc_comparison:
3415 /* First see if we can handle the first operand, then the second. For
3416 the second operand, we know *CVAL1 can't be zero. It must be that
3417 one side of the comparison is each of the values; test for the
3418 case where this isn't true by failing if the two operands
3419 are the same. */
3420
3421 if (operand_equal_p (TREE_OPERAND (arg, 0),
3422 TREE_OPERAND (arg, 1), 0))
3423 return 0;
3424
3425 if (*cval1 == 0)
3426 *cval1 = TREE_OPERAND (arg, 0);
3427 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3428 ;
3429 else if (*cval2 == 0)
3430 *cval2 = TREE_OPERAND (arg, 0);
3431 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3432 ;
3433 else
3434 return 0;
3435
3436 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3437 ;
3438 else if (*cval2 == 0)
3439 *cval2 = TREE_OPERAND (arg, 1);
3440 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3441 ;
3442 else
3443 return 0;
3444
3445 return 1;
3446
3447 default:
3448 return 0;
3449 }
3450 }
3451 \f
3452 /* ARG is a tree that is known to contain just arithmetic operations and
3453 comparisons. Evaluate the operations in the tree substituting NEW0 for
3454 any occurrence of OLD0 as an operand of a comparison and likewise for
3455 NEW1 and OLD1. */
3456
3457 static tree
3458 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3459 tree old1, tree new1)
3460 {
3461 tree type = TREE_TYPE (arg);
3462 enum tree_code code = TREE_CODE (arg);
3463 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3464
3465 /* We can handle some of the tcc_expression cases here. */
3466 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3467 tclass = tcc_unary;
3468 else if (tclass == tcc_expression
3469 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3470 tclass = tcc_binary;
3471
3472 switch (tclass)
3473 {
3474 case tcc_unary:
3475 return fold_build1_loc (loc, code, type,
3476 eval_subst (loc, TREE_OPERAND (arg, 0),
3477 old0, new0, old1, new1));
3478
3479 case tcc_binary:
3480 return fold_build2_loc (loc, code, type,
3481 eval_subst (loc, TREE_OPERAND (arg, 0),
3482 old0, new0, old1, new1),
3483 eval_subst (loc, TREE_OPERAND (arg, 1),
3484 old0, new0, old1, new1));
3485
3486 case tcc_expression:
3487 switch (code)
3488 {
3489 case SAVE_EXPR:
3490 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3491 old1, new1);
3492
3493 case COMPOUND_EXPR:
3494 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3495 old1, new1);
3496
3497 case COND_EXPR:
3498 return fold_build3_loc (loc, code, type,
3499 eval_subst (loc, TREE_OPERAND (arg, 0),
3500 old0, new0, old1, new1),
3501 eval_subst (loc, TREE_OPERAND (arg, 1),
3502 old0, new0, old1, new1),
3503 eval_subst (loc, TREE_OPERAND (arg, 2),
3504 old0, new0, old1, new1));
3505 default:
3506 break;
3507 }
3508 /* Fall through - ??? */
3509
3510 case tcc_comparison:
3511 {
3512 tree arg0 = TREE_OPERAND (arg, 0);
3513 tree arg1 = TREE_OPERAND (arg, 1);
3514
3515 /* We need to check both for exact equality and tree equality. The
3516 former will be true if the operand has a side-effect. In that
3517 case, we know the operand occurred exactly once. */
3518
3519 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3520 arg0 = new0;
3521 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3522 arg0 = new1;
3523
3524 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3525 arg1 = new0;
3526 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3527 arg1 = new1;
3528
3529 return fold_build2_loc (loc, code, type, arg0, arg1);
3530 }
3531
3532 default:
3533 return arg;
3534 }
3535 }
3536 \f
3537 /* Return a tree for the case when the result of an expression is RESULT
3538 converted to TYPE and OMITTED was previously an operand of the expression
3539 but is now not needed (e.g., we folded OMITTED * 0).
3540
3541 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3542 the conversion of RESULT to TYPE. */
3543
3544 tree
3545 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3546 {
3547 tree t = fold_convert_loc (loc, type, result);
3548
3549 /* If the resulting operand is an empty statement, just return the omitted
3550 statement cast to void. */
3551 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3552 return build1_loc (loc, NOP_EXPR, void_type_node,
3553 fold_ignored_result (omitted));
3554
3555 if (TREE_SIDE_EFFECTS (omitted))
3556 return build2_loc (loc, COMPOUND_EXPR, type,
3557 fold_ignored_result (omitted), t);
3558
3559 return non_lvalue_loc (loc, t);
3560 }
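
/* For instance (editorial illustration), when folding CALL * 0 the
   multiplication disappears but the call must still be evaluated:

     omit_one_operand_loc (loc, type, integer_zero_node, call)

   yields the COMPOUND_EXPR <CALL, 0> because CALL has side effects;
   with a side-effect-free OMITTED the result would simply be 0.  */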
3561
3562 /* Return a tree for the case when the result of an expression is RESULT
3563 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3564 of the expression but are now not needed.
3565
3566 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3567 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3568 evaluated before OMITTED2. Otherwise, if neither has side effects,
3569 just do the conversion of RESULT to TYPE. */
3570
3571 tree
3572 omit_two_operands_loc (location_t loc, tree type, tree result,
3573 tree omitted1, tree omitted2)
3574 {
3575 tree t = fold_convert_loc (loc, type, result);
3576
3577 if (TREE_SIDE_EFFECTS (omitted2))
3578 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3579 if (TREE_SIDE_EFFECTS (omitted1))
3580 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3581
3582 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3583 }
3584
3585 \f
3586 /* Return a simplified tree node for the truth-negation of ARG. This
3587 never alters ARG itself. We assume that ARG is an operation that
3588 returns a truth value (0 or 1).
3589
3590 FIXME: one would think we would fold the result, but it causes
3591 problems with the dominator optimizer. */
3592
3593 static tree
3594 fold_truth_not_expr (location_t loc, tree arg)
3595 {
3596 tree type = TREE_TYPE (arg);
3597 enum tree_code code = TREE_CODE (arg);
3598 location_t loc1, loc2;
3599
3600 /* If this is a comparison, we can simply invert it, except for
3601 floating-point non-equality comparisons, in which case we just
3602 enclose a TRUTH_NOT_EXPR around what we have. */
3603
3604 if (TREE_CODE_CLASS (code) == tcc_comparison)
3605 {
3606 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3607 if (FLOAT_TYPE_P (op_type)
3608 && flag_trapping_math
3609 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3610 && code != NE_EXPR && code != EQ_EXPR)
3611 return NULL_TREE;
3612
3613 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3614 if (code == ERROR_MARK)
3615 return NULL_TREE;
3616
3617 tree ret = build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3618 TREE_OPERAND (arg, 1));
3619 if (TREE_NO_WARNING (arg))
3620 TREE_NO_WARNING (ret) = 1;
3621 return ret;
3622 }
3623
3624 switch (code)
3625 {
3626 case INTEGER_CST:
3627 return constant_boolean_node (integer_zerop (arg), type);
3628
3629 case TRUTH_AND_EXPR:
3630 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3631 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3632 return build2_loc (loc, TRUTH_OR_EXPR, type,
3633 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3634 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3635
3636 case TRUTH_OR_EXPR:
3637 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3638 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3639 return build2_loc (loc, TRUTH_AND_EXPR, type,
3640 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3641 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3642
3643 case TRUTH_XOR_EXPR:
3644 /* Here we can invert either operand. We invert the first operand
3645 unless the second operand is a TRUTH_NOT_EXPR in which case our
3646 result is the XOR of the first operand with the inside of the
3647 negation of the second operand. */
3648
3649 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3650 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3651 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3652 else
3653 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3654 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3655 TREE_OPERAND (arg, 1));
3656
3657 case TRUTH_ANDIF_EXPR:
3658 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3659 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3660 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3661 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3662 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3663
3664 case TRUTH_ORIF_EXPR:
3665 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3666 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3667 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3668 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3669 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3670
3671 case TRUTH_NOT_EXPR:
3672 return TREE_OPERAND (arg, 0);
3673
3674 case COND_EXPR:
3675 {
3676 tree arg1 = TREE_OPERAND (arg, 1);
3677 tree arg2 = TREE_OPERAND (arg, 2);
3678
3679 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3680 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3681
3682 /* A COND_EXPR may have a throw as one operand, which
3683 then has void type. Just leave void operands
3684 as they are. */
3685 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3686 VOID_TYPE_P (TREE_TYPE (arg1))
3687 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3688 VOID_TYPE_P (TREE_TYPE (arg2))
3689 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3690 }
3691
3692 case COMPOUND_EXPR:
3693 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3694 return build2_loc (loc, COMPOUND_EXPR, type,
3695 TREE_OPERAND (arg, 0),
3696 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3697
3698 case NON_LVALUE_EXPR:
3699 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3700 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3701
3702 CASE_CONVERT:
3703 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3704 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3705
3706 /* ... fall through ... */
3707
3708 case FLOAT_EXPR:
3709 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3710 return build1_loc (loc, TREE_CODE (arg), type,
3711 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3712
3713 case BIT_AND_EXPR:
3714 if (!integer_onep (TREE_OPERAND (arg, 1)))
3715 return NULL_TREE;
3716 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3717
3718 case SAVE_EXPR:
3719 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3720
3721 case CLEANUP_POINT_EXPR:
3722 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3723 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3724 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3725
3726 default:
3727 return NULL_TREE;
3728 }
3729 }
3730
3731 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3732 assume that ARG is an operation that returns a truth value (0 or 1
3733 for scalars, 0 or -1 for vectors). Return the folded expression if
3734 folding is successful. Otherwise, return NULL_TREE. */
3735
3736 static tree
3737 fold_invert_truthvalue (location_t loc, tree arg)
3738 {
3739 tree type = TREE_TYPE (arg);
3740 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3741 ? BIT_NOT_EXPR
3742 : TRUTH_NOT_EXPR,
3743 type, arg);
3744 }
3745
3746 /* Return a simplified tree node for the truth-negation of ARG. This
3747 never alters ARG itself. We assume that ARG is an operation that
3748 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3749
3750 tree
3751 invert_truthvalue_loc (location_t loc, tree arg)
3752 {
3753 if (TREE_CODE (arg) == ERROR_MARK)
3754 return arg;
3755
3756 tree type = TREE_TYPE (arg);
3757 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3758 ? BIT_NOT_EXPR
3759 : TRUTH_NOT_EXPR,
3760 type, arg);
3761 }
3762
3763 /* Knowing that ARG0 and ARG1 are each either a RDIV_EXPR or a MULT_EXPR,
3764 simplify a binary operation with code CODE. This optimization is unsafe. */
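/* E.g. a/c + b/c becomes (a + b) / c, and a/2.0 + a/4.0 becomes
   a * (1/2.0 + 1/4.0) = a * 0.75. In general the combined constant
   rounds differently from the two separate divisions, which is one
   reason this is only valid under unsafe-math semantics. */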
3765 static tree
3766 distribute_real_division (location_t loc, enum tree_code code, tree type,
3767 tree arg0, tree arg1)
3768 {
3769 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3770 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3771
3772 /* (A / C) +- (B / C) -> (A +- B) / C. */
3773 if (mul0 == mul1
3774 && operand_equal_p (TREE_OPERAND (arg0, 1),
3775 TREE_OPERAND (arg1, 1), 0))
3776 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3777 fold_build2_loc (loc, code, type,
3778 TREE_OPERAND (arg0, 0),
3779 TREE_OPERAND (arg1, 0)),
3780 TREE_OPERAND (arg0, 1));
3781
3782 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3783 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3784 TREE_OPERAND (arg1, 0), 0)
3785 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3786 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3787 {
3788 REAL_VALUE_TYPE r0, r1;
3789 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3790 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3791 if (!mul0)
3792 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3793 if (!mul1)
3794 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3795 real_arithmetic (&r0, code, &r0, &r1);
3796 return fold_build2_loc (loc, MULT_EXPR, type,
3797 TREE_OPERAND (arg0, 0),
3798 build_real (type, r0));
3799 }
3800
3801 return NULL_TREE;
3802 }
3803 \f
3804 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3805 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3806 and uses reverse storage order if REVERSEP is nonzero. */
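/* E.g. bits [16, 24) of a 32-bit word INNER come back as
   BIT_FIELD_REF <INNER, 8, 16>; as a special case, a reference at
   BITPOS 0 covering all of INNER degenerates to a simple conversion. */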
3807
3808 static tree
3809 make_bit_field_ref (location_t loc, tree inner, tree type,
3810 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3811 int unsignedp, int reversep)
3812 {
3813 tree result, bftype;
3814
3815 if (bitpos == 0 && !reversep)
3816 {
3817 tree size = TYPE_SIZE (TREE_TYPE (inner));
3818 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3819 || POINTER_TYPE_P (TREE_TYPE (inner)))
3820 && tree_fits_shwi_p (size)
3821 && tree_to_shwi (size) == bitsize)
3822 return fold_convert_loc (loc, type, inner);
3823 }
3824
3825 bftype = type;
3826 if (TYPE_PRECISION (bftype) != bitsize
3827 || TYPE_UNSIGNED (bftype) == !unsignedp)
3828 bftype = build_nonstandard_integer_type (bitsize, 0);
3829
3830 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3831 size_int (bitsize), bitsize_int (bitpos));
3832 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3833
3834 if (bftype != type)
3835 result = fold_convert_loc (loc, type, result);
3836
3837 return result;
3838 }
3839
3840 /* Optimize a bit-field compare.
3841
3842 There are two cases: First is a compare against a constant and the
3843 second is a comparison of two items where the fields are at the same
3844 bit position relative to the start of a chunk (byte, halfword, word)
3845 large enough to contain it. In these cases we can avoid the shift
3846 implicit in bitfield extractions.
3847
3848 For constants, we emit a compare of the shifted constant with the
3849 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3850 compared. For two fields at the same position, we do the ANDs with the
3851 similar mask and compare the result of the ANDs.
3852
3853 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3854 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3855 are the left and right operands of the comparison, respectively.
3856
3857 If the optimization described above can be done, we return the resulting
3858 tree. Otherwise we return zero. */
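/* As an illustration, given "struct S { unsigned f : 3; } s" with the
   field at bit position 2 of a 32-bit word, "s.f == 3" can become

     (WORD & 0x1C) == 0xC

   where WORD is a word-sized BIT_FIELD_REF containing the field, 0x1C
   masks the three field bits and 0xC is the constant 3 shifted into
   place, so no extraction shift is needed. (The exact positions depend
   on the target's endianness.) */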
3859
3860 static tree
3861 optimize_bit_field_compare (location_t loc, enum tree_code code,
3862 tree compare_type, tree lhs, tree rhs)
3863 {
3864 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3865 tree type = TREE_TYPE (lhs);
3866 tree unsigned_type;
3867 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3868 machine_mode lmode, rmode, nmode;
3869 int lunsignedp, runsignedp;
3870 int lreversep, rreversep;
3871 int lvolatilep = 0, rvolatilep = 0;
3872 tree linner, rinner = NULL_TREE;
3873 tree mask;
3874 tree offset;
3875
3876 /* Get all the information about the extractions being done. If the bit size
3877 is the same as the size of the underlying object, we aren't doing an
3878 extraction at all and so can do nothing. We also don't want to
3879 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3880 then will no longer be able to replace it. */
3881 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3882 &lunsignedp, &lreversep, &lvolatilep, false);
3883 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3884 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3885 return 0;
3886
3887 if (const_p)
3888 rreversep = lreversep;
3889 else
3890 {
3891 /* If this is not a constant, we can only do something if bit positions,
3892 sizes, signedness and storage order are the same. */
3893 rinner
3894 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3895 &runsignedp, &rreversep, &rvolatilep, false);
3896
3897 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3898 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3899 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3900 return 0;
3901 }
3902
3903 /* See if we can find a mode to refer to this field. We should be able to,
3904 but fail if we can't. */
3905 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3906 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3907 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3908 TYPE_ALIGN (TREE_TYPE (rinner))),
3909 word_mode, false);
3910 if (nmode == VOIDmode)
3911 return 0;
3912
3913 /* Get an unsigned type of the precision of this mode for the
3914 shifts below. */
3915 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3916
3917 /* Compute the bit position and size for the new reference and our offset
3918 within it. If the new reference is the same size as the original, we
3919 won't optimize anything, so return zero. */
3920 nbitsize = GET_MODE_BITSIZE (nmode);
3921 nbitpos = lbitpos & ~ (nbitsize - 1);
3922 lbitpos -= nbitpos;
3923 if (nbitsize == lbitsize)
3924 return 0;
3925
3926 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3927 lbitpos = nbitsize - lbitsize - lbitpos;
3928
3929 /* Make the mask to be used against the extracted field. */
3930 mask = build_int_cst_type (unsigned_type, -1);
3931 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3932 mask = const_binop (RSHIFT_EXPR, mask,
3933 size_int (nbitsize - lbitsize - lbitpos));
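/* E.g. with nbitsize == 32, lbitsize == 3 and lbitpos == 2, the two
   shifts leave mask == 0x1C: three one bits at the field's position. */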
3934
3935 if (! const_p)
3936 /* If not comparing with constant, just rework the comparison
3937 and return. */
3938 return fold_build2_loc (loc, code, compare_type,
3939 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3940 make_bit_field_ref (loc, linner,
3941 unsigned_type,
3942 nbitsize, nbitpos,
3943 1, lreversep),
3944 mask),
3945 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3946 make_bit_field_ref (loc, rinner,
3947 unsigned_type,
3948 nbitsize, nbitpos,
3949 1, rreversep),
3950 mask));
3951
3952 /* Otherwise, we are handling the constant case. See if the constant is too
3953 big for the field. Warn and return the comparison's known constant result
3953 if so. We do
3954 this not only for its own sake, but to avoid having to test for this
3955 error case below. If we didn't, we might generate wrong code.
3956
3957 For unsigned fields, the constant shifted right by the field length should
3958 be all zero. For signed fields, the high-order bits should agree with
3959 the sign bit. */
3960
3961 if (lunsignedp)
3962 {
3963 if (wi::lrshift (rhs, lbitsize) != 0)
3964 {
3965 warning (0, "comparison is always %d due to width of bit-field",
3966 code == NE_EXPR);
3967 return constant_boolean_node (code == NE_EXPR, compare_type);
3968 }
3969 }
3970 else
3971 {
3972 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3973 if (tem != 0 && tem != -1)
3974 {
3975 warning (0, "comparison is always %d due to width of bit-field",
3976 code == NE_EXPR);
3977 return constant_boolean_node (code == NE_EXPR, compare_type);
3978 }
3979 }
3980
3981 /* Single-bit compares should always be against zero. */
3982 if (lbitsize == 1 && ! integer_zerop (rhs))
3983 {
3984 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3985 rhs = build_int_cst (type, 0);
3986 }
3987
3988 /* Make a new bitfield reference, shift the constant over the
3989 appropriate number of bits and mask it with the computed mask
3990 (in case this was a signed field). If we changed it, make a new one. */
3991 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3992 lreversep);
3993
3994 rhs = const_binop (BIT_AND_EXPR,
3995 const_binop (LSHIFT_EXPR,
3996 fold_convert_loc (loc, unsigned_type, rhs),
3997 size_int (lbitpos)),
3998 mask);
3999
4000 lhs = build2_loc (loc, code, compare_type,
4001 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
4002 return lhs;
4003 }
4004 \f
4005 /* Subroutine for fold_truth_andor_1: decode a field reference.
4006
4007 If EXP is a comparison reference, we return the innermost reference.
4008
4009 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
4010 set to the starting bit number.
4011
4012 If the innermost field can be completely contained in a mode-sized
4013 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
4014
4015 *PVOLATILEP is set to 1 if any expression encountered is volatile;
4016 otherwise it is not changed.
4017
4018 *PUNSIGNEDP is set to the signedness of the field.
4019
4020 *PREVERSEP is set to the storage order of the field.
4021
4022 *PMASK is set to the mask used. This is either contained in a
4023 BIT_AND_EXPR or derived from the width of the field.
4024
4025 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
4026
4027 Return 0 if this is not a component reference or is one that we can't
4028 do anything with. */
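/* For instance, for "s.f & 5" with s.f an unsigned 8-bit field, this
   returns the containing object, sets *PBITSIZE to 8, *PAND_MASK to 5,
   and sets *PMASK to 5 as well (the field mask 0xff merged with the
   explicit mask). */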
4029
4030 static tree
4031 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
4032 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
4033 int *punsignedp, int *preversep, int *pvolatilep,
4034 tree *pmask, tree *pand_mask)
4035 {
4036 tree outer_type = 0;
4037 tree and_mask = 0;
4038 tree mask, inner, offset;
4039 tree unsigned_type;
4040 unsigned int precision;
4041
4042 /* All the optimizations using this function assume integer fields.
4043 There are problems with FP fields since the type_for_size call
4044 below can fail for, e.g., XFmode. */
4045 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
4046 return 0;
4047
4048 /* We are interested in the bare arrangement of bits, so strip everything
4049 that doesn't affect the machine mode. However, record the type of the
4050 outermost expression if it may matter below. */
4051 if (CONVERT_EXPR_P (exp)
4052 || TREE_CODE (exp) == NON_LVALUE_EXPR)
4053 outer_type = TREE_TYPE (exp);
4054 STRIP_NOPS (exp);
4055
4056 if (TREE_CODE (exp) == BIT_AND_EXPR)
4057 {
4058 and_mask = TREE_OPERAND (exp, 1);
4059 exp = TREE_OPERAND (exp, 0);
4060 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4061 if (TREE_CODE (and_mask) != INTEGER_CST)
4062 return 0;
4063 }
4064
4065 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4066 punsignedp, preversep, pvolatilep, false);
4067 if ((inner == exp && and_mask == 0)
4068 || *pbitsize < 0 || offset != 0
4069 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4070 return 0;
4071
4072 /* If the number of bits in the reference is the same as the bitsize of
4073 the outer type, then the outer type gives the signedness. Otherwise
4074 (in case of a small bitfield) the signedness is unchanged. */
4075 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4076 *punsignedp = TYPE_UNSIGNED (outer_type);
4077
4078 /* Compute the mask to access the bitfield. */
4079 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4080 precision = TYPE_PRECISION (unsigned_type);
4081
4082 mask = build_int_cst_type (unsigned_type, -1);
4083
4084 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4085 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4086
4087 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4088 if (and_mask != 0)
4089 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4090 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4091
4092 *pmask = mask;
4093 *pand_mask = and_mask;
4094 return inner;
4095 }
4096
4097 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4098 bit positions and the type of MASK is signed. */
4099
4100 static int
4101 all_ones_mask_p (const_tree mask, unsigned int size)
4102 {
4103 tree type = TREE_TYPE (mask);
4104 unsigned int precision = TYPE_PRECISION (type);
4105
4106 /* If this function returns true when the type of the mask is
4107 UNSIGNED, then there will be errors. In particular see
4108 gcc.c-torture/execute/990326-1.c. There does not appear to be
4109 any documentation paper trail as to why this is so. But the pre
4110 wide-int worked with that restriction and it has been preserved
4111 here. */
4112 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4113 return false;
4114
4115 return wi::mask (size, false, precision) == mask;
4116 }
4117
4118 /* Subroutine for fold: determine if VAL is the INTEGER_CST that
4119 represents the sign bit of EXP's type. If EXP represents a sign
4120 or zero extension, also test VAL against the unextended type.
4121 The return value is the (sub)expression whose sign bit is VAL,
4122 or NULL_TREE otherwise. */
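/* E.g. for a 32-bit int EXP, VAL must be 0x80000000; for EXP of the
   form (int) c with c a signed char, VAL may also be 0x80, the sign
   bit of the unextended 8-bit type. */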
4123
4124 tree
4125 sign_bit_p (tree exp, const_tree val)
4126 {
4127 int width;
4128 tree t;
4129
4130 /* Tree EXP must have an integral type. */
4131 t = TREE_TYPE (exp);
4132 if (! INTEGRAL_TYPE_P (t))
4133 return NULL_TREE;
4134
4135 /* Tree VAL must be an integer constant. */
4136 if (TREE_CODE (val) != INTEGER_CST
4137 || TREE_OVERFLOW (val))
4138 return NULL_TREE;
4139
4140 width = TYPE_PRECISION (t);
4141 if (wi::only_sign_bit_p (val, width))
4142 return exp;
4143
4144 /* Handle extension from a narrower type. */
4145 if (TREE_CODE (exp) == NOP_EXPR
4146 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4147 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4148
4149 return NULL_TREE;
4150 }
4151
4152 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4153 to be evaluated unconditionally. */
4154
4155 static int
4156 simple_operand_p (const_tree exp)
4157 {
4158 /* Strip any conversions that don't change the machine mode. */
4159 STRIP_NOPS (exp);
4160
4161 return (CONSTANT_CLASS_P (exp)
4162 || TREE_CODE (exp) == SSA_NAME
4163 || (DECL_P (exp)
4164 && ! TREE_ADDRESSABLE (exp)
4165 && ! TREE_THIS_VOLATILE (exp)
4166 && ! DECL_NONLOCAL (exp)
4167 /* Don't regard global variables as simple. They may be
4168 allocated in ways unknown to the compiler (shared memory,
4169 #pragma weak, etc). */
4170 && ! TREE_PUBLIC (exp)
4171 && ! DECL_EXTERNAL (exp)
4172 /* Weakrefs are not safe to be read, since they can be NULL.
4173 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4174 have DECL_WEAK flag set. */
4175 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4176 /* Loading a static variable is unduly expensive, but global
4177 registers aren't expensive. */
4178 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4179 }
4180
4181 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4182 to be evaluated unconditionally.
4183 In addition to simple_operand_p, we assume that comparisons, conversions,
4184 and logic-not operations are simple, if their operands are simple, too. */
4185
4186 static bool
4187 simple_operand_p_2 (tree exp)
4188 {
4189 enum tree_code code;
4190
4191 if (TREE_SIDE_EFFECTS (exp)
4192 || tree_could_trap_p (exp))
4193 return false;
4194
4195 while (CONVERT_EXPR_P (exp))
4196 exp = TREE_OPERAND (exp, 0);
4197
4198 code = TREE_CODE (exp);
4199
4200 if (TREE_CODE_CLASS (code) == tcc_comparison)
4201 return (simple_operand_p (TREE_OPERAND (exp, 0))
4202 && simple_operand_p (TREE_OPERAND (exp, 1)));
4203
4204 if (code == TRUTH_NOT_EXPR)
4205 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4206
4207 return simple_operand_p (exp);
4208 }
4209
4210 \f
4211 /* The following functions are subroutines to fold_range_test and allow it to
4212 try to change a logical combination of comparisons into a range test.
4213
4214 For example, both
4215 X == 2 || X == 3 || X == 4 || X == 5
4216 and
4217 X >= 2 && X <= 5
4218 are converted to
4219 (unsigned) (X - 2) <= 3
4220
4221 We describe each set of comparisons as being either inside or outside
4222 a range, using a variable named like IN_P, and then describe the
4223 range with a lower and upper bound. If one of the bounds is omitted,
4224 it represents either the highest or lowest value of the type.
4225
4226 In the comments below, we represent a range by two numbers in brackets
4227 preceded by a "+" to designate being inside that range, or a "-" to
4228 designate being outside that range, so the condition can be inverted by
4229 flipping the prefix. An omitted bound is represented by a "-". For
4230 example, "- [-, 10]" means being outside the range starting at the lowest
4231 possible value and ending at 10, in other words, being greater than 10.
4232 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4233 always false.
4234
4235 We set up things so that the missing bounds are handled in a consistent
4236 manner so neither a missing bound nor "true" and "false" need to be
4237 handled using a special case. */
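/* As a worked example, "X >= 2 && X <= 5" is the range + [2, 5]; once
   the low bound is subtracted in an unsigned type this becomes
   + [0, 3], which needs only the single test (unsigned) (X - 2) <= 3,
   because any X below 2 wraps around to a large unsigned value. */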
4238
4239 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4240 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4241 and UPPER1_P are nonzero if the respective argument is an upper bound
4242 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4243 must be specified for a comparison. ARG1 will be converted to ARG0's
4244 type if both are specified. */
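/* E.g. a missing lower bound (ARG0 == 0 with UPPER0_P == 0) compares
   as minus infinity, so range_binop (LT_EXPR, type, 0, 0, c, 1) is
   true no matter which bound C is supplied. */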
4245
4246 static tree
4247 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4248 tree arg1, int upper1_p)
4249 {
4250 tree tem;
4251 int result;
4252 int sgn0, sgn1;
4253
4254 /* If neither arg represents infinity, do the normal operation.
4255 Else, if not a comparison, return infinity. Else handle the special
4256 comparison rules. Note that most of the cases below won't occur, but
4257 are handled for consistency. */
4258
4259 if (arg0 != 0 && arg1 != 0)
4260 {
4261 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4262 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4263 STRIP_NOPS (tem);
4264 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4265 }
4266
4267 if (TREE_CODE_CLASS (code) != tcc_comparison)
4268 return 0;
4269
4270 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4271 for neither. In real maths, we cannot assume open ended ranges are
4272 the same. But, this is computer arithmetic, where numbers are finite.
4273 We can therefore stand in for any missing bound with a value Z, Z being
4274 greater than any representable number. This permits
4275 us to treat unbounded ranges as equal. */
4276 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4277 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4278 switch (code)
4279 {
4280 case EQ_EXPR:
4281 result = sgn0 == sgn1;
4282 break;
4283 case NE_EXPR:
4284 result = sgn0 != sgn1;
4285 break;
4286 case LT_EXPR:
4287 result = sgn0 < sgn1;
4288 break;
4289 case LE_EXPR:
4290 result = sgn0 <= sgn1;
4291 break;
4292 case GT_EXPR:
4293 result = sgn0 > sgn1;
4294 break;
4295 case GE_EXPR:
4296 result = sgn0 >= sgn1;
4297 break;
4298 default:
4299 gcc_unreachable ();
4300 }
4301
4302 return constant_boolean_node (result, type);
4303 }
4304 \f
4305 /* Helper routine for make_range. Perform one step for it, return
4306 new expression if the loop should continue or NULL_TREE if it should
4307 stop. */
4308
4309 tree
4310 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4311 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4312 bool *strict_overflow_p)
4313 {
4314 tree arg0_type = TREE_TYPE (arg0);
4315 tree n_low, n_high, low = *p_low, high = *p_high;
4316 int in_p = *p_in_p, n_in_p;
4317
4318 switch (code)
4319 {
4320 case TRUTH_NOT_EXPR:
4321 /* We can only do something if the range is testing for zero. */
4322 if (low == NULL_TREE || high == NULL_TREE
4323 || ! integer_zerop (low) || ! integer_zerop (high))
4324 return NULL_TREE;
4325 *p_in_p = ! in_p;
4326 return arg0;
4327
4328 case EQ_EXPR: case NE_EXPR:
4329 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4330 /* We can only do something if the range is testing for zero
4331 and if the second operand is an integer constant. Note that
4332 saying something is "in" the range we make is done by
4333 complementing IN_P since it will be set in the initial case of
4334 being not equal to zero; "out" is leaving it alone. */
4335 if (low == NULL_TREE || high == NULL_TREE
4336 || ! integer_zerop (low) || ! integer_zerop (high)
4337 || TREE_CODE (arg1) != INTEGER_CST)
4338 return NULL_TREE;
4339
4340 switch (code)
4341 {
4342 case NE_EXPR: /* - [c, c] */
4343 low = high = arg1;
4344 break;
4345 case EQ_EXPR: /* + [c, c] */
4346 in_p = ! in_p, low = high = arg1;
4347 break;
4348 case GT_EXPR: /* - [-, c] */
4349 low = 0, high = arg1;
4350 break;
4351 case GE_EXPR: /* + [c, -] */
4352 in_p = ! in_p, low = arg1, high = 0;
4353 break;
4354 case LT_EXPR: /* - [c, -] */
4355 low = arg1, high = 0;
4356 break;
4357 case LE_EXPR: /* + [-, c] */
4358 in_p = ! in_p, low = 0, high = arg1;
4359 break;
4360 default:
4361 gcc_unreachable ();
4362 }
4363
4364 /* If this is an unsigned comparison, we also know that EXP is
4365 greater than or equal to zero. We base the range tests we make
4366 on that fact, so we record it here so we can parse existing
4367 range tests. We test arg0_type since often the return type
4368 of, e.g. EQ_EXPR, is boolean. */
4369 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4370 {
4371 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4372 in_p, low, high, 1,
4373 build_int_cst (arg0_type, 0),
4374 NULL_TREE))
4375 return NULL_TREE;
4376
4377 in_p = n_in_p, low = n_low, high = n_high;
4378
4379 /* If the high bound is missing, but we have a nonzero low
4380 bound, reverse the range so it goes from zero to the low bound
4381 minus 1. */
4382 if (high == 0 && low && ! integer_zerop (low))
4383 {
4384 in_p = ! in_p;
4385 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4386 build_int_cst (TREE_TYPE (low), 1), 0);
4387 low = build_int_cst (arg0_type, 0);
4388 }
4389 }
4390
4391 *p_low = low;
4392 *p_high = high;
4393 *p_in_p = in_p;
4394 return arg0;
4395
4396 case NEGATE_EXPR:
4397 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4398 low and high are non-NULL, then normalize will do the right thing. */
4399 if (!TYPE_UNSIGNED (arg0_type)
4400 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4401 {
4402 if (low == NULL_TREE)
4403 low = TYPE_MIN_VALUE (arg0_type);
4404 if (high == NULL_TREE)
4405 high = TYPE_MAX_VALUE (arg0_type);
4406 }
4407
4408 /* (-x) IN [a,b] -> x in [-b, -a] */
4409 n_low = range_binop (MINUS_EXPR, exp_type,
4410 build_int_cst (exp_type, 0),
4411 0, high, 1);
4412 n_high = range_binop (MINUS_EXPR, exp_type,
4413 build_int_cst (exp_type, 0),
4414 0, low, 0);
4415 if (n_high != 0 && TREE_OVERFLOW (n_high))
4416 return NULL_TREE;
4417 goto normalize;
4418
4419 case BIT_NOT_EXPR:
4420 /* ~ X -> -X - 1 */
4421 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4422 build_int_cst (exp_type, 1));
4423
4424 case PLUS_EXPR:
4425 case MINUS_EXPR:
4426 if (TREE_CODE (arg1) != INTEGER_CST)
4427 return NULL_TREE;
4428
4429 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4430 move a constant to the other side. */
4431 if (!TYPE_UNSIGNED (arg0_type)
4432 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4433 return NULL_TREE;
4434
4435 /* If EXP is signed, any overflow in the computation is undefined,
4436 so we don't worry about it so long as our computations on
4437 the bounds don't overflow. For unsigned, overflow is defined
4438 and this is exactly the right thing. */
4439 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4440 arg0_type, low, 0, arg1, 0);
4441 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4442 arg0_type, high, 1, arg1, 0);
4443 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4444 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4445 return NULL_TREE;
4446
4447 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4448 *strict_overflow_p = true;
4449
4450 normalize:
4451 /* Check for an unsigned range which has wrapped around the maximum
4452 value thus making n_high < n_low, and normalize it. */
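/* E.g. for unsigned char, the wrapped range + [250, 5] becomes the
   complementary range - [6, 249]. */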
4453 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4454 {
4455 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4456 build_int_cst (TREE_TYPE (n_high), 1), 0);
4457 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4458 build_int_cst (TREE_TYPE (n_low), 1), 0);
4459
4460 /* If the range is of the form +/- [ x+1, x ], we won't
4461 be able to normalize it. But then, it represents the
4462 whole range or the empty set, so make it
4463 +/- [ -, - ]. */
4464 if (tree_int_cst_equal (n_low, low)
4465 && tree_int_cst_equal (n_high, high))
4466 low = high = 0;
4467 else
4468 in_p = ! in_p;
4469 }
4470 else
4471 low = n_low, high = n_high;
4472
4473 *p_low = low;
4474 *p_high = high;
4475 *p_in_p = in_p;
4476 return arg0;
4477
4478 CASE_CONVERT:
4479 case NON_LVALUE_EXPR:
4480 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4481 return NULL_TREE;
4482
4483 if (! INTEGRAL_TYPE_P (arg0_type)
4484 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4485 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4486 return NULL_TREE;
4487
4488 n_low = low, n_high = high;
4489
4490 if (n_low != 0)
4491 n_low = fold_convert_loc (loc, arg0_type, n_low);
4492
4493 if (n_high != 0)
4494 n_high = fold_convert_loc (loc, arg0_type, n_high);
4495
4496 /* If we're converting arg0 from an unsigned type to the signed type
4497 of exp, we will be doing the comparison as unsigned.
4498 The tests above have already verified that LOW and HIGH
4499 are both positive.
4500
4501 So we have to ensure that we will handle large unsigned
4502 values the same way that the current signed bounds treat
4503 negative values. */
4504
4505 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4506 {
4507 tree high_positive;
4508 tree equiv_type;
4509 /* For fixed-point modes, we need to pass the saturating flag
4510 as the 2nd parameter. */
4511 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4512 equiv_type
4513 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4514 TYPE_SATURATING (arg0_type));
4515 else
4516 equiv_type
4517 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4518
4519 /* A range without an upper bound is, naturally, unbounded.
4520 Since convert would have cropped a very large value, use
4521 the max value for the destination type. */
4522 high_positive
4523 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4524 : TYPE_MAX_VALUE (arg0_type);
4525
4526 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4527 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4528 fold_convert_loc (loc, arg0_type,
4529 high_positive),
4530 build_int_cst (arg0_type, 1));
4531
4532 /* If the low bound is specified, "and" the range with the
4533 range for which the original unsigned value will be
4534 positive. */
4535 if (low != 0)
4536 {
4537 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4538 1, fold_convert_loc (loc, arg0_type,
4539 integer_zero_node),
4540 high_positive))
4541 return NULL_TREE;
4542
4543 in_p = (n_in_p == in_p);
4544 }
4545 else
4546 {
4547 /* Otherwise, "or" the range with the range of the input
4548 that will be interpreted as negative. */
4549 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4550 1, fold_convert_loc (loc, arg0_type,
4551 integer_zero_node),
4552 high_positive))
4553 return NULL_TREE;
4554
4555 in_p = (in_p != n_in_p);
4556 }
4557 }
4558
4559 *p_low = n_low;
4560 *p_high = n_high;
4561 *p_in_p = in_p;
4562 return arg0;
4563
4564 default:
4565 return NULL_TREE;
4566 }
4567 }
4568
4569 /* Given EXP, a logical expression, set the range it is testing into
4570 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4571 actually being tested. *PLOW and *PHIGH will be made of the same
4572 type as the returned expression. If EXP is not a comparison, we
4573 will most likely not be returning a useful value and range. Set
4574 *STRICT_OVERFLOW_P to true if the return value is only valid
4575 because signed overflow is undefined; otherwise, do not change
4576 *STRICT_OVERFLOW_P. */
4577
4578 tree
4579 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4580 bool *strict_overflow_p)
4581 {
4582 enum tree_code code;
4583 tree arg0, arg1 = NULL_TREE;
4584 tree exp_type, nexp;
4585 int in_p;
4586 tree low, high;
4587 location_t loc = EXPR_LOCATION (exp);
4588
4589 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4590 and see if we can refine the range. Some of the cases below may not
4591 happen, but it doesn't seem worth worrying about this. We keep
4592 iterating as long as make_range_step can refine the range, and stop
4593 as soon as it returns NULL_TREE. */
4594
4595 in_p = 0;
4596 low = high = build_int_cst (TREE_TYPE (exp), 0);
4597
4598 while (1)
4599 {
4600 code = TREE_CODE (exp);
4601 exp_type = TREE_TYPE (exp);
4602 arg0 = NULL_TREE;
4603
4604 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4605 {
4606 if (TREE_OPERAND_LENGTH (exp) > 0)
4607 arg0 = TREE_OPERAND (exp, 0);
4608 if (TREE_CODE_CLASS (code) == tcc_binary
4609 || TREE_CODE_CLASS (code) == tcc_comparison
4610 || (TREE_CODE_CLASS (code) == tcc_expression
4611 && TREE_OPERAND_LENGTH (exp) > 1))
4612 arg1 = TREE_OPERAND (exp, 1);
4613 }
4614 if (arg0 == NULL_TREE)
4615 break;
4616
4617 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4618 &high, &in_p, strict_overflow_p);
4619 if (nexp == NULL_TREE)
4620 break;
4621 exp = nexp;
4622 }
4623
4624 /* If EXP is a constant, we can evaluate whether this is true or false. */
4625 if (TREE_CODE (exp) == INTEGER_CST)
4626 {
4627 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4628 exp, 0, low, 0))
4629 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4630 exp, 1, high, 1)));
4631 low = high = 0;
4632 exp = 0;
4633 }
4634
4635 *pin_p = in_p, *plow = low, *phigh = high;
4636 return exp;
4637 }
4638 \f
4639 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4640 type, TYPE, return an expression to test if EXP is in (or out of, depending
4641 on IN_P) the range. Return 0 if the test couldn't be created. */
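/* E.g. with IN_P == 1, LOW == 2 and HIGH == 5 for an int EXP, the test
   built is (unsigned int) EXP - 2 <= 3; with IN_P == 0 the same test
   is built and then inverted. */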
4642
4643 tree
4644 build_range_check (location_t loc, tree type, tree exp, int in_p,
4645 tree low, tree high)
4646 {
4647 tree etype = TREE_TYPE (exp), value;
4648
4649 /* Disable this optimization for function pointer expressions
4650 on targets that require function pointer canonicalization. */
4651 if (targetm.have_canonicalize_funcptr_for_compare ()
4652 && TREE_CODE (etype) == POINTER_TYPE
4653 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4654 return NULL_TREE;
4655
4656 if (! in_p)
4657 {
4658 value = build_range_check (loc, type, exp, 1, low, high);
4659 if (value != 0)
4660 return invert_truthvalue_loc (loc, value);
4661
4662 return 0;
4663 }
4664
4665 if (low == 0 && high == 0)
4666 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4667
4668 if (low == 0)
4669 return fold_build2_loc (loc, LE_EXPR, type, exp,
4670 fold_convert_loc (loc, etype, high));
4671
4672 if (high == 0)
4673 return fold_build2_loc (loc, GE_EXPR, type, exp,
4674 fold_convert_loc (loc, etype, low));
4675
4676 if (operand_equal_p (low, high, 0))
4677 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4678 fold_convert_loc (loc, etype, low));
4679
4680 if (integer_zerop (low))
4681 {
4682 if (! TYPE_UNSIGNED (etype))
4683 {
4684 etype = unsigned_type_for (etype);
4685 high = fold_convert_loc (loc, etype, high);
4686 exp = fold_convert_loc (loc, etype, exp);
4687 }
4688 return build_range_check (loc, type, exp, 1, 0, high);
4689 }
4690
4691 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4692 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4693 {
4694 int prec = TYPE_PRECISION (etype);
4695
4696 if (wi::mask (prec - 1, false, prec) == high)
4697 {
4698 if (TYPE_UNSIGNED (etype))
4699 {
4700 tree signed_etype = signed_type_for (etype);
4701 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4702 etype
4703 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4704 else
4705 etype = signed_etype;
4706 exp = fold_convert_loc (loc, etype, exp);
4707 }
4708 return fold_build2_loc (loc, GT_EXPR, type, exp,
4709 build_int_cst (etype, 0));
4710 }
4711 }
4712
4713 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4714 This requires wrap-around arithmetic for the type of the expression.
4715 First make sure that arithmetic in this type is valid, then make sure
4716 that it wraps around. */
4717 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4718 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4719 TYPE_UNSIGNED (etype));
4720
4721 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4722 {
4723 tree utype, minv, maxv;
4724
4725 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4726 for the type in question, as we rely on this here. */
4727 utype = unsigned_type_for (etype);
4728 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4729 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4730 build_int_cst (TREE_TYPE (maxv), 1), 1);
4731 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4732
4733 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4734 minv, 1, maxv, 1)))
4735 etype = utype;
4736 else
4737 return 0;
4738 }
4739
4740 high = fold_convert_loc (loc, etype, high);
4741 low = fold_convert_loc (loc, etype, low);
4742 exp = fold_convert_loc (loc, etype, exp);
4743
4744 value = const_binop (MINUS_EXPR, high, low);
4745
4747 if (POINTER_TYPE_P (etype))
4748 {
4749 if (value != 0 && !TREE_OVERFLOW (value))
4750 {
4751 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4752 return build_range_check (loc, type,
4753 fold_build_pointer_plus_loc (loc, exp, low),
4754 1, build_int_cst (etype, 0), value);
4755 }
4756 return 0;
4757 }
4758
4759 if (value != 0 && !TREE_OVERFLOW (value))
4760 return build_range_check (loc, type,
4761 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4762 1, build_int_cst (etype, 0), value);
4763
4764 return 0;
4765 }
4766 \f
4767 /* Return the predecessor of VAL in its type, handling the infinite case. */
4768
4769 static tree
4770 range_predecessor (tree val)
4771 {
4772 tree type = TREE_TYPE (val);
4773
4774 if (INTEGRAL_TYPE_P (type)
4775 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4776 return 0;
4777 else
4778 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4779 build_int_cst (TREE_TYPE (val), 1), 0);
4780 }
4781
4782 /* Return the successor of VAL in its type, handling the infinite case. */
4783
4784 static tree
4785 range_successor (tree val)
4786 {
4787 tree type = TREE_TYPE (val);
4788
4789 if (INTEGRAL_TYPE_P (type)
4790 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4791 return 0;
4792 else
4793 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4794 build_int_cst (TREE_TYPE (val), 1), 0);
4795 }
4796
4797 /* Given two ranges, see if we can merge them into one. Return 1 if we
4798 can, 0 if we can't. Set the output range into the specified parameters. */
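/* E.g. merging + [2, 5] with + [4, 9] (an AND of the two tests) gives
   + [4, 5], while merging - [2, 5] with - [6, 9] gives - [2, 9],
   because the two excluded ranges are adjacent. */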
4799
4800 bool
4801 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4802 tree high0, int in1_p, tree low1, tree high1)
4803 {
4804 int no_overlap;
4805 int subset;
4806 int temp;
4807 tree tem;
4808 int in_p;
4809 tree low, high;
4810 int lowequal = ((low0 == 0 && low1 == 0)
4811 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4812 low0, 0, low1, 0)));
4813 int highequal = ((high0 == 0 && high1 == 0)
4814 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4815 high0, 1, high1, 1)));
4816
4817 /* Make range 0 be the range that starts first, or ends last if they
4818 start at the same value. Swap them if it isn't. */
4819 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4820 low0, 0, low1, 0))
4821 || (lowequal
4822 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4823 high1, 1, high0, 1))))
4824 {
4825 temp = in0_p, in0_p = in1_p, in1_p = temp;
4826 tem = low0, low0 = low1, low1 = tem;
4827 tem = high0, high0 = high1, high1 = tem;
4828 }
4829
4830 /* Now flag two cases, whether the ranges are disjoint or whether the
4831 second range is totally subsumed in the first. Note that the tests
4832 below are simplified by the ones above. */
4833 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4834 high0, 1, low1, 0));
4835 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4836 high1, 1, high0, 1));
4837
4838 /* We now have four cases, depending on whether we are including or
4839 excluding the two ranges. */
4840 if (in0_p && in1_p)
4841 {
4842 /* If they don't overlap, the result is false. If the second range
4843 is a subset it is the result. Otherwise, the range is from the start
4844 of the second to the end of the first. */
4845 if (no_overlap)
4846 in_p = 0, low = high = 0;
4847 else if (subset)
4848 in_p = 1, low = low1, high = high1;
4849 else
4850 in_p = 1, low = low1, high = high0;
4851 }
4852
4853 else if (in0_p && ! in1_p)
4854 {
4855 /* If they don't overlap, the result is the first range. If they are
4856 equal, the result is false. If the second range is a subset of the
4857 first, and the ranges begin at the same place, we go from just after
4858 the end of the second range to the end of the first. If the second
4859 range is not a subset of the first, or if it is a subset and both
4860 ranges end at the same place, the range starts at the start of the
4861 first range and ends just before the second range.
4862 Otherwise, we can't describe this as a single range. */
4863 if (no_overlap)
4864 in_p = 1, low = low0, high = high0;
4865 else if (lowequal && highequal)
4866 in_p = 0, low = high = 0;
4867 else if (subset && lowequal)
4868 {
4869 low = range_successor (high1);
4870 high = high0;
4871 in_p = 1;
4872 if (low == 0)
4873 {
4874 /* We are in the weird situation where high0 > high1 but
4875 high1 has no successor. Punt. */
4876 return 0;
4877 }
4878 }
4879 else if (! subset || highequal)
4880 {
4881 low = low0;
4882 high = range_predecessor (low1);
4883 in_p = 1;
4884 if (high == 0)
4885 {
4886 /* low0 < low1 but low1 has no predecessor. Punt. */
4887 return 0;
4888 }
4889 }
4890 else
4891 return 0;
4892 }
4893
4894 else if (! in0_p && in1_p)
4895 {
4896 /* If they don't overlap, the result is the second range. If the second
4897 is a subset of the first, the result is false. Otherwise,
4898 the range starts just after the first range and ends at the
4899 end of the second. */
4900 if (no_overlap)
4901 in_p = 1, low = low1, high = high1;
4902 else if (subset || highequal)
4903 in_p = 0, low = high = 0;
4904 else
4905 {
4906 low = range_successor (high0);
4907 high = high1;
4908 in_p = 1;
4909 if (low == 0)
4910 {
4911 /* high1 > high0 but high0 has no successor. Punt. */
4912 return 0;
4913 }
4914 }
4915 }
4916
4917 else
4918 {
4919 /* The case where we are excluding both ranges. Here the complex case
4920 is if they don't overlap. In that case, the only time we have a
4921 range is if they are adjacent. If the second is a subset of the
4922 first, the result is the first. Otherwise, the range to exclude
4923 starts at the beginning of the first range and ends at the end of the
4924 second. */
4925 if (no_overlap)
4926 {
4927 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4928 range_successor (high0),
4929 1, low1, 0)))
4930 in_p = 0, low = low0, high = high1;
4931 else
4932 {
4933 /* Canonicalize - [min, x] into - [-, x]. */
4934 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4935 switch (TREE_CODE (TREE_TYPE (low0)))
4936 {
4937 case ENUMERAL_TYPE:
4938 if (TYPE_PRECISION (TREE_TYPE (low0))
4939 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4940 break;
4941 /* FALLTHROUGH */
4942 case INTEGER_TYPE:
4943 if (tree_int_cst_equal (low0,
4944 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4945 low0 = 0;
4946 break;
4947 case POINTER_TYPE:
4948 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4949 && integer_zerop (low0))
4950 low0 = 0;
4951 break;
4952 default:
4953 break;
4954 }
4955
4956 /* Canonicalize - [x, max] into - [x, -]. */
4957 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4958 switch (TREE_CODE (TREE_TYPE (high1)))
4959 {
4960 case ENUMERAL_TYPE:
4961 if (TYPE_PRECISION (TREE_TYPE (high1))
4962 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4963 break;
4964 /* FALLTHROUGH */
4965 case INTEGER_TYPE:
4966 if (tree_int_cst_equal (high1,
4967 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4968 high1 = 0;
4969 break;
4970 case POINTER_TYPE:
4971 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4972 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4973 high1, 1,
4974 build_int_cst (TREE_TYPE (high1), 1),
4975 1)))
4976 high1 = 0;
4977 break;
4978 default:
4979 break;
4980 }
4981
4982 /* The ranges might be also adjacent between the maximum and
4983 minimum values of the given type. For
4984 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4985 return + [x + 1, y - 1]. */
4986 if (low0 == 0 && high1 == 0)
4987 {
4988 low = range_successor (high0);
4989 high = range_predecessor (low1);
4990 if (low == 0 || high == 0)
4991 return 0;
4992
4993 in_p = 1;
4994 }
4995 else
4996 return 0;
4997 }
4998 }
4999 else if (subset)
5000 in_p = 0, low = low0, high = high0;
5001 else
5002 in_p = 0, low = low0, high = high1;
5003 }
5004
5005 *pin_p = in_p, *plow = low, *phigh = high;
5006 return 1;
5007 }
5008 \f
5009
5010 /* Subroutine of fold, looking inside expressions of the form
5011 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
5012 of the COND_EXPR. This function is being used also to optimize
5013 A op B ? C : A, by reversing the comparison first.
5014
5015 Return a folded expression whose code is not a COND_EXPR
5016 anymore, or NULL_TREE if no folding opportunity is found. */
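/* E.g. "a > 0 ? a : -a" folds to ABS_EXPR <a>, and "a >= b ? a : b"
   folds to MAX_EXPR <a, b>, provided signed zeros and NaNs need not
   be honored. */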
5017
5018 static tree
5019 fold_cond_expr_with_comparison (location_t loc, tree type,
5020 tree arg0, tree arg1, tree arg2)
5021 {
5022 enum tree_code comp_code = TREE_CODE (arg0);
5023 tree arg00 = TREE_OPERAND (arg0, 0);
5024 tree arg01 = TREE_OPERAND (arg0, 1);
5025 tree arg1_type = TREE_TYPE (arg1);
5026 tree tem;
5027
5028 STRIP_NOPS (arg1);
5029 STRIP_NOPS (arg2);
5030
5031 /* If we have A op 0 ? A : -A, consider applying the following
5032 transformations:
5033
5034 A == 0? A : -A same as -A
5035 A != 0? A : -A same as A
5036 A >= 0? A : -A same as abs (A)
5037 A > 0? A : -A same as abs (A)
5038 A <= 0? A : -A same as -abs (A)
5039 A < 0? A : -A same as -abs (A)
5040
5041 None of these transformations work for modes with signed
5042 zeros. If A is +/-0, the first two transformations will
5043 change the sign of the result (from +0 to -0, or vice
5044 versa). The last four will fix the sign of the result,
5045 even though the original expressions could be positive or
5046 negative, depending on the sign of A.
5047
5048 Note that all these transformations are correct if A is
5049 NaN, since the two alternatives (A and -A) are also NaNs. */
5050 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5051 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
5052 ? real_zerop (arg01)
5053 : integer_zerop (arg01))
5054 && ((TREE_CODE (arg2) == NEGATE_EXPR
5055 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
5056 /* In the case that A is of the form X-Y, '-A' (arg2) may
5057 have already been folded to Y-X, check for that. */
5058 || (TREE_CODE (arg1) == MINUS_EXPR
5059 && TREE_CODE (arg2) == MINUS_EXPR
5060 && operand_equal_p (TREE_OPERAND (arg1, 0),
5061 TREE_OPERAND (arg2, 1), 0)
5062 && operand_equal_p (TREE_OPERAND (arg1, 1),
5063 TREE_OPERAND (arg2, 0), 0))))
5064 switch (comp_code)
5065 {
5066 case EQ_EXPR:
5067 case UNEQ_EXPR:
5068 tem = fold_convert_loc (loc, arg1_type, arg1);
5069 return pedantic_non_lvalue_loc (loc,
5070 fold_convert_loc (loc, type,
5071 negate_expr (tem)));
5072 case NE_EXPR:
5073 case LTGT_EXPR:
5074 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5075 case UNGE_EXPR:
5076 case UNGT_EXPR:
5077 if (flag_trapping_math)
5078 break;
5079 /* Fall through. */
5080 case GE_EXPR:
5081 case GT_EXPR:
5082 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5083 break;
5084 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5085 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5086 case UNLE_EXPR:
5087 case UNLT_EXPR:
5088 if (flag_trapping_math)
5089 break;
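/* Fall through. */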
5090 case LE_EXPR:
5091 case LT_EXPR:
5092 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5093 break;
5094 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5095 return negate_expr (fold_convert_loc (loc, type, tem));
5096 default:
5097 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5098 break;
5099 }
5100
5101 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5102 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5103 both transformations are correct when A is NaN: A != 0
5104 is then true, and A == 0 is false. */
5105
5106 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5107 && integer_zerop (arg01) && integer_zerop (arg2))
5108 {
5109 if (comp_code == NE_EXPR)
5110 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5111 else if (comp_code == EQ_EXPR)
5112 return build_zero_cst (type);
5113 }
5114
5115 /* Try some transformations of A op B ? A : B.
5116
5117 A == B? A : B same as B
5118 A != B? A : B same as A
5119 A >= B? A : B same as max (A, B)
5120 A > B? A : B same as max (B, A)
5121 A <= B? A : B same as min (A, B)
5122 A < B? A : B same as min (B, A)
5123
5124 As above, these transformations don't work in the presence
5125 of signed zeros. For example, if A and B are zeros of
5126 opposite sign, the first two transformations will change
5127 the sign of the result. In the last four, the original
5128 expressions give different results for (A=+0, B=-0) and
5129 (A=-0, B=+0), but the transformed expressions do not.
5130
5131 The first two transformations are correct if either A or B
5132 is a NaN. In the first transformation, the condition will
5133 be false, and B will indeed be chosen. In the case of the
5134 second transformation, the condition A != B will be true,
5135 and A will be chosen.
5136
5137 The conversions to max() and min() are not correct if B is
5138 a number and A is not. The conditions in the original
5139 expressions will be false, so all four give B. The min()
5140 and max() versions would give a NaN instead. */
5141 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5142 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5143 /* Avoid these transformations if the COND_EXPR may be used
5144 as an lvalue in the C++ front-end. PR c++/19199. */
5145 && (in_gimple_form
5146 || VECTOR_TYPE_P (type)
5147 || (! lang_GNU_CXX ()
5148 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5149 || ! maybe_lvalue_p (arg1)
5150 || ! maybe_lvalue_p (arg2)))
5151 {
5152 tree comp_op0 = arg00;
5153 tree comp_op1 = arg01;
5154 tree comp_type = TREE_TYPE (comp_op0);
5155
5156 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5157 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5158 {
5159 comp_type = type;
5160 comp_op0 = arg1;
5161 comp_op1 = arg2;
5162 }
5163
5164 switch (comp_code)
5165 {
5166 case EQ_EXPR:
5167 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5168 case NE_EXPR:
5169 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5170 case LE_EXPR:
5171 case LT_EXPR:
5172 case UNLE_EXPR:
5173 case UNLT_EXPR:
5174 /* In C++ a ?: expression can be an lvalue, so put the
5175 operand which will be used if they are equal first
5176 so that we can convert this back to the
5177 corresponding COND_EXPR. */
5178 if (!HONOR_NANS (arg1))
5179 {
5180 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5181 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5182 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5183 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5184 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5185 comp_op1, comp_op0);
5186 return pedantic_non_lvalue_loc (loc,
5187 fold_convert_loc (loc, type, tem));
5188 }
5189 break;
5190 case GE_EXPR:
5191 case GT_EXPR:
5192 case UNGE_EXPR:
5193 case UNGT_EXPR:
5194 if (!HONOR_NANS (arg1))
5195 {
5196 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5197 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5198 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5199 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5200 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5201 comp_op1, comp_op0);
5202 return pedantic_non_lvalue_loc (loc,
5203 fold_convert_loc (loc, type, tem));
5204 }
5205 break;
5206 case UNEQ_EXPR:
5207 if (!HONOR_NANS (arg1))
5208 return pedantic_non_lvalue_loc (loc,
5209 fold_convert_loc (loc, type, arg2));
5210 break;
5211 case LTGT_EXPR:
5212 if (!HONOR_NANS (arg1))
5213 return pedantic_non_lvalue_loc (loc,
5214 fold_convert_loc (loc, type, arg1));
5215 break;
5216 default:
5217 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5218 break;
5219 }
5220 }
5221
5222 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5223 we might still be able to simplify this. For example,
5224 if C1 is one less or one more than C2, this might have started
5225 out as a MIN or MAX and been transformed by this function.
5226 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5227
5228 if (INTEGRAL_TYPE_P (type)
5229 && TREE_CODE (arg01) == INTEGER_CST
5230 && TREE_CODE (arg2) == INTEGER_CST)
5231 switch (comp_code)
5232 {
5233 case EQ_EXPR:
5234 if (TREE_CODE (arg1) == INTEGER_CST)
5235 break;
5236 /* We can replace A with C1 in this case. */
5237 arg1 = fold_convert_loc (loc, type, arg01);
5238 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5239
5240 case LT_EXPR:
5241 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5242 MIN_EXPR, to preserve the signedness of the comparison. */
5243 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5244 OEP_ONLY_CONST)
5245 && operand_equal_p (arg01,
5246 const_binop (PLUS_EXPR, arg2,
5247 build_int_cst (type, 1)),
5248 OEP_ONLY_CONST))
5249 {
5250 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5251 fold_convert_loc (loc, TREE_TYPE (arg00),
5252 arg2));
5253 return pedantic_non_lvalue_loc (loc,
5254 fold_convert_loc (loc, type, tem));
5255 }
5256 break;
5257
5258 case LE_EXPR:
5259 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5260 as above. */
5261 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5262 OEP_ONLY_CONST)
5263 && operand_equal_p (arg01,
5264 const_binop (MINUS_EXPR, arg2,
5265 build_int_cst (type, 1)),
5266 OEP_ONLY_CONST))
5267 {
5268 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5269 fold_convert_loc (loc, TREE_TYPE (arg00),
5270 arg2));
5271 return pedantic_non_lvalue_loc (loc,
5272 fold_convert_loc (loc, type, tem));
5273 }
5274 break;
5275
5276 case GT_EXPR:
5277 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5278 MAX_EXPR, to preserve the signedness of the comparison. */
5279 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5280 OEP_ONLY_CONST)
5281 && operand_equal_p (arg01,
5282 const_binop (MINUS_EXPR, arg2,
5283 build_int_cst (type, 1)),
5284 OEP_ONLY_CONST))
5285 {
5286 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5287 fold_convert_loc (loc, TREE_TYPE (arg00),
5288 arg2));
5289 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5290 }
5291 break;
5292
5293 case GE_EXPR:
5294 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5295 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5296 OEP_ONLY_CONST)
5297 && operand_equal_p (arg01,
5298 const_binop (PLUS_EXPR, arg2,
5299 build_int_cst (type, 1)),
5300 OEP_ONLY_CONST))
5301 {
5302 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5303 fold_convert_loc (loc, TREE_TYPE (arg00),
5304 arg2));
5305 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5306 }
5307 break;
5308 case NE_EXPR:
5309 break;
5310 default:
5311 gcc_unreachable ();
5312 }
5313
5314 return NULL_TREE;
5315 }
5316
5317
5318 \f
5319 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5320 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5321 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5322 false) >= 2)
5323 #endif
5324
5325 /* EXP is some logical combination of boolean tests. See if we can
5326 merge it into some range test. Return the new tree if so. */
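/* E.g. "x == 2 || x == 3" yields the ranges + [2, 2] and + [3, 3],
   inverted to - [2, 2] and - [3, 3] for the OR; these merge to
   - [2, 3], and inverting once more gives the single range check
   (unsigned) (x - 2) <= 1. */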
5327
5328 static tree
5329 fold_range_test (location_t loc, enum tree_code code, tree type,
5330 tree op0, tree op1)
5331 {
5332 int or_op = (code == TRUTH_ORIF_EXPR
5333 || code == TRUTH_OR_EXPR);
5334 int in0_p, in1_p, in_p;
5335 tree low0, low1, low, high0, high1, high;
5336 bool strict_overflow_p = false;
5337 tree tem, lhs, rhs;
5338 const char * const warnmsg = G_("assuming signed overflow does not occur "
5339 "when simplifying range test");
5340
5341 if (!INTEGRAL_TYPE_P (type))
5342 return 0;
5343
5344 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5345 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5346
5347 /* If this is an OR operation, invert both sides; we will invert
5348 again at the end. */
5349 if (or_op)
5350 in0_p = ! in0_p, in1_p = ! in1_p;
5351
5352 /* If both expressions are the same, if we can merge the ranges, and we
5353 can build the range test, return it or it inverted. If one of the
5354 ranges is always true or always false, consider it to be the same
5355 expression as the other. */
5356 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5357 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5358 in1_p, low1, high1)
5359 && 0 != (tem = (build_range_check (loc, type,
5360 lhs != 0 ? lhs
5361 : rhs != 0 ? rhs : integer_zero_node,
5362 in_p, low, high))))
5363 {
5364 if (strict_overflow_p)
5365 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5366 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5367 }
5368
5369 /* On machines where the branch cost is expensive, if this is a
5370 short-circuited branch and the underlying object on both sides
5371 is the same, make a non-short-circuit operation. */
5372 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5373 && lhs != 0 && rhs != 0
5374 && (code == TRUTH_ANDIF_EXPR
5375 || code == TRUTH_ORIF_EXPR)
5376 && operand_equal_p (lhs, rhs, 0))
5377 {
5378 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5379 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5380 which cases we can't do this. */
5381 if (simple_operand_p (lhs))
5382 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5383 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5384 type, op0, op1);
5385
5386 else if (!lang_hooks.decls.global_bindings_p ()
5387 && !CONTAINS_PLACEHOLDER_P (lhs))
5388 {
5389 tree common = save_expr (lhs);
5390
5391 if (0 != (lhs = build_range_check (loc, type, common,
5392 or_op ? ! in0_p : in0_p,
5393 low0, high0))
5394 && (0 != (rhs = build_range_check (loc, type, common,
5395 or_op ? ! in1_p : in1_p,
5396 low1, high1))))
5397 {
5398 if (strict_overflow_p)
5399 fold_overflow_warning (warnmsg,
5400 WARN_STRICT_OVERFLOW_COMPARISON);
5401 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5402 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5403 type, lhs, rhs);
5404 }
5405 }
5406 }
5407
5408 return 0;
5409 }
5410 \f
5411 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5412 bit value. Arrange things so the extra bits will be set to zero if and
5413 only if C is sign-extended to its full width. If MASK is nonzero,
5414 it is an INTEGER_CST that should be AND'ed with the extra bits. */
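/* Worked example (an added illustration, assuming MASK is zero and
   UNSIGNEDP is false): with modesize == 8 and p == 4, the sign-extended
   constant C == 0xFA gives temp == 0xF0 after the two shifts, and
   C ^ temp == 0x0A, so the extra bits come out zero.  For the
   non-sign-extended C == 0x0A the same mask yields 0xFA, whose extra
   bits are nonzero.  */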
5415
5416 static tree
5417 unextend (tree c, int p, int unsignedp, tree mask)
5418 {
5419 tree type = TREE_TYPE (c);
5420 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5421 tree temp;
5422
5423 if (p == modesize || unsignedp)
5424 return c;
5425
5426 /* We work by getting just the sign bit into the low-order bit, then
5427 into the high-order bit, then sign-extend. We then XOR that value
5428 with C. */
5429 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5430
5431 /* We must use a signed type in order to get an arithmetic right shift.
5432 However, we must also avoid introducing accidental overflows, so that
5433 a subsequent call to integer_zerop will work. Hence we must
5434 do the type conversion here. At this point, the constant is either
5435 zero or one, and the conversion to a signed type can never overflow.
5436 We could get an overflow if this conversion is done anywhere else. */
5437 if (TYPE_UNSIGNED (type))
5438 temp = fold_convert (signed_type_for (type), temp);
5439
5440 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5441 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5442 if (mask != 0)
5443 temp = const_binop (BIT_AND_EXPR, temp,
5444 fold_convert (TREE_TYPE (c), mask));
5445 /* If necessary, convert the type back to match the type of C. */
5446 if (TYPE_UNSIGNED (type))
5447 temp = fold_convert (type, temp);
5448
5449 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5450 }
5451 \f
5452 /* For an expression that has the form
5453 (A && B) || ~B
5454 or
5455 (A || B) && ~B,
5456 we can drop one of the inner expressions and simplify to
5457 A || ~B
5458 or
5459 A && ~B
5460 LOC is the location of the resulting expression. OP is the inner
5461 logical operation; the left-hand side in the examples above, while CMPOP
5462 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5463 removing a condition that guards another, as in
5464 (A != NULL && A->...) || A == NULL
5465 which we must not transform. If RHS_ONLY is true, only eliminate the
5466 right-most operand of the inner logical operation. */
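/* A concrete instance (added illustration): with OP being
   "a > 0 && b > 0" and CMPOP being "b <= 0", the inverse of CMPOP
   matches the right arm of OP, so
   (a > 0 && b > 0) || b <= 0 simplifies to a > 0 || b <= 0.  */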
5467
5468 static tree
5469 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5470 bool rhs_only)
5471 {
5472 tree type = TREE_TYPE (cmpop);
5473 enum tree_code code = TREE_CODE (cmpop);
5474 enum tree_code truthop_code = TREE_CODE (op);
5475 tree lhs = TREE_OPERAND (op, 0);
5476 tree rhs = TREE_OPERAND (op, 1);
5477 tree orig_lhs = lhs, orig_rhs = rhs;
5478 enum tree_code rhs_code = TREE_CODE (rhs);
5479 enum tree_code lhs_code = TREE_CODE (lhs);
5480 enum tree_code inv_code;
5481
5482 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5483 return NULL_TREE;
5484
5485 if (TREE_CODE_CLASS (code) != tcc_comparison)
5486 return NULL_TREE;
5487
5488 if (rhs_code == truthop_code)
5489 {
5490 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5491 if (newrhs != NULL_TREE)
5492 {
5493 rhs = newrhs;
5494 rhs_code = TREE_CODE (rhs);
5495 }
5496 }
5497 if (lhs_code == truthop_code && !rhs_only)
5498 {
5499 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5500 if (newlhs != NULL_TREE)
5501 {
5502 lhs = newlhs;
5503 lhs_code = TREE_CODE (lhs);
5504 }
5505 }
5506
5507 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5508 if (inv_code == rhs_code
5509 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5510 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5511 return lhs;
5512 if (!rhs_only && inv_code == lhs_code
5513 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5514 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5515 return rhs;
5516 if (rhs != orig_rhs || lhs != orig_lhs)
5517 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5518 lhs, rhs);
5519 return NULL_TREE;
5520 }
5521
5522 /* Find ways of folding logical expressions of LHS and RHS:
5523 Try to merge two comparisons to the same innermost item.
5524 Look for range tests like "ch >= '0' && ch <= '9'".
5525 Look for combinations of simple terms on machines with expensive branches
5526 and evaluate the RHS unconditionally.
5527
5528 For example, if we have p->a == 2 && p->b == 4 and we can make an
5529 object large enough to span both A and B, we can do this with a comparison
5530 against the object ANDed with the a mask.
5531
5532 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5533 operations to do this with one comparison.
5534
5535 We check for both normal comparisons and the BIT_AND_EXPRs made by
5536 this function and the one above.
5537
5538 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5539 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5540
5541 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5542 two operands.
5543
5544 We return the simplified tree or 0 if no optimization is possible. */
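/* Sketch of the intended effect (an added illustration; the field
   layout is hypothetical): given "struct s { unsigned a : 4, b : 4; }",
   the test "p->a == 2 && p->b == 3" can become a single load and
   compare of the containing byte, conceptually
   *(unsigned char *) p == 0x32 on a typical little-endian target.  */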
5545
5546 static tree
5547 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5548 tree lhs, tree rhs)
5549 {
5550 /* If this is the "or" of two comparisons, we can do something if
5551 the comparisons are NE_EXPR. If this is the "and", we can do something
5552 if the comparisons are EQ_EXPR. I.e.,
5553 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5554
5555 WANTED_CODE is this operation code. For single bit fields, we can
5556 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5557 comparison for one-bit fields. */
5558
5559 enum tree_code wanted_code;
5560 enum tree_code lcode, rcode;
5561 tree ll_arg, lr_arg, rl_arg, rr_arg;
5562 tree ll_inner, lr_inner, rl_inner, rr_inner;
5563 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5564 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5565 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5566 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5567 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5568 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5569 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5570 machine_mode lnmode, rnmode;
5571 tree ll_mask, lr_mask, rl_mask, rr_mask;
5572 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5573 tree l_const, r_const;
5574 tree lntype, rntype, result;
5575 HOST_WIDE_INT first_bit, end_bit;
5576 int volatilep;
5577
5578 /* Start by getting the comparison codes. Fail if anything is volatile.
5579 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5580 it were surrounded with a NE_EXPR. */
5581
5582 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5583 return 0;
5584
5585 lcode = TREE_CODE (lhs);
5586 rcode = TREE_CODE (rhs);
5587
5588 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5589 {
5590 lhs = build2 (NE_EXPR, truth_type, lhs,
5591 build_int_cst (TREE_TYPE (lhs), 0));
5592 lcode = NE_EXPR;
5593 }
5594
5595 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5596 {
5597 rhs = build2 (NE_EXPR, truth_type, rhs,
5598 build_int_cst (TREE_TYPE (rhs), 0));
5599 rcode = NE_EXPR;
5600 }
5601
5602 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5603 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5604 return 0;
5605
5606 ll_arg = TREE_OPERAND (lhs, 0);
5607 lr_arg = TREE_OPERAND (lhs, 1);
5608 rl_arg = TREE_OPERAND (rhs, 0);
5609 rr_arg = TREE_OPERAND (rhs, 1);
5610
5611 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5612 if (simple_operand_p (ll_arg)
5613 && simple_operand_p (lr_arg))
5614 {
5615 if (operand_equal_p (ll_arg, rl_arg, 0)
5616 && operand_equal_p (lr_arg, rr_arg, 0))
5617 {
5618 result = combine_comparisons (loc, code, lcode, rcode,
5619 truth_type, ll_arg, lr_arg);
5620 if (result)
5621 return result;
5622 }
5623 else if (operand_equal_p (ll_arg, rr_arg, 0)
5624 && operand_equal_p (lr_arg, rl_arg, 0))
5625 {
5626 result = combine_comparisons (loc, code, lcode,
5627 swap_tree_comparison (rcode),
5628 truth_type, ll_arg, lr_arg);
5629 if (result)
5630 return result;
5631 }
5632 }
5633
5634 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5635 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5636
5637 /* If the RHS can be evaluated unconditionally and its operands are
5638 simple, it wins to evaluate the RHS unconditionally on machines
5639 with expensive branches. In this case, this isn't a comparison
5640 that can be merged. */
5641
5642 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5643 false) >= 2
5644 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5645 && simple_operand_p (rl_arg)
5646 && simple_operand_p (rr_arg))
5647 {
5648 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5649 if (code == TRUTH_OR_EXPR
5650 && lcode == NE_EXPR && integer_zerop (lr_arg)
5651 && rcode == NE_EXPR && integer_zerop (rr_arg)
5652 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5653 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5654 return build2_loc (loc, NE_EXPR, truth_type,
5655 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5656 ll_arg, rl_arg),
5657 build_int_cst (TREE_TYPE (ll_arg), 0));
5658
5659 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5660 if (code == TRUTH_AND_EXPR
5661 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5662 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5663 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5664 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5665 return build2_loc (loc, EQ_EXPR, truth_type,
5666 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5667 ll_arg, rl_arg),
5668 build_int_cst (TREE_TYPE (ll_arg), 0));
5669 }
5670
5671 /* See if the comparisons can be merged. Then get all the parameters for
5672 each side. */
5673
5674 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5675 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5676 return 0;
5677
5678 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5679 volatilep = 0;
5680 ll_inner = decode_field_reference (loc, ll_arg,
5681 &ll_bitsize, &ll_bitpos, &ll_mode,
5682 &ll_unsignedp, &ll_reversep, &volatilep,
5683 &ll_mask, &ll_and_mask);
5684 lr_inner = decode_field_reference (loc, lr_arg,
5685 &lr_bitsize, &lr_bitpos, &lr_mode,
5686 &lr_unsignedp, &lr_reversep, &volatilep,
5687 &lr_mask, &lr_and_mask);
5688 rl_inner = decode_field_reference (loc, rl_arg,
5689 &rl_bitsize, &rl_bitpos, &rl_mode,
5690 &rl_unsignedp, &rl_reversep, &volatilep,
5691 &rl_mask, &rl_and_mask);
5692 rr_inner = decode_field_reference (loc, rr_arg,
5693 &rr_bitsize, &rr_bitpos, &rr_mode,
5694 &rr_unsignedp, &rr_reversep, &volatilep,
5695 &rr_mask, &rr_and_mask);
5696
5697 /* The inner operation on the lhs of each comparison must be the
5698 same if we are to be able to do anything.
5699 Then see if we have constants. If not, the same must be true for
5700 the rhs's. */
5701 if (volatilep
5702 || ll_reversep != rl_reversep
5703 || ll_inner == 0 || rl_inner == 0
5704 || ! operand_equal_p (ll_inner, rl_inner, 0))
5705 return 0;
5706
5707 if (TREE_CODE (lr_arg) == INTEGER_CST
5708 && TREE_CODE (rr_arg) == INTEGER_CST)
5709 {
5710 l_const = lr_arg, r_const = rr_arg;
5711 lr_reversep = ll_reversep;
5712 }
5713 else if (lr_reversep != rr_reversep
5714 || lr_inner == 0 || rr_inner == 0
5715 || ! operand_equal_p (lr_inner, rr_inner, 0))
5716 return 0;
5717 else
5718 l_const = r_const = 0;
5719
5720 /* If either comparison code is not correct for our logical operation,
5721 fail. However, we can convert a one-bit comparison against zero into
5722 the opposite comparison against that bit being set in the field. */
5723
5724 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5725 if (lcode != wanted_code)
5726 {
5727 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5728 {
5729 /* Make the left operand unsigned, since we are only interested
5730 in the value of one bit. Otherwise we are doing the wrong
5731 thing below. */
5732 ll_unsignedp = 1;
5733 l_const = ll_mask;
5734 }
5735 else
5736 return 0;
5737 }
5738
5739 /* This is analogous to the code for l_const above. */
5740 if (rcode != wanted_code)
5741 {
5742 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5743 {
5744 rl_unsignedp = 1;
5745 r_const = rl_mask;
5746 }
5747 else
5748 return 0;
5749 }
5750
5751 /* See if we can find a mode that contains both fields being compared on
5752 the left. If we can't, fail. Otherwise, update all constants and masks
5753 to be relative to a field of that size. */
5754 first_bit = MIN (ll_bitpos, rl_bitpos);
5755 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5756 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5757 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5758 volatilep);
5759 if (lnmode == VOIDmode)
5760 return 0;
5761
5762 lnbitsize = GET_MODE_BITSIZE (lnmode);
5763 lnbitpos = first_bit & ~ (lnbitsize - 1);
5764 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5765 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5766
5767 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5768 {
5769 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5770 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5771 }
5772
5773 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5774 size_int (xll_bitpos));
5775 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5776 size_int (xrl_bitpos));
5777
5778 if (l_const)
5779 {
5780 l_const = fold_convert_loc (loc, lntype, l_const);
5781 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5782 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5783 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5784 fold_build1_loc (loc, BIT_NOT_EXPR,
5785 lntype, ll_mask))))
5786 {
5787 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5788
5789 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5790 }
5791 }
5792 if (r_const)
5793 {
5794 r_const = fold_convert_loc (loc, lntype, r_const);
5795 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5796 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5797 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5798 fold_build1_loc (loc, BIT_NOT_EXPR,
5799 lntype, rl_mask))))
5800 {
5801 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5802
5803 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5804 }
5805 }
5806
5807 /* If the right sides are not constant, do the same for them. Also,
5808 disallow this optimization if a size or signedness mismatch occurs
5809 between the left and right sides. */
5810 if (l_const == 0)
5811 {
5812 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5813 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5814 /* Make sure the two fields on the right
5815 correspond to the left without being swapped. */
5816 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5817 return 0;
5818
5819 first_bit = MIN (lr_bitpos, rr_bitpos);
5820 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5821 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5822 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5823 volatilep);
5824 if (rnmode == VOIDmode)
5825 return 0;
5826
5827 rnbitsize = GET_MODE_BITSIZE (rnmode);
5828 rnbitpos = first_bit & ~ (rnbitsize - 1);
5829 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5830 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5831
5832 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5833 {
5834 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5835 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5836 }
5837
5838 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5839 rntype, lr_mask),
5840 size_int (xlr_bitpos));
5841 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5842 rntype, rr_mask),
5843 size_int (xrr_bitpos));
5844
5845 /* Make a mask that corresponds to both fields being compared.
5846 Do this for both items being compared. If the operands are the
5847 same size and the bits being compared are in the same position
5848 then we can do this by masking both and comparing the masked
5849 results. */
5850 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5851 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5852 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5853 {
5854 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5855 ll_unsignedp || rl_unsignedp, ll_reversep);
5856 if (! all_ones_mask_p (ll_mask, lnbitsize))
5857 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5858
5859 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5860 lr_unsignedp || rr_unsignedp, lr_reversep);
5861 if (! all_ones_mask_p (lr_mask, rnbitsize))
5862 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5863
5864 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5865 }
5866
5867 /* There is still another way we can do something: If both pairs of
5868 fields being compared are adjacent, we may be able to make a wider
5869 field containing them both.
5870
5871 Note that we still must mask the lhs/rhs expressions. Furthermore,
5872 the mask must be shifted to account for the shift done by
5873 make_bit_field_ref. */
5874 if ((ll_bitsize + ll_bitpos == rl_bitpos
5875 && lr_bitsize + lr_bitpos == rr_bitpos)
5876 || (ll_bitpos == rl_bitpos + rl_bitsize
5877 && lr_bitpos == rr_bitpos + rr_bitsize))
5878 {
5879 tree type;
5880
5881 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5882 ll_bitsize + rl_bitsize,
5883 MIN (ll_bitpos, rl_bitpos),
5884 ll_unsignedp, ll_reversep);
5885 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5886 lr_bitsize + rr_bitsize,
5887 MIN (lr_bitpos, rr_bitpos),
5888 lr_unsignedp, lr_reversep);
5889
5890 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5891 size_int (MIN (xll_bitpos, xrl_bitpos)));
5892 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5893 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5894
5895 /* Convert to the smaller type before masking out unwanted bits. */
5896 type = lntype;
5897 if (lntype != rntype)
5898 {
5899 if (lnbitsize > rnbitsize)
5900 {
5901 lhs = fold_convert_loc (loc, rntype, lhs);
5902 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5903 type = rntype;
5904 }
5905 else if (lnbitsize < rnbitsize)
5906 {
5907 rhs = fold_convert_loc (loc, lntype, rhs);
5908 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5909 type = lntype;
5910 }
5911 }
5912
5913 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5914 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5915
5916 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5917 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5918
5919 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5920 }
5921
5922 return 0;
5923 }
5924
5925 /* Handle the case of comparisons with constants. If there is something in
5926 common between the masks, those bits of the constants must be the same.
5927 If not, the condition is always false. Test for this to avoid generating
5928 incorrect code below. */
5929 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5930 if (! integer_zerop (result)
5931 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5932 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5933 {
5934 if (wanted_code == NE_EXPR)
5935 {
5936 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5937 return constant_boolean_node (true, truth_type);
5938 }
5939 else
5940 {
5941 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5942 return constant_boolean_node (false, truth_type);
5943 }
5944 }
5945
5946 /* Construct the expression we will return. First get the component
5947 reference we will make. Unless the mask is all ones the width of
5948 that field, perform the mask operation. Then compare with the
5949 merged constant. */
5950 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5951 ll_unsignedp || rl_unsignedp, ll_reversep);
5952
5953 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5954 if (! all_ones_mask_p (ll_mask, lnbitsize))
5955 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5956
5957 return build2_loc (loc, wanted_code, truth_type, result,
5958 const_binop (BIT_IOR_EXPR, l_const, r_const));
5959 }
5960 \f
5961 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5962 constant. */
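/* For instance (added illustration): MAX (X, 4) > 4 folds to X > 4,
   and MIN (X, 4) == 5 folds to false, per the case analysis below.  */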
5963
5964 static tree
5965 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5966 tree op0, tree op1)
5967 {
5968 tree arg0 = op0;
5969 enum tree_code op_code;
5970 tree comp_const;
5971 tree minmax_const;
5972 int consts_equal, consts_lt;
5973 tree inner;
5974
5975 STRIP_SIGN_NOPS (arg0);
5976
5977 op_code = TREE_CODE (arg0);
5978 minmax_const = TREE_OPERAND (arg0, 1);
5979 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5980 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5981 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5982 inner = TREE_OPERAND (arg0, 0);
5983
5984 /* If something does not permit us to optimize, return the original tree. */
5985 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5986 || TREE_CODE (comp_const) != INTEGER_CST
5987 || TREE_OVERFLOW (comp_const)
5988 || TREE_CODE (minmax_const) != INTEGER_CST
5989 || TREE_OVERFLOW (minmax_const))
5990 return NULL_TREE;
5991
5992 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5993 and GT_EXPR, doing the rest with recursive calls using logical
5994 simplifications. */
5995 switch (code)
5996 {
5997 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5998 {
5999 tree tem
6000 = optimize_minmax_comparison (loc,
6001 invert_tree_comparison (code, false),
6002 type, op0, op1);
6003 if (tem)
6004 return invert_truthvalue_loc (loc, tem);
6005 return NULL_TREE;
6006 }
6007
6008 case GE_EXPR:
6009 return
6010 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
6011 optimize_minmax_comparison
6012 (loc, EQ_EXPR, type, arg0, comp_const),
6013 optimize_minmax_comparison
6014 (loc, GT_EXPR, type, arg0, comp_const));
6015
6016 case EQ_EXPR:
6017 if (op_code == MAX_EXPR && consts_equal)
6018 /* MAX (X, 0) == 0 -> X <= 0 */
6019 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
6020
6021 else if (op_code == MAX_EXPR && consts_lt)
6022 /* MAX (X, 0) == 5 -> X == 5 */
6023 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6024
6025 else if (op_code == MAX_EXPR)
6026 /* MAX (X, 0) == -1 -> false */
6027 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6028
6029 else if (consts_equal)
6030 /* MIN (X, 0) == 0 -> X >= 0 */
6031 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
6032
6033 else if (consts_lt)
6034 /* MIN (X, 0) == 5 -> false */
6035 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6036
6037 else
6038 /* MIN (X, 0) == -1 -> X == -1 */
6039 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
6040
6041 case GT_EXPR:
6042 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
6043 /* MAX (X, 0) > 0 -> X > 0
6044 MAX (X, 0) > 5 -> X > 5 */
6045 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6046
6047 else if (op_code == MAX_EXPR)
6048 /* MAX (X, 0) > -1 -> true */
6049 return omit_one_operand_loc (loc, type, integer_one_node, inner);
6050
6051 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
6052 /* MIN (X, 0) > 0 -> false
6053 MIN (X, 0) > 5 -> false */
6054 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
6055
6056 else
6057 /* MIN (X, 0) > -1 -> X > -1 */
6058 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
6059
6060 default:
6061 return NULL_TREE;
6062 }
6063 }
6064 \f
6065 /* T is an integer expression that is being multiplied, divided, or taken a
6066 modulus (CODE says which and what kind of divide or modulus) by a
6067 constant C. See if we can eliminate that operation by folding it with
6068 other operations already in T. WIDE_TYPE, if non-null, is a type that
6069 should be used for the computation if wider than our type.
6070
6071 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6072 (X * 2) + (Y * 4). We must, however, be assured that either the original
6073 expression would not overflow or that overflow is undefined for the type
6074 in the language in question.
6075
6076 If we return a non-null expression, it is an equivalent form of the
6077 original computation, but need not be in the original type.
6078
6079 We set *STRICT_OVERFLOW_P to true if the return value depends on
6080 signed overflow being undefined. Otherwise we do not change
6081 *STRICT_OVERFLOW_P. */
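/* Added illustration: with signed X, (X * 6 + 4) / 2 folds to
   X * 3 + 2, since both addends divide evenly; *STRICT_OVERFLOW_P is
   set because the fold relies on signed overflow being undefined.
   (X * 6 + 3) / 2 is left alone, as 3 is not divisible by 2.  */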
6082
6083 static tree
6084 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6085 bool *strict_overflow_p)
6086 {
6087 /* To avoid exponential search depth, refuse to allow recursion past
6088 three levels. Beyond that (1) it's highly unlikely that we'll find
6089 something interesting and (2) we've probably processed it before
6090 when we built the inner expression. */
6091
6092 static int depth;
6093 tree ret;
6094
6095 if (depth > 3)
6096 return NULL;
6097
6098 depth++;
6099 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6100 depth--;
6101
6102 return ret;
6103 }
6104
6105 static tree
6106 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6107 bool *strict_overflow_p)
6108 {
6109 tree type = TREE_TYPE (t);
6110 enum tree_code tcode = TREE_CODE (t);
6111 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6112 > GET_MODE_SIZE (TYPE_MODE (type)))
6113 ? wide_type : type);
6114 tree t1, t2;
6115 int same_p = tcode == code;
6116 tree op0 = NULL_TREE, op1 = NULL_TREE;
6117 bool sub_strict_overflow_p;
6118
6119 /* Don't deal with constants of zero here; they confuse the code below. */
6120 if (integer_zerop (c))
6121 return NULL_TREE;
6122
6123 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6124 op0 = TREE_OPERAND (t, 0);
6125
6126 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6127 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6128
6129 /* Note that we need not handle conditional operations here since fold
6130 already handles those cases. So just do arithmetic here. */
6131 switch (tcode)
6132 {
6133 case INTEGER_CST:
6134 /* For a constant, we can always simplify if we are a multiply
6135 or (for divide and modulus) if it is a multiple of our constant. */
6136 if (code == MULT_EXPR
6137 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6138 {
6139 tree tem = const_binop (code, fold_convert (ctype, t),
6140 fold_convert (ctype, c));
6141 /* If the multiplication overflowed, we lost information on it.
6142 See PR68142 and PR69845. */
6143 if (TREE_OVERFLOW (tem))
6144 return NULL_TREE;
6145 return tem;
6146 }
6147 break;
6148
6149 CASE_CONVERT: case NON_LVALUE_EXPR:
6150 /* If op0 is an expression ... */
6151 if ((COMPARISON_CLASS_P (op0)
6152 || UNARY_CLASS_P (op0)
6153 || BINARY_CLASS_P (op0)
6154 || VL_EXP_CLASS_P (op0)
6155 || EXPRESSION_CLASS_P (op0))
6156 /* ... and has wrapping overflow, and its type is smaller
6157 than ctype, then we cannot pass through as widening. */
6158 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6159 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6160 && (TYPE_PRECISION (ctype)
6161 > TYPE_PRECISION (TREE_TYPE (op0))))
6162 /* ... or this is a truncation (t is narrower than op0),
6163 then we cannot pass through this narrowing. */
6164 || (TYPE_PRECISION (type)
6165 < TYPE_PRECISION (TREE_TYPE (op0)))
6166 /* ... or signedness changes for division or modulus,
6167 then we cannot pass through this conversion. */
6168 || (code != MULT_EXPR
6169 && (TYPE_UNSIGNED (ctype)
6170 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6171 /* ... or has undefined overflow while the converted to
6172 type has not, we cannot do the operation in the inner type
6173 as that would introduce undefined overflow. */
6174 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6175 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6176 && !TYPE_OVERFLOW_UNDEFINED (type))))
6177 break;
6178
6179 /* Pass the constant down and see if we can make a simplification. If
6180 we can, replace this expression with the inner simplification for
6181 possible later conversion to our or some other type. */
6182 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6183 && TREE_CODE (t2) == INTEGER_CST
6184 && !TREE_OVERFLOW (t2)
6185 && (0 != (t1 = extract_muldiv (op0, t2, code,
6186 code == MULT_EXPR
6187 ? ctype : NULL_TREE,
6188 strict_overflow_p))))
6189 return t1;
6190 break;
6191
6192 case ABS_EXPR:
6193 /* If widening the type changes it from signed to unsigned, then we
6194 must avoid building ABS_EXPR itself as unsigned. */
6195 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6196 {
6197 tree cstype = (*signed_type_for) (ctype);
6198 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6199 != 0)
6200 {
6201 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6202 return fold_convert (ctype, t1);
6203 }
6204 break;
6205 }
6206 /* If the constant is negative, we cannot simplify this. */
6207 if (tree_int_cst_sgn (c) == -1)
6208 break;
6209 /* FALLTHROUGH */
6210 case NEGATE_EXPR:
6211 /* For division and modulus, type can't be unsigned, as e.g.
6212 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6213 For signed types, even with wrapping overflow, this is fine. */
6214 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6215 break;
6216 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6217 != 0)
6218 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6219 break;
6220
6221 case MIN_EXPR: case MAX_EXPR:
6222 /* If widening the type changes the signedness, then we can't perform
6223 this optimization as that changes the result. */
6224 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6225 break;
6226
6227 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6228 sub_strict_overflow_p = false;
6229 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6230 &sub_strict_overflow_p)) != 0
6231 && (t2 = extract_muldiv (op1, c, code, wide_type,
6232 &sub_strict_overflow_p)) != 0)
6233 {
6234 if (tree_int_cst_sgn (c) < 0)
6235 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6236 if (sub_strict_overflow_p)
6237 *strict_overflow_p = true;
6238 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6239 fold_convert (ctype, t2));
6240 }
6241 break;
6242
6243 case LSHIFT_EXPR: case RSHIFT_EXPR:
6244 /* If the second operand is constant, this is a multiplication
6245 or floor division, by a power of two, so we can treat it that
6246 way unless the multiplier or divisor overflows. Signed
6247 left-shift overflow is implementation-defined rather than
6248 undefined in C90, so do not convert signed left shift into
6249 multiplication. */
6250 if (TREE_CODE (op1) == INTEGER_CST
6251 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6252 /* const_binop may not detect overflow correctly,
6253 so check for it explicitly here. */
6254 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6255 && 0 != (t1 = fold_convert (ctype,
6256 const_binop (LSHIFT_EXPR,
6257 size_one_node,
6258 op1)))
6259 && !TREE_OVERFLOW (t1))
6260 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6261 ? MULT_EXPR : FLOOR_DIV_EXPR,
6262 ctype,
6263 fold_convert (ctype, op0),
6264 t1),
6265 c, code, wide_type, strict_overflow_p);
6266 break;
6267
6268 case PLUS_EXPR: case MINUS_EXPR:
6269 /* See if we can eliminate the operation on both sides. If we can, we
6270 can return a new PLUS or MINUS. If we can't, the only remaining
6271 cases where we can do anything are if the second operand is a
6272 constant. */
6273 sub_strict_overflow_p = false;
6274 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6275 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6276 if (t1 != 0 && t2 != 0
6277 && (code == MULT_EXPR
6278 /* If not multiplication, we can only do this if both operands
6279 are divisible by c. */
6280 || (multiple_of_p (ctype, op0, c)
6281 && multiple_of_p (ctype, op1, c))))
6282 {
6283 if (sub_strict_overflow_p)
6284 *strict_overflow_p = true;
6285 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6286 fold_convert (ctype, t2));
6287 }
6288
6289 /* If this was a subtraction, negate OP1 and set it to be an addition.
6290 This simplifies the logic below. */
6291 if (tcode == MINUS_EXPR)
6292 {
6293 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6294 /* If OP1 was not easily negatable, the constant may be OP0. */
6295 if (TREE_CODE (op0) == INTEGER_CST)
6296 {
6297 std::swap (op0, op1);
6298 std::swap (t1, t2);
6299 }
6300 }
6301
6302 if (TREE_CODE (op1) != INTEGER_CST)
6303 break;
6304
6305 /* If either OP1 or C are negative, this optimization is not safe for
6306 some of the division and remainder types while for others we need
6307 to change the code. */
6308 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6309 {
6310 if (code == CEIL_DIV_EXPR)
6311 code = FLOOR_DIV_EXPR;
6312 else if (code == FLOOR_DIV_EXPR)
6313 code = CEIL_DIV_EXPR;
6314 else if (code != MULT_EXPR
6315 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6316 break;
6317 }
6318
6319 /* If it's a multiply or a division/modulus operation of a multiple
6320 of our constant, do the operation and verify it doesn't overflow. */
6321 if (code == MULT_EXPR
6322 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6323 {
6324 op1 = const_binop (code, fold_convert (ctype, op1),
6325 fold_convert (ctype, c));
6326 /* We allow the constant to overflow with wrapping semantics. */
6327 if (op1 == 0
6328 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6329 break;
6330 }
6331 else
6332 break;
6333
6334 /* If we have an unsigned type, we cannot widen the operation since it
6335 will change the result if the original computation overflowed. */
6336 if (TYPE_UNSIGNED (ctype) && ctype != type)
6337 break;
6338
6339 /* If we were able to eliminate our operation from the first side,
6340 apply our operation to the second side and reform the PLUS. */
6341 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6342 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6343
6344 /* The last case is if we are a multiply. In that case, we can
6345 apply the distributive law to commute the multiply and addition
6346 if the multiplication of the constants doesn't overflow
6347 and overflow is defined. With undefined overflow
6348 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6349 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6350 return fold_build2 (tcode, ctype,
6351 fold_build2 (code, ctype,
6352 fold_convert (ctype, op0),
6353 fold_convert (ctype, c)),
6354 op1);
6355
6356 break;
6357
6358 case MULT_EXPR:
6359 /* We have a special case here if we are doing something like
6360 (C * 8) % 4 since we know that's zero. */
6361 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6362 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6363 /* If the multiplication can overflow we cannot optimize this. */
6364 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6365 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6366 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6367 {
6368 *strict_overflow_p = true;
6369 return omit_one_operand (type, integer_zero_node, op0);
6370 }
6371
6372 /* ... fall through ... */
6373
6374 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6375 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6376 /* If we can extract our operation from the LHS, do so and return a
6377 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6378 do something only if the second operand is a constant. */
6379 if (same_p
6380 && (t1 = extract_muldiv (op0, c, code, wide_type,
6381 strict_overflow_p)) != 0)
6382 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6383 fold_convert (ctype, op1));
6384 else if (tcode == MULT_EXPR && code == MULT_EXPR
6385 && (t1 = extract_muldiv (op1, c, code, wide_type,
6386 strict_overflow_p)) != 0)
6387 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6388 fold_convert (ctype, t1));
6389 else if (TREE_CODE (op1) != INTEGER_CST)
6390 return 0;
6391
6392 /* If these are the same operation types, we can associate them
6393 assuming no overflow. */
6394 if (tcode == code)
6395 {
6396 bool overflow_p = false;
6397 bool overflow_mul_p;
6398 signop sign = TYPE_SIGN (ctype);
6399 unsigned prec = TYPE_PRECISION (ctype);
6400 wide_int mul = wi::mul (wide_int::from (op1, prec,
6401 TYPE_SIGN (TREE_TYPE (op1))),
6402 wide_int::from (c, prec,
6403 TYPE_SIGN (TREE_TYPE (c))),
6404 sign, &overflow_mul_p);
6405 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6406 if (overflow_mul_p
6407 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6408 overflow_p = true;
6409 if (!overflow_p)
6410 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6411 wide_int_to_tree (ctype, mul));
6412 }
6413
6414 /* If these operations "cancel" each other, we have the main
6415 optimizations of this pass, which occur when either constant is a
6416 multiple of the other, in which case we replace this with either an
6417 operation of CODE or TCODE.
6418
6419 If we have an unsigned type, we cannot do this since it will change
6420 the result if the original computation overflowed. */
6421 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6422 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6423 || (tcode == MULT_EXPR
6424 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6425 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6426 && code != MULT_EXPR)))
6427 {
6428 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6429 {
6430 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6431 *strict_overflow_p = true;
6432 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6433 fold_convert (ctype,
6434 const_binop (TRUNC_DIV_EXPR,
6435 op1, c)));
6436 }
6437 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6438 {
6439 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6440 *strict_overflow_p = true;
6441 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6442 fold_convert (ctype,
6443 const_binop (TRUNC_DIV_EXPR,
6444 c, op1)));
6445 }
6446 }
6447 break;
6448
6449 default:
6450 break;
6451 }
6452
6453 return 0;
6454 }
6455 \f
6456 /* Return a node which has the indicated constant VALUE (either 0 or
6457 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6458 and is of the indicated TYPE. */
6459
6460 tree
6461 constant_boolean_node (bool value, tree type)
6462 {
6463 if (type == integer_type_node)
6464 return value ? integer_one_node : integer_zero_node;
6465 else if (type == boolean_type_node)
6466 return value ? boolean_true_node : boolean_false_node;
6467 else if (TREE_CODE (type) == VECTOR_TYPE)
6468 return build_vector_from_val (type,
6469 build_int_cst (TREE_TYPE (type),
6470 value ? -1 : 0));
6471 else
6472 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6473 }
6474
6475
6476 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6477 Transform `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6478 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6479 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6480 COND is the first argument to CODE; otherwise (as in the example
6481 given here), it is the second argument. TYPE is the type of the
6482 original expression. Return NULL_TREE if no simplification is
6483 possible. */
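/* A minimal instance (added illustration): with CODE == PLUS_EXPR,
   2 + (b ? 4 : 8) becomes b ? 6 : 10; both arms simplify to
   constants, so the transformation is considered worthwhile below.  */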
6484
6485 static tree
6486 fold_binary_op_with_conditional_arg (location_t loc,
6487 enum tree_code code,
6488 tree type, tree op0, tree op1,
6489 tree cond, tree arg, int cond_first_p)
6490 {
6491 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6492 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6493 tree test, true_value, false_value;
6494 tree lhs = NULL_TREE;
6495 tree rhs = NULL_TREE;
6496 enum tree_code cond_code = COND_EXPR;
6497
6498 if (TREE_CODE (cond) == COND_EXPR
6499 || TREE_CODE (cond) == VEC_COND_EXPR)
6500 {
6501 test = TREE_OPERAND (cond, 0);
6502 true_value = TREE_OPERAND (cond, 1);
6503 false_value = TREE_OPERAND (cond, 2);
6504 /* If this operand throws an exception, then it does not make
6505 sense to try to perform a logical or arithmetic operation
6506 involving it. */
6507 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6508 lhs = true_value;
6509 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6510 rhs = false_value;
6511 }
6512 else if (!(TREE_CODE (type) != VECTOR_TYPE
6513 && TREE_CODE (TREE_TYPE (cond)) == VECTOR_TYPE))
6514 {
6515 tree testtype = TREE_TYPE (cond);
6516 test = cond;
6517 true_value = constant_boolean_node (true, testtype);
6518 false_value = constant_boolean_node (false, testtype);
6519 }
6520 else
6521 /* Detect the case of mixing vector and scalar types - bail out. */
6522 return NULL_TREE;
6523
6524 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6525 cond_code = VEC_COND_EXPR;
6526
6527 /* This transformation is only worthwhile if we don't have to wrap ARG
6528 in a SAVE_EXPR and the operation can be simplified without recursing
6529 on at least one of the branches once it's pushed inside the COND_EXPR. */
6530 if (!TREE_CONSTANT (arg)
6531 && (TREE_SIDE_EFFECTS (arg)
6532 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6533 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6534 return NULL_TREE;
6535
6536 arg = fold_convert_loc (loc, arg_type, arg);
6537 if (lhs == 0)
6538 {
6539 true_value = fold_convert_loc (loc, cond_type, true_value);
6540 if (cond_first_p)
6541 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6542 else
6543 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6544 }
6545 if (rhs == 0)
6546 {
6547 false_value = fold_convert_loc (loc, cond_type, false_value);
6548 if (cond_first_p)
6549 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6550 else
6551 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6552 }
6553
6554 /* Check that we have simplified at least one of the branches. */
6555 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6556 return NULL_TREE;
6557
6558 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6559 }
6560
6561 \f
6562 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6563
6564 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6565 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6566 ADDEND is the same as X.
6567
6568 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6569 and finite. The problematic cases are when X is zero, and its mode
6570 has signed zeros. In the case of rounding towards -infinity,
6571 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6572 modes, X + 0 is not the same as X because -0 + 0 is 0. */
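/* Concrete cases (added illustration): under round-to-nearest,
   (-0.0) + 0.0 evaluates to +0.0, so X + 0.0 is not X for X == -0.0;
   under rounding towards -infinity, 0.0 - 0.0 evaluates to -0.0, so
   X - 0.0 is not X for X == +0.0.  */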
6573
6574 bool
6575 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6576 {
6577 if (!real_zerop (addend))
6578 return false;
6579
6580 /* Don't allow the fold with -fsignaling-nans. */
6581 if (HONOR_SNANS (element_mode (type)))
6582 return false;
6583
6584 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6585 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6586 return true;
6587
6588 /* In a vector or complex, we would need to check the sign of all zeros. */
6589 if (TREE_CODE (addend) != REAL_CST)
6590 return false;
6591
6592 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6593 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6594 negate = !negate;
6595
6596 /* The mode has signed zeros, and we have to honor their sign.
6597 In this situation, there is only one case we can return true for.
6598 X - 0 is the same as X unless rounding towards -infinity is
6599 supported. */
6600 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6601 }
6602
6603 /* Subroutine of fold() that optimizes comparisons of a division by
6604 a nonzero integer constant against an integer constant, i.e.
6605 X/C1 op C2.
6606
6607 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6608 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6609 are the operands of the comparison. ARG1 must be an INTEGER_CST.
6610
6611 The function returns the constant folded tree if a simplification
6612 can be made, and NULL_TREE otherwise. */
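/* Worked example (added illustration): for unsigned X, "X / 4 == 3"
   holds exactly when X is in [12, 15], so it becomes the range check
   built below, and "X / 4 > 3" becomes X > 15.  */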
6613
6614 static tree
6615 fold_div_compare (location_t loc,
6616 enum tree_code code, tree type, tree arg0, tree arg1)
6617 {
6618 tree prod, tmp, hi, lo;
6619 tree arg00 = TREE_OPERAND (arg0, 0);
6620 tree arg01 = TREE_OPERAND (arg0, 1);
6621 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6622 bool neg_overflow = false;
6623 bool overflow;
6624
6625 /* We have to do this the hard way to detect unsigned overflow.
6626 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6627 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6628 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6629 neg_overflow = false;
6630
6631 if (sign == UNSIGNED)
6632 {
6633 tmp = int_const_binop (MINUS_EXPR, arg01,
6634 build_int_cst (TREE_TYPE (arg01), 1));
6635 lo = prod;
6636
6637 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6638 val = wi::add (prod, tmp, sign, &overflow);
6639 hi = force_fit_type (TREE_TYPE (arg00), val,
6640 -1, overflow | TREE_OVERFLOW (prod));
6641 }
6642 else if (tree_int_cst_sgn (arg01) >= 0)
6643 {
6644 tmp = int_const_binop (MINUS_EXPR, arg01,
6645 build_int_cst (TREE_TYPE (arg01), 1));
6646 switch (tree_int_cst_sgn (arg1))
6647 {
6648 case -1:
6649 neg_overflow = true;
6650 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6651 hi = prod;
6652 break;
6653
6654 case 0:
6655 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6656 hi = tmp;
6657 break;
6658
6659 case 1:
6660 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6661 lo = prod;
6662 break;
6663
6664 default:
6665 gcc_unreachable ();
6666 }
6667 }
6668 else
6669 {
6670 /* A negative divisor reverses the relational operators. */
6671 code = swap_tree_comparison (code);
6672
6673 tmp = int_const_binop (PLUS_EXPR, arg01,
6674 build_int_cst (TREE_TYPE (arg01), 1));
6675 switch (tree_int_cst_sgn (arg1))
6676 {
6677 case -1:
6678 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6679 lo = prod;
6680 break;
6681
6682 case 0:
6683 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6684 lo = tmp;
6685 break;
6686
6687 case 1:
6688 neg_overflow = true;
6689 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6690 hi = prod;
6691 break;
6692
6693 default:
6694 gcc_unreachable ();
6695 }
6696 }
6697
6698 switch (code)
6699 {
6700 case EQ_EXPR:
6701 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6702 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6703 if (TREE_OVERFLOW (hi))
6704 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6705 if (TREE_OVERFLOW (lo))
6706 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6707 return build_range_check (loc, type, arg00, 1, lo, hi);
6708
6709 case NE_EXPR:
6710 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6711 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6712 if (TREE_OVERFLOW (hi))
6713 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6714 if (TREE_OVERFLOW (lo))
6715 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6716 return build_range_check (loc, type, arg00, 0, lo, hi);
6717
6718 case LT_EXPR:
6719 if (TREE_OVERFLOW (lo))
6720 {
6721 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6722 return omit_one_operand_loc (loc, type, tmp, arg00);
6723 }
6724 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6725
6726 case LE_EXPR:
6727 if (TREE_OVERFLOW (hi))
6728 {
6729 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6730 return omit_one_operand_loc (loc, type, tmp, arg00);
6731 }
6732 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6733
6734 case GT_EXPR:
6735 if (TREE_OVERFLOW (hi))
6736 {
6737 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6738 return omit_one_operand_loc (loc, type, tmp, arg00);
6739 }
6740 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6741
6742 case GE_EXPR:
6743 if (TREE_OVERFLOW (lo))
6744 {
6745 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6746 return omit_one_operand_loc (loc, type, tmp, arg00);
6747 }
6748 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6749
6750 default:
6751 break;
6752 }
6753
6754 return NULL_TREE;
6755 }
6756
6757
6758 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6759 equality/inequality test, then return a simplified form of the test
6760 using a sign test. Otherwise return NULL. TYPE is the desired
6761 result type. */
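/* For example (added illustration): for an 8-bit unsigned A,
   (A & 0x80) != 0 tests A's sign bit and becomes (signed char) A < 0,
   while (A & 0x80) == 0 becomes (signed char) A >= 0.  */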
6762
6763 static tree
6764 fold_single_bit_test_into_sign_test (location_t loc,
6765 enum tree_code code, tree arg0, tree arg1,
6766 tree result_type)
6767 {
6768 /* If this is testing a single bit, we can optimize the test. */
6769 if ((code == NE_EXPR || code == EQ_EXPR)
6770 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6771 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6772 {
6773 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6774 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6775 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6776
6777 if (arg00 != NULL_TREE
6778 /* This is only a win if casting to a signed type is cheap,
6779 i.e. when arg00's type is not a partial mode. */
6780 && TYPE_PRECISION (TREE_TYPE (arg00))
6781 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6782 {
6783 tree stype = signed_type_for (TREE_TYPE (arg00));
6784 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6785 result_type,
6786 fold_convert_loc (loc, stype, arg00),
6787 build_int_cst (stype, 0));
6788 }
6789 }
6790
6791 return NULL_TREE;
6792 }
6793
6794 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6795 equality/inequality test, then return a simplified form of
6796 the test using shifts and logical operations. Otherwise return
6797 NULL. TYPE is the desired result type. */
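/* For example (added illustration): (A & 8) != 0 becomes
   (A >> 3) & 1, and (A & 8) == 0 becomes ((A >> 3) ^ 1) & 1, with
   the shift and mask performed in an intermediate type chosen below.  */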
6798
6799 tree
6800 fold_single_bit_test (location_t loc, enum tree_code code,
6801 tree arg0, tree arg1, tree result_type)
6802 {
6803 /* If this is testing a single bit, we can optimize the test. */
6804 if ((code == NE_EXPR || code == EQ_EXPR)
6805 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6806 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6807 {
6808 tree inner = TREE_OPERAND (arg0, 0);
6809 tree type = TREE_TYPE (arg0);
6810 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6811 machine_mode operand_mode = TYPE_MODE (type);
6812 int ops_unsigned;
6813 tree signed_type, unsigned_type, intermediate_type;
6814 tree tem, one;
6815
6816 /* First, see if we can fold the single bit test into a sign-bit
6817 test. */
6818 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6819 result_type);
6820 if (tem)
6821 return tem;
6822
6823 /* Otherwise we have (A & C) != 0 where C is a single bit,
6824 convert that into ((A >> C2) & 1), where C2 = log2(C).
6825 Similarly for (A & C) == 0. */
6826
6827 /* If INNER is a right shift of a constant and it plus BITNUM does
6828 not overflow, adjust BITNUM and INNER. */
6829 if (TREE_CODE (inner) == RSHIFT_EXPR
6830 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6831 && bitnum < TYPE_PRECISION (type)
6832 && wi::ltu_p (TREE_OPERAND (inner, 1),
6833 TYPE_PRECISION (type) - bitnum))
6834 {
6835 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6836 inner = TREE_OPERAND (inner, 0);
6837 }
6838
6839 /* If we are going to be able to omit the AND below, we must do our
6840 operations as unsigned. If we must use the AND, we have a choice.
6841 Normally unsigned is faster, but for some machines signed is. */
6842 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6843 && !flag_syntax_only) ? 0 : 1;
6844
6845 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6846 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6847 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6848 inner = fold_convert_loc (loc, intermediate_type, inner);
6849
6850 if (bitnum != 0)
6851 inner = build2 (RSHIFT_EXPR, intermediate_type,
6852 inner, size_int (bitnum));
6853
6854 one = build_int_cst (intermediate_type, 1);
6855
6856 if (code == EQ_EXPR)
6857 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6858
6859 /* Put the AND last so it can combine with more things. */
6860 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6861
6862 /* Make sure to return the proper type. */
6863 inner = fold_convert_loc (loc, result_type, inner);
6864
6865 return inner;
6866 }
6867 return NULL_TREE;
6868 }
6869
6870 /* Check whether we are allowed to reorder operands arg0 and arg1,
6871 such that the evaluation of arg1 occurs before arg0. */
6872
6873 static bool
6874 reorder_operands_p (const_tree arg0, const_tree arg1)
6875 {
6876 if (! flag_evaluation_order)
6877 return true;
6878 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6879 return true;
6880 return ! TREE_SIDE_EFFECTS (arg0)
6881 && ! TREE_SIDE_EFFECTS (arg1);
6882 }
6883
6884 /* Test whether it is preferable to swap two operands, ARG0 and
6885 ARG1, for example because ARG0 is an integer constant and ARG1
6886 isn't. If REORDER is true, only recommend swapping if we can
6887 evaluate the operands in reverse order. */
6888
6889 bool
6890 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6891 {
6892 if (CONSTANT_CLASS_P (arg1))
6893 return 0;
6894 if (CONSTANT_CLASS_P (arg0))
6895 return 1;
6896
6897 STRIP_NOPS (arg0);
6898 STRIP_NOPS (arg1);
6899
6900 if (TREE_CONSTANT (arg1))
6901 return 0;
6902 if (TREE_CONSTANT (arg0))
6903 return 1;
6904
6905 if (reorder && flag_evaluation_order
6906 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6907 return 0;
6908
6909 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6910 for commutative and comparison operators. Ensuring a canonical
6911 form allows the optimizers to find additional redundancies without
6912 having to explicitly check for both orderings. */
6913 if (TREE_CODE (arg0) == SSA_NAME
6914 && TREE_CODE (arg1) == SSA_NAME
6915 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6916 return 1;
6917
6918 /* Put SSA_NAMEs last. */
6919 if (TREE_CODE (arg1) == SSA_NAME)
6920 return 0;
6921 if (TREE_CODE (arg0) == SSA_NAME)
6922 return 1;
6923
6924 /* Put variables last. */
6925 if (DECL_P (arg1))
6926 return 0;
6927 if (DECL_P (arg0))
6928 return 1;
6929
6930 return 0;
6931 }
6932
6933
6934 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6935 means A >= Y && A != MAX, but in this case we know that
6936 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
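/* For example, for a signed int A, "A < 10 && A + 1 > 5" folds to
   "A < 10 && A >= 5".  */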
6937
6938 static tree
6939 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6940 {
6941 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6942
6943 if (TREE_CODE (bound) == LT_EXPR)
6944 a = TREE_OPERAND (bound, 0);
6945 else if (TREE_CODE (bound) == GT_EXPR)
6946 a = TREE_OPERAND (bound, 1);
6947 else
6948 return NULL_TREE;
6949
6950 typea = TREE_TYPE (a);
6951 if (!INTEGRAL_TYPE_P (typea)
6952 && !POINTER_TYPE_P (typea))
6953 return NULL_TREE;
6954
6955 if (TREE_CODE (ineq) == LT_EXPR)
6956 {
6957 a1 = TREE_OPERAND (ineq, 1);
6958 y = TREE_OPERAND (ineq, 0);
6959 }
6960 else if (TREE_CODE (ineq) == GT_EXPR)
6961 {
6962 a1 = TREE_OPERAND (ineq, 0);
6963 y = TREE_OPERAND (ineq, 1);
6964 }
6965 else
6966 return NULL_TREE;
6967
6968 if (TREE_TYPE (a1) != typea)
6969 return NULL_TREE;
6970
6971 if (POINTER_TYPE_P (typea))
6972 {
      /* Convert the pointers to integers before taking the difference.  */
6974 tree ta = fold_convert_loc (loc, ssizetype, a);
6975 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6976 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6977 }
6978 else
6979 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6980
6981 if (!diff || !integer_onep (diff))
6982 return NULL_TREE;
6983
6984 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6985 }
6986
6987 /* Fold a sum or difference of at least one multiplication.
6988 Returns the folded tree or NULL if no simplification could be made. */
6989
6990 static tree
6991 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6992 tree arg0, tree arg1)
6993 {
6994 tree arg00, arg01, arg10, arg11;
6995 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6996
6997 /* (A * C) +- (B * C) -> (A+-B) * C.
6998 (A * C) +- A -> A * (C+-1).
6999 We are most concerned about the case where C is a constant,
7000 but other combinations show up during loop reduction. Since
7001 it is not difficult, try all four possibilities. */
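  /* For example, "i * 4 + i * 2" folds to "(4 + 2) * i", and
     "i * 4 + i" folds to "(4 + 1) * i".  */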
7002
7003 if (TREE_CODE (arg0) == MULT_EXPR)
7004 {
7005 arg00 = TREE_OPERAND (arg0, 0);
7006 arg01 = TREE_OPERAND (arg0, 1);
7007 }
7008 else if (TREE_CODE (arg0) == INTEGER_CST)
7009 {
7010 arg00 = build_one_cst (type);
7011 arg01 = arg0;
7012 }
7013 else
7014 {
7015 /* We cannot generate constant 1 for fract. */
7016 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7017 return NULL_TREE;
7018 arg00 = arg0;
7019 arg01 = build_one_cst (type);
7020 }
7021 if (TREE_CODE (arg1) == MULT_EXPR)
7022 {
7023 arg10 = TREE_OPERAND (arg1, 0);
7024 arg11 = TREE_OPERAND (arg1, 1);
7025 }
7026 else if (TREE_CODE (arg1) == INTEGER_CST)
7027 {
7028 arg10 = build_one_cst (type);
      /* As we canonicalize A - 2 to A + -2, get rid of that sign for
	 the purpose of this canonicalization.  */
7031 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
7032 && negate_expr_p (arg1)
7033 && code == PLUS_EXPR)
7034 {
7035 arg11 = negate_expr (arg1);
7036 code = MINUS_EXPR;
7037 }
7038 else
7039 arg11 = arg1;
7040 }
7041 else
7042 {
7043 /* We cannot generate constant 1 for fract. */
7044 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
7045 return NULL_TREE;
7046 arg10 = arg1;
7047 arg11 = build_one_cst (type);
7048 }
7049 same = NULL_TREE;
7050
7051 if (operand_equal_p (arg01, arg11, 0))
7052 same = arg01, alt0 = arg00, alt1 = arg10;
7053 else if (operand_equal_p (arg00, arg10, 0))
7054 same = arg00, alt0 = arg01, alt1 = arg11;
7055 else if (operand_equal_p (arg00, arg11, 0))
7056 same = arg00, alt0 = arg01, alt1 = arg10;
7057 else if (operand_equal_p (arg01, arg10, 0))
7058 same = arg01, alt0 = arg00, alt1 = arg11;
7059
7060 /* No identical multiplicands; see if we can find a common
7061 power-of-two factor in non-power-of-two multiplies. This
7062 can help in multi-dimensional array access. */
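  /* For example, "i * 12 + j * 4" becomes "(i * 3 + j) * 4",
     exposing the common power-of-two factor.  */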
7063 else if (tree_fits_shwi_p (arg01)
7064 && tree_fits_shwi_p (arg11))
7065 {
7066 HOST_WIDE_INT int01, int11, tmp;
7067 bool swap = false;
7068 tree maybe_same;
7069 int01 = tree_to_shwi (arg01);
7070 int11 = tree_to_shwi (arg11);
7071
7072 /* Move min of absolute values to int11. */
7073 if (absu_hwi (int01) < absu_hwi (int11))
7074 {
7075 tmp = int01, int01 = int11, int11 = tmp;
7076 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7077 maybe_same = arg01;
7078 swap = true;
7079 }
7080 else
7081 maybe_same = arg11;
7082
7083 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7084 /* The remainder should not be a constant, otherwise we
7085 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7086 increased the number of multiplications necessary. */
7087 && TREE_CODE (arg10) != INTEGER_CST)
7088 {
7089 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7090 build_int_cst (TREE_TYPE (arg00),
7091 int01 / int11));
7092 alt1 = arg10;
7093 same = maybe_same;
7094 if (swap)
7095 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7096 }
7097 }
7098
7099 if (same)
7100 return fold_build2_loc (loc, MULT_EXPR, type,
7101 fold_build2_loc (loc, code, type,
7102 fold_convert_loc (loc, type, alt0),
7103 fold_convert_loc (loc, type, alt1)),
7104 fold_convert_loc (loc, type, same));
7105
7106 return NULL_TREE;
7107 }
7108
7109 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7110 specified by EXPR into the buffer PTR of length LEN bytes.
7111 Return the number of bytes placed in the buffer, or zero
7112 upon failure. */
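/* For instance, on a little-endian target with 8-bit bytes, encoding
   a 32-bit INTEGER_CST with value 0x01020304 stores the bytes
   { 0x04, 0x03, 0x02, 0x01 } (word order additionally depends on
   WORDS_BIG_ENDIAN when the constant is wider than a word).  */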
7113
7114 static int
7115 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7116 {
7117 tree type = TREE_TYPE (expr);
7118 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7119 int byte, offset, word, words;
7120 unsigned char value;
7121
7122 if ((off == -1 && total_bytes > len)
7123 || off >= total_bytes)
7124 return 0;
7125 if (off == -1)
7126 off = 0;
7127 words = total_bytes / UNITS_PER_WORD;
7128
7129 for (byte = 0; byte < total_bytes; byte++)
7130 {
7131 int bitpos = byte * BITS_PER_UNIT;
7132 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7133 number of bytes. */
7134 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7135
7136 if (total_bytes > UNITS_PER_WORD)
7137 {
7138 word = byte / UNITS_PER_WORD;
7139 if (WORDS_BIG_ENDIAN)
7140 word = (words - 1) - word;
7141 offset = word * UNITS_PER_WORD;
7142 if (BYTES_BIG_ENDIAN)
7143 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7144 else
7145 offset += byte % UNITS_PER_WORD;
7146 }
7147 else
7148 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7149 if (offset >= off
7150 && offset - off < len)
7151 ptr[offset - off] = value;
7152 }
7153 return MIN (len, total_bytes - off);
7154 }
7155
7156
7157 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7158 specified by EXPR into the buffer PTR of length LEN bytes.
7159 Return the number of bytes placed in the buffer, or zero
7160 upon failure. */
7161
7162 static int
7163 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7164 {
7165 tree type = TREE_TYPE (expr);
7166 machine_mode mode = TYPE_MODE (type);
7167 int total_bytes = GET_MODE_SIZE (mode);
7168 FIXED_VALUE_TYPE value;
7169 tree i_value, i_type;
7170
7171 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7172 return 0;
7173
7174 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7175
7176 if (NULL_TREE == i_type
      || TYPE_PRECISION (i_type) != total_bytes * BITS_PER_UNIT)
7178 return 0;
7179
7180 value = TREE_FIXED_CST (expr);
7181 i_value = double_int_to_tree (i_type, value.data);
7182
7183 return native_encode_int (i_value, ptr, len, off);
7184 }
7185
7186
7187 /* Subroutine of native_encode_expr. Encode the REAL_CST
7188 specified by EXPR into the buffer PTR of length LEN bytes.
7189 Return the number of bytes placed in the buffer, or zero
7190 upon failure. */
7191
7192 static int
7193 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7194 {
7195 tree type = TREE_TYPE (expr);
7196 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7197 int byte, offset, word, words, bitpos;
7198 unsigned char value;
7199
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7203 long tmp[6];
7204
7205 if ((off == -1 && total_bytes > len)
7206 || off >= total_bytes)
7207 return 0;
7208 if (off == -1)
7209 off = 0;
7210 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7211
7212 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7213
7214 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7215 bitpos += BITS_PER_UNIT)
7216 {
7217 byte = (bitpos / BITS_PER_UNIT) & 3;
7218 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7219
7220 if (UNITS_PER_WORD < 4)
7221 {
7222 word = byte / UNITS_PER_WORD;
7223 if (WORDS_BIG_ENDIAN)
7224 word = (words - 1) - word;
7225 offset = word * UNITS_PER_WORD;
7226 if (BYTES_BIG_ENDIAN)
7227 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7228 else
7229 offset += byte % UNITS_PER_WORD;
7230 }
7231 else
7232 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7233 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7234 if (offset >= off
7235 && offset - off < len)
7236 ptr[offset - off] = value;
7237 }
7238 return MIN (len, total_bytes - off);
7239 }
7240
7241 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7242 specified by EXPR into the buffer PTR of length LEN bytes.
7243 Return the number of bytes placed in the buffer, or zero
7244 upon failure. */
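/* The real part is encoded first and the imaginary part directly
   after it; e.g. for a complex double the real bytes start at offset
   0 and the imaginary bytes follow (at offset 8, assuming 8-byte
   doubles).  */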
7245
7246 static int
7247 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7248 {
7249 int rsize, isize;
7250 tree part;
7251
7252 part = TREE_REALPART (expr);
7253 rsize = native_encode_expr (part, ptr, len, off);
7254 if (off == -1
7255 && rsize == 0)
7256 return 0;
7257 part = TREE_IMAGPART (expr);
7258 if (off != -1)
7259 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7260 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7261 if (off == -1
7262 && isize != rsize)
7263 return 0;
7264 return rsize + isize;
7265 }
7266
7267
7268 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7269 specified by EXPR into the buffer PTR of length LEN bytes.
7270 Return the number of bytes placed in the buffer, or zero
7271 upon failure. */
7272
7273 static int
7274 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7275 {
7276 unsigned i, count;
7277 int size, offset;
7278 tree itype, elem;
7279
7280 offset = 0;
7281 count = VECTOR_CST_NELTS (expr);
7282 itype = TREE_TYPE (TREE_TYPE (expr));
7283 size = GET_MODE_SIZE (TYPE_MODE (itype));
7284 for (i = 0; i < count; i++)
7285 {
7286 if (off >= size)
7287 {
7288 off -= size;
7289 continue;
7290 }
7291 elem = VECTOR_CST_ELT (expr, i);
7292 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7293 if ((off == -1 && res != size)
7294 || res == 0)
7295 return 0;
7296 offset += res;
7297 if (offset >= len)
7298 return offset;
7299 if (off != -1)
7300 off = 0;
7301 }
7302 return offset;
7303 }
7304
7305
7306 /* Subroutine of native_encode_expr. Encode the STRING_CST
7307 specified by EXPR into the buffer PTR of length LEN bytes.
7308 Return the number of bytes placed in the buffer, or zero
7309 upon failure. */
7310
7311 static int
7312 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7313 {
7314 tree type = TREE_TYPE (expr);
7315 HOST_WIDE_INT total_bytes;
7316
7317 if (TREE_CODE (type) != ARRAY_TYPE
7318 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7319 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7320 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7321 return 0;
7322 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7323 if ((off == -1 && total_bytes > len)
7324 || off >= total_bytes)
7325 return 0;
7326 if (off == -1)
7327 off = 0;
7328 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7329 {
7330 int written = 0;
7331 if (off < TREE_STRING_LENGTH (expr))
7332 {
7333 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7334 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7335 }
7336 memset (ptr + written, 0,
7337 MIN (total_bytes - written, len - written));
7338 }
7339 else
7340 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7341 return MIN (total_bytes - off, len);
7342 }
7343
7344
7345 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7346 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7347 buffer PTR of length LEN bytes. If OFF is not -1 then start
7348 the encoding at byte offset OFF and encode at most LEN bytes.
7349 Return the number of bytes placed in the buffer, or zero upon failure. */
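/* For example, fold_view_convert_expr below uses this routine to
   obtain the target byte representation of a constant and then
   reinterprets those bytes via native_interpret_expr.  */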
7350
7351 int
7352 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7353 {
  /* We don't support starting at a negative offset, and -1 is special.  */
7355 if (off < -1)
7356 return 0;
7357
7358 switch (TREE_CODE (expr))
7359 {
7360 case INTEGER_CST:
7361 return native_encode_int (expr, ptr, len, off);
7362
7363 case REAL_CST:
7364 return native_encode_real (expr, ptr, len, off);
7365
7366 case FIXED_CST:
7367 return native_encode_fixed (expr, ptr, len, off);
7368
7369 case COMPLEX_CST:
7370 return native_encode_complex (expr, ptr, len, off);
7371
7372 case VECTOR_CST:
7373 return native_encode_vector (expr, ptr, len, off);
7374
7375 case STRING_CST:
7376 return native_encode_string (expr, ptr, len, off);
7377
7378 default:
7379 return 0;
7380 }
7381 }
7382
7383
7384 /* Subroutine of native_interpret_expr. Interpret the contents of
7385 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7386 If the buffer cannot be interpreted, return NULL_TREE. */
7387
7388 static tree
7389 native_interpret_int (tree type, const unsigned char *ptr, int len)
7390 {
7391 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7392
7393 if (total_bytes > len
7394 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7395 return NULL_TREE;
7396
7397 wide_int result = wi::from_buffer (ptr, total_bytes);
7398
7399 return wide_int_to_tree (type, result);
7400 }
7401
7402
7403 /* Subroutine of native_interpret_expr. Interpret the contents of
7404 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7405 If the buffer cannot be interpreted, return NULL_TREE. */
7406
7407 static tree
7408 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7409 {
7410 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7411 double_int result;
7412 FIXED_VALUE_TYPE fixed_value;
7413
7414 if (total_bytes > len
7415 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7416 return NULL_TREE;
7417
7418 result = double_int::from_buffer (ptr, total_bytes);
7419 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7420
7421 return build_fixed (type, fixed_value);
7422 }
7423
7424
7425 /* Subroutine of native_interpret_expr. Interpret the contents of
7426 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7427 If the buffer cannot be interpreted, return NULL_TREE. */
7428
7429 static tree
7430 native_interpret_real (tree type, const unsigned char *ptr, int len)
7431 {
7432 machine_mode mode = TYPE_MODE (type);
7433 int total_bytes = GET_MODE_SIZE (mode);
7434 unsigned char value;
  /* There are always 32 bits in each long, no matter the size of
     the host's long.  We handle floating point representations with
     up to 192 bits.  */
7438 REAL_VALUE_TYPE r;
7439 long tmp[6];
7440
7441 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7442 if (total_bytes > len || total_bytes > 24)
7443 return NULL_TREE;
7444 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7445
7446 memset (tmp, 0, sizeof (tmp));
7447 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7448 bitpos += BITS_PER_UNIT)
7449 {
7450 /* Both OFFSET and BYTE index within a long;
7451 bitpos indexes the whole float. */
7452 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7453 if (UNITS_PER_WORD < 4)
7454 {
7455 int word = byte / UNITS_PER_WORD;
7456 if (WORDS_BIG_ENDIAN)
7457 word = (words - 1) - word;
7458 offset = word * UNITS_PER_WORD;
7459 if (BYTES_BIG_ENDIAN)
7460 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7461 else
7462 offset += byte % UNITS_PER_WORD;
7463 }
7464 else
7465 {
7466 offset = byte;
7467 if (BYTES_BIG_ENDIAN)
7468 {
7469 /* Reverse bytes within each long, or within the entire float
7470 if it's smaller than a long (for HFmode). */
7471 offset = MIN (3, total_bytes - 1) - offset;
7472 gcc_assert (offset >= 0);
7473 }
7474 }
7475 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7476
7477 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7478 }
7479
7480 real_from_target (&r, tmp, mode);
7481 return build_real (type, r);
7482 }
7483
7484
7485 /* Subroutine of native_interpret_expr. Interpret the contents of
7486 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7487 If the buffer cannot be interpreted, return NULL_TREE. */
7488
7489 static tree
7490 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7491 {
7492 tree etype, rpart, ipart;
7493 int size;
7494
7495 etype = TREE_TYPE (type);
7496 size = GET_MODE_SIZE (TYPE_MODE (etype));
7497 if (size * 2 > len)
7498 return NULL_TREE;
7499 rpart = native_interpret_expr (etype, ptr, size);
7500 if (!rpart)
7501 return NULL_TREE;
7502 ipart = native_interpret_expr (etype, ptr+size, size);
7503 if (!ipart)
7504 return NULL_TREE;
7505 return build_complex (type, rpart, ipart);
7506 }
7507
7508
7509 /* Subroutine of native_interpret_expr. Interpret the contents of
7510 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7511 If the buffer cannot be interpreted, return NULL_TREE. */
7512
7513 static tree
7514 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7515 {
7516 tree etype, elem;
7517 int i, size, count;
7518 tree *elements;
7519
7520 etype = TREE_TYPE (type);
7521 size = GET_MODE_SIZE (TYPE_MODE (etype));
7522 count = TYPE_VECTOR_SUBPARTS (type);
7523 if (size * count > len)
7524 return NULL_TREE;
7525
7526 elements = XALLOCAVEC (tree, count);
7527 for (i = count - 1; i >= 0; i--)
7528 {
7529 elem = native_interpret_expr (etype, ptr+(i*size), size);
7530 if (!elem)
7531 return NULL_TREE;
7532 elements[i] = elem;
7533 }
7534 return build_vector (type, elements);
7535 }
7536
7537
7538 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7539 the buffer PTR of length LEN as a constant of type TYPE. For
7540 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7541 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7542 return NULL_TREE. */
7543
7544 tree
7545 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7546 {
7547 switch (TREE_CODE (type))
7548 {
7549 case INTEGER_TYPE:
7550 case ENUMERAL_TYPE:
7551 case BOOLEAN_TYPE:
7552 case POINTER_TYPE:
7553 case REFERENCE_TYPE:
7554 return native_interpret_int (type, ptr, len);
7555
7556 case REAL_TYPE:
7557 return native_interpret_real (type, ptr, len);
7558
7559 case FIXED_POINT_TYPE:
7560 return native_interpret_fixed (type, ptr, len);
7561
7562 case COMPLEX_TYPE:
7563 return native_interpret_complex (type, ptr, len);
7564
7565 case VECTOR_TYPE:
7566 return native_interpret_vector (type, ptr, len);
7567
7568 default:
7569 return NULL_TREE;
7570 }
7571 }
7572
7573 /* Returns true if we can interpret the contents of a native encoding
7574 as TYPE. */
7575
7576 static bool
7577 can_native_interpret_type_p (tree type)
7578 {
7579 switch (TREE_CODE (type))
7580 {
7581 case INTEGER_TYPE:
7582 case ENUMERAL_TYPE:
7583 case BOOLEAN_TYPE:
7584 case POINTER_TYPE:
7585 case REFERENCE_TYPE:
7586 case FIXED_POINT_TYPE:
7587 case REAL_TYPE:
7588 case COMPLEX_TYPE:
7589 case VECTOR_TYPE:
7590 return true;
7591 default:
7592 return false;
7593 }
7594 }
7595
7596 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7597 TYPE at compile-time. If we're unable to perform the conversion
7598 return NULL_TREE. */
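/* For example, VIEW_CONVERT_EXPR<int>(1.0f) folds to 0x3f800000 on a
   target where float is the IEEE single-precision format and int is
   32 bits wide.  */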
7599
7600 static tree
7601 fold_view_convert_expr (tree type, tree expr)
7602 {
7603 /* We support up to 512-bit values (for V8DFmode). */
7604 unsigned char buffer[64];
7605 int len;
7606
7607 /* Check that the host and target are sane. */
7608 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7609 return NULL_TREE;
7610
7611 len = native_encode_expr (expr, buffer, sizeof (buffer));
7612 if (len == 0)
7613 return NULL_TREE;
7614
7615 return native_interpret_expr (type, buffer, len);
7616 }
7617
7618 /* Build an expression for the address of T. Folds away INDIRECT_REF
7619 to avoid confusing the gimplify process. */
7620
7621 tree
7622 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7623 {
7624 /* The size of the object is not relevant when talking about its address. */
7625 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7626 t = TREE_OPERAND (t, 0);
7627
7628 if (TREE_CODE (t) == INDIRECT_REF)
7629 {
7630 t = TREE_OPERAND (t, 0);
7631
7632 if (TREE_TYPE (t) != ptrtype)
7633 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7634 }
7635 else if (TREE_CODE (t) == MEM_REF
7636 && integer_zerop (TREE_OPERAND (t, 1)))
7637 return TREE_OPERAND (t, 0);
7638 else if (TREE_CODE (t) == MEM_REF
7639 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7640 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7641 TREE_OPERAND (t, 0),
7642 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7643 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7644 {
7645 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7646
7647 if (TREE_TYPE (t) != ptrtype)
7648 t = fold_convert_loc (loc, ptrtype, t);
7649 }
7650 else
7651 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7652
7653 return t;
7654 }
7655
7656 /* Build an expression for the address of T. */
7657
7658 tree
7659 build_fold_addr_expr_loc (location_t loc, tree t)
7660 {
7661 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7662
7663 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7664 }
7665
7666 /* Fold a unary expression of code CODE and type TYPE with operand
7667 OP0. Return the folded expression if folding is successful.
7668 Otherwise, return NULL_TREE. */
7669
7670 tree
7671 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7672 {
7673 tree tem;
7674 tree arg0;
7675 enum tree_code_class kind = TREE_CODE_CLASS (code);
7676
7677 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7678 && TREE_CODE_LENGTH (code) == 1);
7679
7680 arg0 = op0;
7681 if (arg0)
7682 {
7683 if (CONVERT_EXPR_CODE_P (code)
7684 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7685 {
7686 /* Don't use STRIP_NOPS, because signedness of argument type
7687 matters. */
7688 STRIP_SIGN_NOPS (arg0);
7689 }
7690 else
7691 {
7692 /* Strip any conversions that don't change the mode. This
7693 is safe for every expression, except for a comparison
7694 expression because its signedness is derived from its
7695 operands.
7696
7697 Note that this is done as an internal manipulation within
7698 the constant folder, in order to find the simplest
7699 representation of the arguments so that their form can be
	     studied.  In any case, the appropriate type conversions
7701 should be put back in the tree that will get out of the
7702 constant folder. */
7703 STRIP_NOPS (arg0);
7704 }
7705
7706 if (CONSTANT_CLASS_P (arg0))
7707 {
7708 tree tem = const_unop (code, type, arg0);
7709 if (tem)
7710 {
7711 if (TREE_TYPE (tem) != type)
7712 tem = fold_convert_loc (loc, type, tem);
7713 return tem;
7714 }
7715 }
7716 }
7717
7718 tem = generic_simplify (loc, code, type, op0);
7719 if (tem)
7720 return tem;
7721
7722 if (TREE_CODE_CLASS (code) == tcc_unary)
7723 {
7724 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7725 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7726 fold_build1_loc (loc, code, type,
7727 fold_convert_loc (loc, TREE_TYPE (op0),
7728 TREE_OPERAND (arg0, 1))));
7729 else if (TREE_CODE (arg0) == COND_EXPR)
7730 {
7731 tree arg01 = TREE_OPERAND (arg0, 1);
7732 tree arg02 = TREE_OPERAND (arg0, 2);
7733 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7734 arg01 = fold_build1_loc (loc, code, type,
7735 fold_convert_loc (loc,
7736 TREE_TYPE (op0), arg01));
7737 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7738 arg02 = fold_build1_loc (loc, code, type,
7739 fold_convert_loc (loc,
7740 TREE_TYPE (op0), arg02));
7741 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7742 arg01, arg02);
7743
	  /* If this was a conversion, and all we did was to move it
	     inside the COND_EXPR, bring it back out.  But leave it if
7746 it is a conversion from integer to integer and the
7747 result precision is no wider than a word since such a
7748 conversion is cheap and may be optimized away by combine,
7749 while it couldn't if it were outside the COND_EXPR. Then return
7750 so we don't get into an infinite recursion loop taking the
7751 conversion out and then back in. */
7752
7753 if ((CONVERT_EXPR_CODE_P (code)
7754 || code == NON_LVALUE_EXPR)
7755 && TREE_CODE (tem) == COND_EXPR
7756 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7757 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7758 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7759 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7760 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7761 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7762 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7763 && (INTEGRAL_TYPE_P
7764 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7765 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7766 || flag_syntax_only))
7767 tem = build1_loc (loc, code, type,
7768 build3 (COND_EXPR,
7769 TREE_TYPE (TREE_OPERAND
7770 (TREE_OPERAND (tem, 1), 0)),
7771 TREE_OPERAND (tem, 0),
7772 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7773 TREE_OPERAND (TREE_OPERAND (tem, 2),
7774 0)));
7775 return tem;
7776 }
7777 }
7778
7779 switch (code)
7780 {
7781 case NON_LVALUE_EXPR:
7782 if (!maybe_lvalue_p (op0))
7783 return fold_convert_loc (loc, type, op0);
7784 return NULL_TREE;
7785
7786 CASE_CONVERT:
7787 case FLOAT_EXPR:
7788 case FIX_TRUNC_EXPR:
7789 if (COMPARISON_CLASS_P (op0))
7790 {
7791 /* If we have (type) (a CMP b) and type is an integral type, return
7792 new expression involving the new type. Canonicalize
7793 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7794 non-integral type.
	     Do not fold the result, as that would not simplify further;
	     folding again would only result in infinite recursion.  */
7797 if (TREE_CODE (type) == BOOLEAN_TYPE)
7798 return build2_loc (loc, TREE_CODE (op0), type,
7799 TREE_OPERAND (op0, 0),
7800 TREE_OPERAND (op0, 1));
7801 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7802 && TREE_CODE (type) != VECTOR_TYPE)
7803 return build3_loc (loc, COND_EXPR, type, op0,
7804 constant_boolean_node (true, type),
7805 constant_boolean_node (false, type));
7806 }
7807
7808 /* Handle (T *)&A.B.C for A being of type T and B and C
7809 living at offset zero. This occurs frequently in
7810 C++ upcasting and then accessing the base. */
7811 if (TREE_CODE (op0) == ADDR_EXPR
7812 && POINTER_TYPE_P (type)
7813 && handled_component_p (TREE_OPERAND (op0, 0)))
7814 {
7815 HOST_WIDE_INT bitsize, bitpos;
7816 tree offset;
7817 machine_mode mode;
7818 int unsignedp, reversep, volatilep;
7819 tree base
7820 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7821 &offset, &mode, &unsignedp, &reversep,
7822 &volatilep, false);
7823 /* If the reference was to a (constant) zero offset, we can use
7824 the address of the base if it has the same base type
7825 as the result type and the pointer type is unqualified. */
7826 if (! offset && bitpos == 0
7827 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7828 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7829 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7830 return fold_convert_loc (loc, type,
7831 build_fold_addr_expr_loc (loc, base));
7832 }
7833
7834 if (TREE_CODE (op0) == MODIFY_EXPR
7835 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7836 /* Detect assigning a bitfield. */
7837 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7838 && DECL_BIT_FIELD
7839 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7840 {
7841 /* Don't leave an assignment inside a conversion
7842 unless assigning a bitfield. */
7843 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7844 /* First do the assignment, then return converted constant. */
7845 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7846 TREE_NO_WARNING (tem) = 1;
7847 TREE_USED (tem) = 1;
7848 return tem;
7849 }
7850
      /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
	 constant (if x has signed type, the sign bit cannot be set
7853 in c). This folds extension into the BIT_AND_EXPR.
7854 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7855 very likely don't have maximal range for their precision and this
7856 transformation effectively doesn't preserve non-maximal ranges. */
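      /* For example, for a signed char C, "(unsigned int) (C & 0x7f)"
	 becomes "(unsigned int) C & 0x7f", since 0x7f cannot have the
	 sign bit of C set.  */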
7857 if (TREE_CODE (type) == INTEGER_TYPE
7858 && TREE_CODE (op0) == BIT_AND_EXPR
7859 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7860 {
7861 tree and_expr = op0;
7862 tree and0 = TREE_OPERAND (and_expr, 0);
7863 tree and1 = TREE_OPERAND (and_expr, 1);
7864 int change = 0;
7865
7866 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7867 || (TYPE_PRECISION (type)
7868 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7869 change = 1;
7870 else if (TYPE_PRECISION (TREE_TYPE (and1))
7871 <= HOST_BITS_PER_WIDE_INT
7872 && tree_fits_uhwi_p (and1))
7873 {
7874 unsigned HOST_WIDE_INT cst;
7875
7876 cst = tree_to_uhwi (and1);
7877 cst &= HOST_WIDE_INT_M1U
7878 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7879 change = (cst == 0);
7880 if (change
7881 && !flag_syntax_only
7882 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7883 == ZERO_EXTEND))
7884 {
7885 tree uns = unsigned_type_for (TREE_TYPE (and0));
7886 and0 = fold_convert_loc (loc, uns, and0);
7887 and1 = fold_convert_loc (loc, uns, and1);
7888 }
7889 }
7890 if (change)
7891 {
7892 tem = force_fit_type (type, wi::to_widest (and1), 0,
7893 TREE_OVERFLOW (and1));
7894 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7895 fold_convert_loc (loc, type, and0), tem);
7896 }
7897 }
7898
7899 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7900 cast (T1)X will fold away. We assume that this happens when X itself
7901 is a cast. */
7902 if (POINTER_TYPE_P (type)
7903 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7904 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7905 {
7906 tree arg00 = TREE_OPERAND (arg0, 0);
7907 tree arg01 = TREE_OPERAND (arg0, 1);
7908
7909 return fold_build_pointer_plus_loc
7910 (loc, fold_convert_loc (loc, type, arg00), arg01);
7911 }
7912
7913 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7914 of the same precision, and X is an integer type not narrower than
7915 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
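      /* For example, "(int) ~(unsigned int) x" for an int x becomes
	 "~x", as int and unsigned int have the same precision.  */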
7916 if (INTEGRAL_TYPE_P (type)
7917 && TREE_CODE (op0) == BIT_NOT_EXPR
7918 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7919 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7920 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7921 {
7922 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7923 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7924 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7925 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7926 fold_convert_loc (loc, type, tem));
7927 }
7928
7929 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7930 type of X and Y (integer types only). */
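      /* For example, "(short) (x * y)" with int operands becomes
	 "(short) ((unsigned short) x * (unsigned short) y)"; the
	 unsigned intermediate type avoids introducing a new signed
	 overflow in the narrower multiplication.  */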
7931 if (INTEGRAL_TYPE_P (type)
7932 && TREE_CODE (op0) == MULT_EXPR
7933 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7934 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7935 {
7936 /* Be careful not to introduce new overflows. */
7937 tree mult_type;
7938 if (TYPE_OVERFLOW_WRAPS (type))
7939 mult_type = type;
7940 else
7941 mult_type = unsigned_type_for (type);
7942
7943 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7944 {
7945 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7946 fold_convert_loc (loc, mult_type,
7947 TREE_OPERAND (op0, 0)),
7948 fold_convert_loc (loc, mult_type,
7949 TREE_OPERAND (op0, 1)));
7950 return fold_convert_loc (loc, type, tem);
7951 }
7952 }
7953
7954 return NULL_TREE;
7955
7956 case VIEW_CONVERT_EXPR:
7957 if (TREE_CODE (op0) == MEM_REF)
7958 {
7959 tem = fold_build2_loc (loc, MEM_REF, type,
7960 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7961 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7962 return tem;
7963 }
7964
7965 return NULL_TREE;
7966
7967 case NEGATE_EXPR:
7968 tem = fold_negate_expr (loc, arg0);
7969 if (tem)
7970 return fold_convert_loc (loc, type, tem);
7971 return NULL_TREE;
7972
7973 case ABS_EXPR:
7974 /* Convert fabs((double)float) into (double)fabsf(float). */
7975 if (TREE_CODE (arg0) == NOP_EXPR
7976 && TREE_CODE (type) == REAL_TYPE)
7977 {
7978 tree targ0 = strip_float_extensions (arg0);
7979 if (targ0 != arg0)
7980 return fold_convert_loc (loc, type,
7981 fold_build1_loc (loc, ABS_EXPR,
7982 TREE_TYPE (targ0),
7983 targ0));
7984 }
7985 return NULL_TREE;
7986
7987 case BIT_NOT_EXPR:
7988 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7989 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7990 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7991 fold_convert_loc (loc, type,
7992 TREE_OPERAND (arg0, 0)))))
7993 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7994 fold_convert_loc (loc, type,
7995 TREE_OPERAND (arg0, 1)));
7996 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7997 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7998 fold_convert_loc (loc, type,
7999 TREE_OPERAND (arg0, 1)))))
8000 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
8001 fold_convert_loc (loc, type,
8002 TREE_OPERAND (arg0, 0)), tem);
8003
8004 return NULL_TREE;
8005
8006 case TRUTH_NOT_EXPR:
8007 /* Note that the operand of this must be an int
8008 and its values must be 0 or 1.
8009 ("true" is a fixed value perhaps depending on the language,
8010 but we don't handle values other than 1 correctly yet.) */
8011 tem = fold_truth_not_expr (loc, arg0);
8012 if (!tem)
8013 return NULL_TREE;
8014 return fold_convert_loc (loc, type, tem);
8015
8016 case INDIRECT_REF:
8017 /* Fold *&X to X if X is an lvalue. */
8018 if (TREE_CODE (op0) == ADDR_EXPR)
8019 {
8020 tree op00 = TREE_OPERAND (op0, 0);
8021 if ((TREE_CODE (op00) == VAR_DECL
8022 || TREE_CODE (op00) == PARM_DECL
8023 || TREE_CODE (op00) == RESULT_DECL)
8024 && !TREE_READONLY (op00))
8025 return op00;
8026 }
8027 return NULL_TREE;
8028
8029 default:
8030 return NULL_TREE;
8031 } /* switch (code) */
8032 }
8033
8034
8035 /* If the operation was a conversion do _not_ mark a resulting constant
8036 with TREE_OVERFLOW if the original constant was not. These conversions
8037 have implementation defined behavior and retaining the TREE_OVERFLOW
8038 flag here would confuse later passes such as VRP. */
8039 tree
8040 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
8041 tree type, tree op0)
8042 {
8043 tree res = fold_unary_loc (loc, code, type, op0);
8044 if (res
8045 && TREE_CODE (res) == INTEGER_CST
8046 && TREE_CODE (op0) == INTEGER_CST
8047 && CONVERT_EXPR_CODE_P (code))
8048 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
8049
8050 return res;
8051 }
8052
8053 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
8054 operands OP0 and OP1. LOC is the location of the resulting expression.
   ARG0 and ARG1 are OP0 and OP1 with conversion NOPs stripped.
8056 Return the folded expression if folding is successful. Otherwise,
8057 return NULL_TREE. */
8058 static tree
8059 fold_truth_andor (location_t loc, enum tree_code code, tree type,
8060 tree arg0, tree arg1, tree op0, tree op1)
8061 {
8062 tree tem;
8063
8064 /* We only do these simplifications if we are optimizing. */
8065 if (!optimize)
8066 return NULL_TREE;
8067
8068 /* Check for things like (A || B) && (A || C). We can convert this
8069 to A || (B && C). Note that either operator can be any of the four
8070 truth and/or operations and the transformation will still be
8071 valid. Also note that we only care about order for the
8072 ANDIF and ORIF operators. If B contains side effects, this
8073 might change the truth-value of A. */
8074 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8075 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8076 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8077 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8078 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8079 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8080 {
8081 tree a00 = TREE_OPERAND (arg0, 0);
8082 tree a01 = TREE_OPERAND (arg0, 1);
8083 tree a10 = TREE_OPERAND (arg1, 0);
8084 tree a11 = TREE_OPERAND (arg1, 1);
8085 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8086 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8087 && (code == TRUTH_AND_EXPR
8088 || code == TRUTH_OR_EXPR));
8089
8090 if (operand_equal_p (a00, a10, 0))
8091 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8092 fold_build2_loc (loc, code, type, a01, a11));
8093 else if (commutative && operand_equal_p (a00, a11, 0))
8094 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8095 fold_build2_loc (loc, code, type, a01, a10));
8096 else if (commutative && operand_equal_p (a01, a10, 0))
8097 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8098 fold_build2_loc (loc, code, type, a00, a11));
8099
      /* This case is tricky because we must either have commutative
	 operators or else A10 must not have side-effects.  */
8102
8103 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8104 && operand_equal_p (a01, a11, 0))
8105 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8106 fold_build2_loc (loc, code, type, a00, a10),
8107 a01);
8108 }
8109
8110 /* See if we can build a range comparison. */
8111 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8112 return tem;
8113
8114 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8115 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8116 {
8117 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8118 if (tem)
8119 return fold_build2_loc (loc, code, type, tem, arg1);
8120 }
8121
8122 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8123 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8124 {
8125 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8126 if (tem)
8127 return fold_build2_loc (loc, code, type, arg0, tem);
8128 }
8129
8130 /* Check for the possibility of merging component references. If our
8131 lhs is another similar operation, try to merge its rhs with our
8132 rhs. Then try to merge our lhs and rhs. */
8133 if (TREE_CODE (arg0) == code
8134 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8135 TREE_OPERAND (arg0, 1), arg1)))
8136 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8137
8138 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8139 return tem;
8140
8141 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8142 && (code == TRUTH_AND_EXPR
8143 || code == TRUTH_ANDIF_EXPR
8144 || code == TRUTH_OR_EXPR
8145 || code == TRUTH_ORIF_EXPR))
8146 {
8147 enum tree_code ncode, icode;
8148
8149 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8150 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8151 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8152
      /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
	 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C)).
	 We don't want to pack more than two leaves into a non-IF AND/OR
	 expression.
	 If the tree code of the left-hand operand isn't an AND/OR-IF code
	 and isn't equal to IF-CODE, then we don't want to add the
	 right-hand operand.
	 If the inner right-hand side of the left-hand operand has
	 side-effects, or isn't simple, then we can't add to it,
	 as otherwise we might destroy the if-sequence.  */
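      /* For instance, "(a && b) && c" becomes
	 TRUTH_ANDIF_EXPR (a, TRUTH_AND_EXPR (b, c)) when b and c are
	 simple enough to be evaluated unconditionally.  */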
8162 if (TREE_CODE (arg0) == icode
8163 && simple_operand_p_2 (arg1)
	  /* Needed for sequence points, to handle traps and
	     side-effects.  */
8166 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8167 {
8168 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8169 arg1);
8170 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8171 tem);
8172 }
      /* Same as above but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
	 or (A OR[-IF] (B OR-IF C)) -> ((A OR B) OR-IF C).  */
8175 else if (TREE_CODE (arg1) == icode
8176 && simple_operand_p_2 (arg0)
	       /* Needed for sequence points, to handle traps and
		  side-effects.  */
8179 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8180 {
8181 tem = fold_build2_loc (loc, ncode, type,
8182 arg0, TREE_OPERAND (arg1, 0));
8183 return fold_build2_loc (loc, icode, type, tem,
8184 TREE_OPERAND (arg1, 1));
8185 }
8186 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8187 into (A OR B).
	 For sequence point consistency, we need to check for trapping
	 and side-effects.  */
8190 else if (code == icode && simple_operand_p_2 (arg0)
8191 && simple_operand_p_2 (arg1))
8192 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8193 }
8194
8195 return NULL_TREE;
8196 }
8197
8198 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8199 by changing CODE to reduce the magnitude of constants involved in
8200 ARG0 of the comparison.
8201 Returns a canonicalized comparison tree if a simplification was
8202 possible, otherwise returns NULL_TREE.
8203 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8204 valid if signed overflow is undefined. */
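/* For example, with a signed A, "A - 2 < B" is canonicalized to
   "A - 1 <= B", reducing the magnitude of the constant.  */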
8205
8206 static tree
8207 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8208 tree arg0, tree arg1,
8209 bool *strict_overflow_p)
8210 {
8211 enum tree_code code0 = TREE_CODE (arg0);
8212 tree t, cst0 = NULL_TREE;
8213 int sgn0;
8214
8215 /* Match A +- CST code arg1. We can change this only if overflow
8216 is undefined. */
8217 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8218 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8219 /* In principle pointers also have undefined overflow behavior,
8220 but that causes problems elsewhere. */
8221 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8222 && (code0 == MINUS_EXPR
8223 || code0 == PLUS_EXPR)
8224 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8225 return NULL_TREE;
8226
8227 /* Identify the constant in arg0 and its sign. */
8228 cst0 = TREE_OPERAND (arg0, 1);
8229 sgn0 = tree_int_cst_sgn (cst0);
8230
8231 /* Overflowed constants and zero will cause problems. */
8232 if (integer_zerop (cst0)
8233 || TREE_OVERFLOW (cst0))
8234 return NULL_TREE;
8235
8236 /* See if we can reduce the magnitude of the constant in
8237 arg0 by changing the comparison code. */
8238 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8239 if (code == LT_EXPR
8240 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8241 code = LE_EXPR;
8242 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8243 else if (code == GT_EXPR
8244 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8245 code = GE_EXPR;
8246 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8247 else if (code == LE_EXPR
8248 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8249 code = LT_EXPR;
8250 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8251 else if (code == GE_EXPR
8252 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8253 code = GT_EXPR;
8254 else
8255 return NULL_TREE;
8256 *strict_overflow_p = true;
8257
  /* Now build the constant reduced in magnitude.  But not if that
     would produce one outside of its type's range.  */
8260 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8261 && ((sgn0 == 1
8262 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8263 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8264 || (sgn0 == -1
8265 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8266 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8267 return NULL_TREE;
8268
8269 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8270 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8271 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8272 t = fold_convert (TREE_TYPE (arg1), t);
8273
8274 return fold_build2_loc (loc, code, type, t, arg1);
8275 }
8276
8277 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8278 overflow further. Try to decrease the magnitude of constants involved
8279 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8280 and put sole constants at the second argument position.
8281 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8282
8283 static tree
8284 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8285 tree arg0, tree arg1)
8286 {
8287 tree t;
8288 bool strict_overflow_p;
8289 const char * const warnmsg = G_("assuming signed overflow does not occur "
8290 "when reducing constant in comparison");
8291
8292 /* Try canonicalization by simplifying arg0. */
8293 strict_overflow_p = false;
8294 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8295 &strict_overflow_p);
8296 if (t)
8297 {
8298 if (strict_overflow_p)
8299 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8300 return t;
8301 }
8302
8303 /* Try canonicalization by simplifying arg1 using the swapped
8304 comparison. */
8305 code = swap_tree_comparison (code);
8306 strict_overflow_p = false;
8307 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8308 &strict_overflow_p);
8309 if (t && strict_overflow_p)
8310 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8311 return t;
8312 }
8313
8314 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8315 space. This is used to avoid issuing overflow warnings for
   expressions like &p->x which cannot wrap.  */
8317
8318 static bool
8319 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8320 {
8321 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8322 return true;
8323
8324 if (bitpos < 0)
8325 return true;
8326
8327 wide_int wi_offset;
8328 int precision = TYPE_PRECISION (TREE_TYPE (base));
8329 if (offset == NULL_TREE)
8330 wi_offset = wi::zero (precision);
8331 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8332 return true;
8333 else
8334 wi_offset = offset;
8335
8336 bool overflow;
8337 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8338 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8339 if (overflow)
8340 return true;
8341
8342 if (!wi::fits_uhwi_p (total))
8343 return true;
8344
8345 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8346 if (size <= 0)
8347 return true;
8348
8349 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8350 array. */
8351 if (TREE_CODE (base) == ADDR_EXPR)
8352 {
8353 HOST_WIDE_INT base_size;
8354
8355 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8356 if (base_size > 0 && size < base_size)
8357 size = base_size;
8358 }
8359
8360 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8361 }
8362
8363 /* Return a positive integer when the symbol DECL is known to have
8364 a nonzero address, zero when it's known not to (e.g., it's a weak
8365 symbol), and a negative integer when the symbol is not yet in the
8366 symbol table and so whether or not its address is zero is unknown. */
8367 static int
8368 maybe_nonzero_address (tree decl)
8369 {
8370 if (DECL_P (decl) && decl_in_symtab_p (decl))
8371 if (struct symtab_node *symbol = symtab_node::get_create (decl))
8372 return symbol->nonzero_address ();
8373
8374 return -1;
8375 }
8376
8377 /* Subroutine of fold_binary. This routine performs all of the
8378 transformations that are common to the equality/inequality
8379 operators (EQ_EXPR and NE_EXPR) and the ordering operators
   (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR).  Callers other than
   fold_binary itself should go through fold_binary.  Fold a comparison with
8382 tree code CODE and type TYPE with operands OP0 and OP1. Return
8383 the folded comparison or NULL_TREE. */
8384
8385 static tree
8386 fold_comparison (location_t loc, enum tree_code code, tree type,
8387 tree op0, tree op1)
8388 {
8389 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8390 tree arg0, arg1, tem;
8391
8392 arg0 = op0;
8393 arg1 = op1;
8394
8395 STRIP_SIGN_NOPS (arg0);
8396 STRIP_SIGN_NOPS (arg1);
8397
8398 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
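  /* For instance, for a signed X, "X + 10 < 20" becomes "X < 10".  */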
8399 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8400 && (equality_code
8401 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8402 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8403 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8404 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8405 && TREE_CODE (arg1) == INTEGER_CST
8406 && !TREE_OVERFLOW (arg1))
8407 {
8408 const enum tree_code
8409 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8410 tree const1 = TREE_OPERAND (arg0, 1);
8411 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8412 tree variable = TREE_OPERAND (arg0, 0);
8413 tree new_const = int_const_binop (reverse_op, const2, const1);
8414
8415 /* If the constant operation overflowed this can be
8416 simplified as a comparison against INT_MAX/INT_MIN. */
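	  /* For example, "X - 1 < INT_MAX" folds to true: since
	     X - 1 must not overflow, X - 1 is at most INT_MAX - 1.  */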
8417 if (TREE_OVERFLOW (new_const)
8418 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8419 {
8420 int const1_sgn = tree_int_cst_sgn (const1);
8421 enum tree_code code2 = code;
8422
8423 /* Get the sign of the constant on the lhs if the
8424 operation were VARIABLE + CONST1. */
8425 if (TREE_CODE (arg0) == MINUS_EXPR)
8426 const1_sgn = -const1_sgn;
8427
8428 /* The sign of the constant determines if we overflowed
8429 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8430 Canonicalize to the INT_MIN overflow by swapping the comparison
8431 if necessary. */
8432 if (const1_sgn == -1)
8433 code2 = swap_tree_comparison (code);
8434
8435 /* We now can look at the canonicalized case
8436 VARIABLE + 1 CODE2 INT_MIN
8437 and decide on the result. */
8438 switch (code2)
8439 {
8440 case EQ_EXPR:
8441 case LT_EXPR:
8442 case LE_EXPR:
8443 return
8444 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8445
8446 case NE_EXPR:
8447 case GE_EXPR:
8448 case GT_EXPR:
8449 return
8450 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8451
8452 default:
8453 gcc_unreachable ();
8454 }
8455 }
8456 else
8457 {
8458 if (!equality_code)
8459 fold_overflow_warning ("assuming signed overflow does not occur "
8460 "when changing X +- C1 cmp C2 to "
8461 "X cmp C2 -+ C1",
8462 WARN_STRICT_OVERFLOW_COMPARISON);
8463 return fold_build2_loc (loc, code, type, variable, new_const);
8464 }
8465 }
8466
8467 /* For comparisons of pointers we can decompose it to a compile time
8468 comparison of the base objects and the offsets into the object.
8469 This requires at least one operand being an ADDR_EXPR or a
8470 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
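  /* For example, "&s.a == &s.b" for distinct fields a and b can be
     decided at compile time by comparing the two field offsets.  */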
8471 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8472 && (TREE_CODE (arg0) == ADDR_EXPR
8473 || TREE_CODE (arg1) == ADDR_EXPR
8474 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8475 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8476 {
8477 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8478 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8479 machine_mode mode;
8480 int volatilep, reversep, unsignedp;
8481 bool indirect_base0 = false, indirect_base1 = false;
8482
8483 /* Get base and offset for the access. Strip ADDR_EXPR for
8484 get_inner_reference, but put it back by stripping INDIRECT_REF
8485 off the base object if possible. indirect_baseN will be true
8486 if baseN is not an address but refers to the object itself. */
8487 base0 = arg0;
8488 if (TREE_CODE (arg0) == ADDR_EXPR)
8489 {
8490 base0
8491 = get_inner_reference (TREE_OPERAND (arg0, 0),
8492 &bitsize, &bitpos0, &offset0, &mode,
8493 &unsignedp, &reversep, &volatilep, false);
8494 if (TREE_CODE (base0) == INDIRECT_REF)
8495 base0 = TREE_OPERAND (base0, 0);
8496 else
8497 indirect_base0 = true;
8498 }
8499 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8500 {
8501 base0 = TREE_OPERAND (arg0, 0);
8502 STRIP_SIGN_NOPS (base0);
8503 if (TREE_CODE (base0) == ADDR_EXPR)
8504 {
8505 base0
8506 = get_inner_reference (TREE_OPERAND (base0, 0),
8507 &bitsize, &bitpos0, &offset0, &mode,
8508 &unsignedp, &reversep, &volatilep,
8509 false);
8510 if (TREE_CODE (base0) == INDIRECT_REF)
8511 base0 = TREE_OPERAND (base0, 0);
8512 else
8513 indirect_base0 = true;
8514 }
8515 if (offset0 == NULL_TREE || integer_zerop (offset0))
8516 offset0 = TREE_OPERAND (arg0, 1);
8517 else
8518 offset0 = size_binop (PLUS_EXPR, offset0,
8519 TREE_OPERAND (arg0, 1));
8520 if (TREE_CODE (offset0) == INTEGER_CST)
8521 {
8522 offset_int tem = wi::sext (wi::to_offset (offset0),
8523 TYPE_PRECISION (sizetype));
8524 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8525 tem += bitpos0;
8526 if (wi::fits_shwi_p (tem))
8527 {
8528 bitpos0 = tem.to_shwi ();
8529 offset0 = NULL_TREE;
8530 }
8531 }
8532 }
8533
8534 base1 = arg1;
8535 if (TREE_CODE (arg1) == ADDR_EXPR)
8536 {
8537 base1
8538 = get_inner_reference (TREE_OPERAND (arg1, 0),
8539 &bitsize, &bitpos1, &offset1, &mode,
8540 &unsignedp, &reversep, &volatilep, false);
8541 if (TREE_CODE (base1) == INDIRECT_REF)
8542 base1 = TREE_OPERAND (base1, 0);
8543 else
8544 indirect_base1 = true;
8545 }
8546 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8547 {
8548 base1 = TREE_OPERAND (arg1, 0);
8549 STRIP_SIGN_NOPS (base1);
8550 if (TREE_CODE (base1) == ADDR_EXPR)
8551 {
8552 base1
8553 = get_inner_reference (TREE_OPERAND (base1, 0),
8554 &bitsize, &bitpos1, &offset1, &mode,
8555 &unsignedp, &reversep, &volatilep,
8556 false);
8557 if (TREE_CODE (base1) == INDIRECT_REF)
8558 base1 = TREE_OPERAND (base1, 0);
8559 else
8560 indirect_base1 = true;
8561 }
8562 if (offset1 == NULL_TREE || integer_zerop (offset1))
8563 offset1 = TREE_OPERAND (arg1, 1);
8564 else
8565 offset1 = size_binop (PLUS_EXPR, offset1,
8566 TREE_OPERAND (arg1, 1));
8567 if (TREE_CODE (offset1) == INTEGER_CST)
8568 {
8569 offset_int tem = wi::sext (wi::to_offset (offset1),
8570 TYPE_PRECISION (sizetype));
8571 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8572 tem += bitpos1;
8573 if (wi::fits_shwi_p (tem))
8574 {
8575 bitpos1 = tem.to_shwi ();
8576 offset1 = NULL_TREE;
8577 }
8578 }
8579 }
8580
8581 /* If we have equivalent bases we might be able to simplify. */
8582 if (indirect_base0 == indirect_base1
8583 && operand_equal_p (base0, base1,
8584 indirect_base0 ? OEP_ADDRESS_OF : 0))
8585 {
8586 /* We can fold this expression to a constant if the non-constant
8587 offset parts are equal. */
8588 if ((offset0 == offset1
8589 || (offset0 && offset1
8590 && operand_equal_p (offset0, offset1, 0)))
8591 && (code == EQ_EXPR
8592 || code == NE_EXPR
8593 || (indirect_base0 && DECL_P (base0))
8594 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8596 {
8597 if (!equality_code
8598 && bitpos0 != bitpos1
8599 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8600 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8601 fold_overflow_warning (("assuming pointer wraparound does not "
8602 "occur when comparing P +- C1 with "
8603 "P +- C2"),
8604 WARN_STRICT_OVERFLOW_CONDITIONAL);
8605
8606 switch (code)
8607 {
8608 case EQ_EXPR:
8609 return constant_boolean_node (bitpos0 == bitpos1, type);
8610 case NE_EXPR:
8611 return constant_boolean_node (bitpos0 != bitpos1, type);
8612 case LT_EXPR:
8613 return constant_boolean_node (bitpos0 < bitpos1, type);
8614 case LE_EXPR:
8615 return constant_boolean_node (bitpos0 <= bitpos1, type);
8616 case GE_EXPR:
8617 return constant_boolean_node (bitpos0 >= bitpos1, type);
8618 case GT_EXPR:
8619 return constant_boolean_node (bitpos0 > bitpos1, type);
8620 default:;
8621 }
8622 }
8623 /* We can simplify the comparison to a comparison of the variable
8624 offset parts if the constant offset parts are equal.
8625 Be careful to use signed sizetype here because otherwise we
8626 mess with array offsets in the wrong way. This is possible
	    because pointer arithmetic is restricted to remain within an
8628 object and overflow on pointer differences is undefined as of
8629 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8630 else if (bitpos0 == bitpos1
8631 && (equality_code
8632 || (indirect_base0 && DECL_P (base0))
8633 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8634 {
8635 /* By converting to signed sizetype we cover middle-end pointer
8636 arithmetic which operates on unsigned pointer types of size
8637 type size and ARRAY_REF offsets which are properly sign or
8638 zero extended from their type in case it is narrower than
8639 sizetype. */
8640 if (offset0 == NULL_TREE)
8641 offset0 = build_int_cst (ssizetype, 0);
8642 else
8643 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8644 if (offset1 == NULL_TREE)
8645 offset1 = build_int_cst (ssizetype, 0);
8646 else
8647 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8648
8649 if (!equality_code
8650 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8651 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8652 fold_overflow_warning (("assuming pointer wraparound does not "
8653 "occur when comparing P +- C1 with "
8654 "P +- C2"),
8655 WARN_STRICT_OVERFLOW_COMPARISON);
8656
8657 return fold_build2_loc (loc, code, type, offset0, offset1);
8658 }
8659 }
8660 /* For equal offsets we can simplify to a comparison of the
8661 base addresses. */
8662 else if (bitpos0 == bitpos1
8663 && (indirect_base0
8664 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8665 && (indirect_base1
8666 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8667 && ((offset0 == offset1)
8668 || (offset0 && offset1
8669 && operand_equal_p (offset0, offset1, 0))))
8670 {
8671 if (indirect_base0)
8672 base0 = build_fold_addr_expr_loc (loc, base0);
8673 if (indirect_base1)
8674 base1 = build_fold_addr_expr_loc (loc, base1);
8675 return fold_build2_loc (loc, code, type, base0, base1);
8676 }
8677 /* Comparison between an ordinary (non-weak) symbol and a null
8678 pointer can be eliminated since such symbols must have a
8679 non-null address. In C, relational expressions between pointers
8680 to objects and null pointers are undefined. The results
8681 below follow the C++ rules with the additional property that
8682 every object pointer compares greater than a null pointer.
8683 */
8684 else if (DECL_P (base0)
8685 && maybe_nonzero_address (base0) > 0
8686 /* Avoid folding references to struct members at offset 0 to
8687 prevent tests like '&ptr->firstmember == 0' from getting
8688 eliminated. When ptr is null, although the -> expression
8689 is strictly speaking invalid, GCC retains it as a matter
8690 of QoI. See PR c/44555. */
8691 && (offset0 == NULL_TREE && bitpos0 != 0)
8692 /* The caller guarantees that when one of the arguments is
8693 constant (i.e., null in this case) it is second. */
8694 && integer_zerop (arg1))
8695 {
8696 switch (code)
8697 {
8698 case EQ_EXPR:
8699 case LE_EXPR:
8700 case LT_EXPR:
8701 return constant_boolean_node (false, type);
8702 case GE_EXPR:
8703 case GT_EXPR:
8704 case NE_EXPR:
8705 return constant_boolean_node (true, type);
8706 default:
8707 gcc_unreachable ();
8708 }
8709 }
8710 }
8711
8712 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8713 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8714 the resulting offset is smaller in absolute value than the
8715 original one and has the same sign. */
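/* Illustrative example (added here; not in the original source): for
   signed X and Y, "X + 2 < Y + 5" becomes "X < Y + 3", since 3 is
   smaller in absolute value than 5 and has the same sign. */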
8716 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8717 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8718 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8719 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8720 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8721 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8722 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8723 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8724 {
8725 tree const1 = TREE_OPERAND (arg0, 1);
8726 tree const2 = TREE_OPERAND (arg1, 1);
8727 tree variable1 = TREE_OPERAND (arg0, 0);
8728 tree variable2 = TREE_OPERAND (arg1, 0);
8729 tree cst;
8730 const char * const warnmsg = G_("assuming signed overflow does not "
8731 "occur when combining constants around "
8732 "a comparison");
8733
8734 /* Put the constant on the side where it doesn't overflow and is
8735 of lower absolute value than before and of the same sign. */
8736 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8737 ? MINUS_EXPR : PLUS_EXPR,
8738 const2, const1);
8739 if (!TREE_OVERFLOW (cst)
8740 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8741 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8742 {
8743 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8744 return fold_build2_loc (loc, code, type,
8745 variable1,
8746 fold_build2_loc (loc, TREE_CODE (arg1),
8747 TREE_TYPE (arg1),
8748 variable2, cst));
8749 }
8750
8751 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8752 ? MINUS_EXPR : PLUS_EXPR,
8753 const1, const2);
8754 if (!TREE_OVERFLOW (cst)
8755 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8756 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8757 {
8758 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8759 return fold_build2_loc (loc, code, type,
8760 fold_build2_loc (loc, TREE_CODE (arg0),
8761 TREE_TYPE (arg0),
8762 variable1, cst),
8763 variable2);
8764 }
8765 }
8766
8767 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8768 if (tem)
8769 return tem;
8770
8771 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8772 constant, we can simplify it. */
8773 if (TREE_CODE (arg1) == INTEGER_CST
8774 && (TREE_CODE (arg0) == MIN_EXPR
8775 || TREE_CODE (arg0) == MAX_EXPR)
8776 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8777 {
8778 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8779 if (tem)
8780 return tem;
8781 }
8782
8783 /* If we are comparing an expression that just has comparisons
8784 of two integer values, arithmetic expressions of those comparisons,
8785 and constants, we can simplify it. There are only three cases
8786 to check: the two values can either be equal, the first can be
8787 greater, or the second can be greater. Fold the expression for
8788 those three values. Since each value must be 0 or 1, we have
8789 eight possibilities, each of which corresponds to the constant 0
8790 or 1 or one of the six possible comparisons.
8791
8792 This handles common cases like (a > b) == 0 but also handles
8793 expressions like ((x > y) - (y > x)) > 0, which supposedly
8794 occur in macroized code. */
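/* Illustrative example (added here; not in the original source): for
   ((x > y) - (y > x)) > 0, substituting the three orderings of the two
   values yields (1, 0, 0) for (greater, equal, less), i.e. mask 4 in
   the switch below, so the whole expression folds to x > y. */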
8795
8796 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8797 {
8798 tree cval1 = 0, cval2 = 0;
8799 int save_p = 0;
8800
8801 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8802 /* Don't handle degenerate cases here; they should already
8803 have been handled anyway. */
8804 && cval1 != 0 && cval2 != 0
8805 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8806 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8807 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8808 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8809 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8810 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8811 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8812 {
8813 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8814 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8815
8816 /* We can't just pass T to eval_subst in case cval1 or cval2
8817 was the same as ARG1. */
8818
8819 tree high_result
8820 = fold_build2_loc (loc, code, type,
8821 eval_subst (loc, arg0, cval1, maxval,
8822 cval2, minval),
8823 arg1);
8824 tree equal_result
8825 = fold_build2_loc (loc, code, type,
8826 eval_subst (loc, arg0, cval1, maxval,
8827 cval2, maxval),
8828 arg1);
8829 tree low_result
8830 = fold_build2_loc (loc, code, type,
8831 eval_subst (loc, arg0, cval1, minval,
8832 cval2, maxval),
8833 arg1);
8834
8835 /* All three of these results should be 0 or 1. Confirm they are.
8836 Then use those values to select the proper code to use. */
8837
8838 if (TREE_CODE (high_result) == INTEGER_CST
8839 && TREE_CODE (equal_result) == INTEGER_CST
8840 && TREE_CODE (low_result) == INTEGER_CST)
8841 {
8842 /* Make a 3-bit mask with the high-order bit being the
8843 value for `>', the next for `=', and the low for `<'. */
8844 switch ((integer_onep (high_result) * 4)
8845 + (integer_onep (equal_result) * 2)
8846 + integer_onep (low_result))
8847 {
8848 case 0:
8849 /* Always false. */
8850 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8851 case 1:
8852 code = LT_EXPR;
8853 break;
8854 case 2:
8855 code = EQ_EXPR;
8856 break;
8857 case 3:
8858 code = LE_EXPR;
8859 break;
8860 case 4:
8861 code = GT_EXPR;
8862 break;
8863 case 5:
8864 code = NE_EXPR;
8865 break;
8866 case 6:
8867 code = GE_EXPR;
8868 break;
8869 case 7:
8870 /* Always true. */
8871 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8872 }
8873
8874 if (save_p)
8875 {
8876 tem = save_expr (build2 (code, type, cval1, cval2));
8877 SET_EXPR_LOCATION (tem, loc);
8878 return tem;
8879 }
8880 return fold_build2_loc (loc, code, type, cval1, cval2);
8881 }
8882 }
8883 }
8884
8885 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8886 into a single range test. */
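/* Illustrative example (added here; not in the original source): for
   unsigned x, "x / 4 == 3" holds exactly for 12 <= x <= 15, so it can
   become a single range test such as "x - 12 <= 3". */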
8887 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8888 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8889 && TREE_CODE (arg1) == INTEGER_CST
8890 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8891 && !integer_zerop (TREE_OPERAND (arg0, 1))
8892 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8893 && !TREE_OVERFLOW (arg1))
8894 {
8895 tem = fold_div_compare (loc, code, type, arg0, arg1);
8896 if (tem != NULL_TREE)
8897 return tem;
8898 }
8899
8900 return NULL_TREE;
8901 }
8902
8903
8904 /* Subroutine of fold_binary. Optimize complex multiplications of the
8905 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8906 argument EXPR represents the expression "z" of type TYPE. */
8907
8908 static tree
8909 fold_mult_zconjz (location_t loc, tree type, tree expr)
8910 {
8911 tree itype = TREE_TYPE (type);
8912 tree rpart, ipart, tem;
8913
8914 if (TREE_CODE (expr) == COMPLEX_EXPR)
8915 {
8916 rpart = TREE_OPERAND (expr, 0);
8917 ipart = TREE_OPERAND (expr, 1);
8918 }
8919 else if (TREE_CODE (expr) == COMPLEX_CST)
8920 {
8921 rpart = TREE_REALPART (expr);
8922 ipart = TREE_IMAGPART (expr);
8923 }
8924 else
8925 {
8926 expr = save_expr (expr);
8927 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8928 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8929 }
8930
8931 rpart = save_expr (rpart);
8932 ipart = save_expr (ipart);
8933 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8934 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8935 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8936 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8937 build_zero_cst (itype));
8938 }
8939
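/* Illustrative sketch (added here; not part of the original source): the
   identity behind fold_mult_zconjz, checked with C99 complex arithmetic.
   For finite z, z * conj (z) is real with value re*re + im*im, which is
   exactly what the fold above constructs. */
#include <complex.h>

static double _Complex
zconjz_reference (double _Complex z)
{
  double re = creal (z), im = cimag (z);
  /* Same value as z * conj (z) for finite operands; the implicit
     conversion gives a zero imaginary part. */
  return re * re + im * im;
}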
8940
8941 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8942 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8943
8944 static bool
8945 vec_cst_ctor_to_array (tree arg, tree *elts)
8946 {
8947 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8948
8949 if (TREE_CODE (arg) == VECTOR_CST)
8950 {
8951 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8952 elts[i] = VECTOR_CST_ELT (arg, i);
8953 }
8954 else if (TREE_CODE (arg) == CONSTRUCTOR)
8955 {
8956 constructor_elt *elt;
8957
8958 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8959 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8960 return false;
8961 else
8962 elts[i] = elt->value;
8963 }
8964 else
8965 return false;
8966 for (; i < nelts; i++)
8967 elts[i]
8968 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8969 return true;
8970 }
8971
8972 /* Attempt to fold a vector permutation of the ARG0 and ARG1 vectors using
8973 the selector SEL. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8974 NULL_TREE otherwise. */
8975
8976 static tree
8977 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8978 {
8979 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8980 tree *elts;
8981 bool need_ctor = false;
8982
8983 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8984 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8985 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8986 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8987 return NULL_TREE;
8988
8989 elts = XALLOCAVEC (tree, nelts * 3);
8990 if (!vec_cst_ctor_to_array (arg0, elts)
8991 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8992 return NULL_TREE;
8993
8994 for (i = 0; i < nelts; i++)
8995 {
8996 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8997 need_ctor = true;
8998 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8999 }
9000
9001 if (need_ctor)
9002 {
9003 vec<constructor_elt, va_gc> *v;
9004 vec_alloc (v, nelts);
9005 for (i = 0; i < nelts; i++)
9006 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
9007 return build_constructor (type, v);
9008 }
9009 else
9010 return build_vector (type, &elts[2 * nelts]);
9011 }
9012
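/* Illustrative sketch (added here; not part of the original source): the
   selector semantics fold_vec_perm assumes, written out for plain arrays.
   SEL entries below NELTS pick from the first input and the rest from the
   second, mirroring the concatenated ELTS array above. */
static void
vec_perm_reference (const int *arg0, const int *arg1,
                    const unsigned char *sel, int *out, unsigned int nelts)
{
  for (unsigned int i = 0; i < nelts; i++)
    out[i] = sel[i] < nelts ? arg0[sel[i]] : arg1[sel[i] - nelts];
}
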
9013 /* Try to fold a pointer difference of type TYPE between two address
9014 expressions of array references AREF0 and AREF1 using location LOC. Return a
9015 simplified expression for the difference or NULL_TREE. */
9016
9017 static tree
9018 fold_addr_of_array_ref_difference (location_t loc, tree type,
9019 tree aref0, tree aref1)
9020 {
9021 tree base0 = TREE_OPERAND (aref0, 0);
9022 tree base1 = TREE_OPERAND (aref1, 0);
9023 tree base_offset = build_int_cst (type, 0);
9024
9025 /* If the bases are array references as well, recurse. If the bases
9026 are pointer indirections compute the difference of the pointers.
9027 If the bases are equal, we are set. */
9028 if ((TREE_CODE (base0) == ARRAY_REF
9029 && TREE_CODE (base1) == ARRAY_REF
9030 && (base_offset
9031 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
9032 || (INDIRECT_REF_P (base0)
9033 && INDIRECT_REF_P (base1)
9034 && (base_offset
9035 = fold_binary_loc (loc, MINUS_EXPR, type,
9036 fold_convert (type, TREE_OPERAND (base0, 0)),
9037 fold_convert (type,
9038 TREE_OPERAND (base1, 0)))))
9039 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
9040 {
9041 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
9042 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
9043 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
9044 tree diff = build2 (MINUS_EXPR, type, op0, op1);
9045 return fold_build2_loc (loc, PLUS_EXPR, type,
9046 base_offset,
9047 fold_build2_loc (loc, MULT_EXPR, type,
9048 diff, esz));
9049 }
9050 return NULL_TREE;
9051 }
9052
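/* Illustrative sketch (added here; not part of the original source): the
   C-level guarantee the folding above relies on; within one array,
   &a[i] - &a[j] is exactly i - j (the element-size scaling happens only
   at the byte level). */
#include <stddef.h>

static ptrdiff_t
array_ref_difference_reference (const double *a, ptrdiff_t i, ptrdiff_t j)
{
  return &a[i] - &a[j];		/* Folds to i - j. */
}
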
9053 /* If the real or vector real constant CST of type TYPE has an exact
9054 inverse, return it, else return NULL. */
9055
9056 tree
9057 exact_inverse (tree type, tree cst)
9058 {
9059 REAL_VALUE_TYPE r;
9060 tree unit_type, *elts;
9061 machine_mode mode;
9062 unsigned vec_nelts, i;
9063
9064 switch (TREE_CODE (cst))
9065 {
9066 case REAL_CST:
9067 r = TREE_REAL_CST (cst);
9068
9069 if (exact_real_inverse (TYPE_MODE (type), &r))
9070 return build_real (type, r);
9071
9072 return NULL_TREE;
9073
9074 case VECTOR_CST:
9075 vec_nelts = VECTOR_CST_NELTS (cst);
9076 elts = XALLOCAVEC (tree, vec_nelts);
9077 unit_type = TREE_TYPE (type);
9078 mode = TYPE_MODE (unit_type);
9079
9080 for (i = 0; i < vec_nelts; i++)
9081 {
9082 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
9083 if (!exact_real_inverse (mode, &r))
9084 return NULL_TREE;
9085 elts[i] = build_real (unit_type, r);
9086 }
9087
9088 return build_vector (type, elts);
9089
9090 default:
9091 return NULL_TREE;
9092 }
9093 }
9094
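/* Illustrative sketch (added here; not part of the original source): in
   binary floating point, 1/x is exact precisely when x is a power of two
   and the inverse neither overflows nor underflows.  frexp approximates
   that test (this sketch ignores the exponent-range caveat). */
#include <math.h>

static int
has_exact_inverse_approx (double x)
{
  int e;
  double m = frexp (x, &e);	/* Power of two <=> |significand| == 0.5. */
  return m == 0.5 || m == -0.5;
}
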
9095 /* Mask out the tz least significant bits of X of type TYPE where
9096 tz is the number of trailing zeroes in Y. */
9097 static wide_int
9098 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
9099 {
9100 int tz = wi::ctz (y);
9101 if (tz > 0)
9102 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
9103 return x;
9104 }
9105
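/* Illustrative sketch (added here; not part of the original source): the
   same masking on a plain 32-bit value.  X * Y has ctz (Y) known-zero low
   bits, so those bits of a mask contribute nothing; the (X * CST1) & CST2
   fold further below uses this helper for exactly that reason. */
#include <stdint.h>

static uint32_t
mask_with_tz_u32 (uint32_t x, uint32_t y)
{
  if (y == 0)
    return 0;			/* Every bit of y is a trailing zero. */
  int tz = __builtin_ctz (y);
  return tz > 0 ? x & (~0u << tz) : x;
}
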
9106 /* Return true when T is an address and is known to be nonzero.
9107 For floating point we further ensure that T is not denormal.
9108 Similar logic is present in nonzero_address in rtlanal.h.
9109
9110 If the return value is based on the assumption that signed overflow
9111 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9112 change *STRICT_OVERFLOW_P. */
9113
9114 static bool
9115 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9116 {
9117 tree type = TREE_TYPE (t);
9118 enum tree_code code;
9119
9120 /* Doing something useful for floating point would need more work. */
9121 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9122 return false;
9123
9124 code = TREE_CODE (t);
9125 switch (TREE_CODE_CLASS (code))
9126 {
9127 case tcc_unary:
9128 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9129 strict_overflow_p);
9130 case tcc_binary:
9131 case tcc_comparison:
9132 return tree_binary_nonzero_warnv_p (code, type,
9133 TREE_OPERAND (t, 0),
9134 TREE_OPERAND (t, 1),
9135 strict_overflow_p);
9136 case tcc_constant:
9137 case tcc_declaration:
9138 case tcc_reference:
9139 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9140
9141 default:
9142 break;
9143 }
9144
9145 switch (code)
9146 {
9147 case TRUTH_NOT_EXPR:
9148 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9149 strict_overflow_p);
9150
9151 case TRUTH_AND_EXPR:
9152 case TRUTH_OR_EXPR:
9153 case TRUTH_XOR_EXPR:
9154 return tree_binary_nonzero_warnv_p (code, type,
9155 TREE_OPERAND (t, 0),
9156 TREE_OPERAND (t, 1),
9157 strict_overflow_p);
9158
9159 case COND_EXPR:
9160 case CONSTRUCTOR:
9161 case OBJ_TYPE_REF:
9162 case ASSERT_EXPR:
9163 case ADDR_EXPR:
9164 case WITH_SIZE_EXPR:
9165 case SSA_NAME:
9166 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9167
9168 case COMPOUND_EXPR:
9169 case MODIFY_EXPR:
9170 case BIND_EXPR:
9171 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9172 strict_overflow_p);
9173
9174 case SAVE_EXPR:
9175 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9176 strict_overflow_p);
9177
9178 case CALL_EXPR:
9179 {
9180 tree fndecl = get_callee_fndecl (t);
9181 if (!fndecl) return false;
9182 if (flag_delete_null_pointer_checks && !flag_check_new
9183 && DECL_IS_OPERATOR_NEW (fndecl)
9184 && !TREE_NOTHROW (fndecl))
9185 return true;
9186 if (flag_delete_null_pointer_checks
9187 && lookup_attribute ("returns_nonnull",
9188 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9189 return true;
9190 return alloca_call_p (t);
9191 }
9192
9193 default:
9194 break;
9195 }
9196 return false;
9197 }
9198
9199 /* Return true when T is an address and is known to be nonzero.
9200 Handle warnings about undefined signed overflow. */
9201
9202 static bool
9203 tree_expr_nonzero_p (tree t)
9204 {
9205 bool ret, strict_overflow_p;
9206
9207 strict_overflow_p = false;
9208 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9209 if (strict_overflow_p)
9210 fold_overflow_warning (("assuming signed overflow does not occur when "
9211 "determining that expression is always "
9212 "non-zero"),
9213 WARN_STRICT_OVERFLOW_MISC);
9214 return ret;
9215 }
9216
9217 /* Return true if T is known not to be equal to an integer W. */
9218
9219 bool
9220 expr_not_equal_to (tree t, const wide_int &w)
9221 {
9222 wide_int min, max, nz;
9223 value_range_type rtype;
9224 switch (TREE_CODE (t))
9225 {
9226 case INTEGER_CST:
9227 return wi::ne_p (t, w);
9228
9229 case SSA_NAME:
9230 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9231 return false;
9232 rtype = get_range_info (t, &min, &max);
9233 if (rtype == VR_RANGE)
9234 {
9235 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9236 return true;
9237 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9238 return true;
9239 }
9240 else if (rtype == VR_ANTI_RANGE
9241 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9242 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9243 return true;
9244 /* If T has some known zero bits and W has any of those bits set,
9245 then T is known not to be equal to W. */
9246 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9247 TYPE_PRECISION (TREE_TYPE (t))), 0))
9248 return true;
9249 return false;
9250
9251 default:
9252 return false;
9253 }
9254 }
9255
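/* Illustrative sketch (added here; not part of the original source): the
   nonzero-bits argument above, restated for a 32-bit value.  If
   NONZERO_MASK covers every bit that can possibly be set in T, then T
   cannot equal a W with a bit set outside the mask; e.g. an even T
   (mask ~1u) is never equal to an odd W. */
#include <stdint.h>
#include <stdbool.h>

static bool
known_not_equal_u32 (uint32_t nonzero_mask, uint32_t w)
{
  return (w & ~nonzero_mask) != 0;
}
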
9256 /* Fold a binary expression of code CODE and type TYPE with operands
9257 OP0 and OP1. LOC is the location of the resulting expression.
9258 Return the folded expression if folding is successful. Otherwise,
9259 return NULL_TREE. */
9260
9261 tree
9262 fold_binary_loc (location_t loc,
9263 enum tree_code code, tree type, tree op0, tree op1)
9264 {
9265 enum tree_code_class kind = TREE_CODE_CLASS (code);
9266 tree arg0, arg1, tem;
9267 tree t1 = NULL_TREE;
9268 bool strict_overflow_p;
9269 unsigned int prec;
9270
9271 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9272 && TREE_CODE_LENGTH (code) == 2
9273 && op0 != NULL_TREE
9274 && op1 != NULL_TREE);
9275
9276 arg0 = op0;
9277 arg1 = op1;
9278
9279 /* Strip any conversions that don't change the mode. This is
9280 safe for every expression, except for a comparison expression
9281 because its signedness is derived from its operands. So, in
9282 the latter case, only strip conversions that don't change the
9283 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9284 preserved.
9285
9286 Note that this is done as an internal manipulation within the
9287 constant folder, in order to find the simplest representation
9288 of the arguments so that their form can be studied. In any
9289 case, the appropriate type conversions should be put back in
9290 the tree that will get out of the constant folder. */
9291
9292 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9293 {
9294 STRIP_SIGN_NOPS (arg0);
9295 STRIP_SIGN_NOPS (arg1);
9296 }
9297 else
9298 {
9299 STRIP_NOPS (arg0);
9300 STRIP_NOPS (arg1);
9301 }
9302
9303 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9304 constant but we can't do arithmetic on them. */
9305 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9306 {
9307 tem = const_binop (code, type, arg0, arg1);
9308 if (tem != NULL_TREE)
9309 {
9310 if (TREE_TYPE (tem) != type)
9311 tem = fold_convert_loc (loc, type, tem);
9312 return tem;
9313 }
9314 }
9315
9316 /* If this is a commutative operation, and ARG0 is a constant, move it
9317 to ARG1 to reduce the number of tests below. */
9318 if (commutative_tree_code (code)
9319 && tree_swap_operands_p (arg0, arg1, true))
9320 return fold_build2_loc (loc, code, type, op1, op0);
9321
9322 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9323 to ARG1 to reduce the number of tests below. */
9324 if (kind == tcc_comparison
9325 && tree_swap_operands_p (arg0, arg1, true))
9326 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9327
9328 tem = generic_simplify (loc, code, type, op0, op1);
9329 if (tem)
9330 return tem;
9331
9332 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9333
9334 First check for cases where an arithmetic operation is applied to a
9335 compound, conditional, or comparison operation. Push the arithmetic
9336 operation inside the compound or conditional to see if any folding
9337 can then be done. Convert comparison to conditional for this purpose.
9338 This also optimizes non-constant cases that used to be done in
9339 expand_expr.
9340
9341 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR where
9342 one of the operands is a comparison and the other is a comparison, a
9343 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9344 code below would make the expression more complex. Change it to a
9345 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9346 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9347
9348 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9349 || code == EQ_EXPR || code == NE_EXPR)
9350 && TREE_CODE (type) != VECTOR_TYPE
9351 && ((truth_value_p (TREE_CODE (arg0))
9352 && (truth_value_p (TREE_CODE (arg1))
9353 || (TREE_CODE (arg1) == BIT_AND_EXPR
9354 && integer_onep (TREE_OPERAND (arg1, 1)))))
9355 || (truth_value_p (TREE_CODE (arg1))
9356 && (truth_value_p (TREE_CODE (arg0))
9357 || (TREE_CODE (arg0) == BIT_AND_EXPR
9358 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9359 {
9360 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9361 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9362 : TRUTH_XOR_EXPR,
9363 boolean_type_node,
9364 fold_convert_loc (loc, boolean_type_node, arg0),
9365 fold_convert_loc (loc, boolean_type_node, arg1));
9366
9367 if (code == EQ_EXPR)
9368 tem = invert_truthvalue_loc (loc, tem);
9369
9370 return fold_convert_loc (loc, type, tem);
9371 }
9372
9373 if (TREE_CODE_CLASS (code) == tcc_binary
9374 || TREE_CODE_CLASS (code) == tcc_comparison)
9375 {
9376 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9377 {
9378 tem = fold_build2_loc (loc, code, type,
9379 fold_convert_loc (loc, TREE_TYPE (op0),
9380 TREE_OPERAND (arg0, 1)), op1);
9381 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9382 tem);
9383 }
9384 if (TREE_CODE (arg1) == COMPOUND_EXPR
9385 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9386 {
9387 tem = fold_build2_loc (loc, code, type, op0,
9388 fold_convert_loc (loc, TREE_TYPE (op1),
9389 TREE_OPERAND (arg1, 1)));
9390 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9391 tem);
9392 }
9393
9394 if (TREE_CODE (arg0) == COND_EXPR
9395 || TREE_CODE (arg0) == VEC_COND_EXPR
9396 || COMPARISON_CLASS_P (arg0))
9397 {
9398 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9399 arg0, arg1,
9400 /*cond_first_p=*/1);
9401 if (tem != NULL_TREE)
9402 return tem;
9403 }
9404
9405 if (TREE_CODE (arg1) == COND_EXPR
9406 || TREE_CODE (arg1) == VEC_COND_EXPR
9407 || COMPARISON_CLASS_P (arg1))
9408 {
9409 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9410 arg1, arg0,
9411 /*cond_first_p=*/0);
9412 if (tem != NULL_TREE)
9413 return tem;
9414 }
9415 }
9416
9417 switch (code)
9418 {
9419 case MEM_REF:
9420 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9421 if (TREE_CODE (arg0) == ADDR_EXPR
9422 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9423 {
9424 tree iref = TREE_OPERAND (arg0, 0);
9425 return fold_build2 (MEM_REF, type,
9426 TREE_OPERAND (iref, 0),
9427 int_const_binop (PLUS_EXPR, arg1,
9428 TREE_OPERAND (iref, 1)));
9429 }
9430
9431 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9432 if (TREE_CODE (arg0) == ADDR_EXPR
9433 && handled_component_p (TREE_OPERAND (arg0, 0)))
9434 {
9435 tree base;
9436 HOST_WIDE_INT coffset;
9437 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9438 &coffset);
9439 if (!base)
9440 return NULL_TREE;
9441 return fold_build2 (MEM_REF, type,
9442 build_fold_addr_expr (base),
9443 int_const_binop (PLUS_EXPR, arg1,
9444 size_int (coffset)));
9445 }
9446
9447 return NULL_TREE;
9448
9449 case POINTER_PLUS_EXPR:
9450 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9451 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9452 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9453 return fold_convert_loc (loc, type,
9454 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9455 fold_convert_loc (loc, sizetype,
9456 arg1),
9457 fold_convert_loc (loc, sizetype,
9458 arg0)));
9459
9460 return NULL_TREE;
9461
9462 case PLUS_EXPR:
9463 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9464 {
9465 /* X + (X / CST) * -CST is X % CST. */
9466 if (TREE_CODE (arg1) == MULT_EXPR
9467 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9468 && operand_equal_p (arg0,
9469 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9470 {
9471 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9472 tree cst1 = TREE_OPERAND (arg1, 1);
9473 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9474 cst1, cst0);
9475 if (sum && integer_zerop (sum))
9476 return fold_convert_loc (loc, type,
9477 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9478 TREE_TYPE (arg0), arg0,
9479 cst0));
9480 }
9481 }
9482
9483 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9484 one. Make sure the type is not saturating and has the signedness of
9485 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9486 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9487 if ((TREE_CODE (arg0) == MULT_EXPR
9488 || TREE_CODE (arg1) == MULT_EXPR)
9489 && !TYPE_SATURATING (type)
9490 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9491 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9492 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9493 {
9494 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9495 if (tem)
9496 return tem;
9497 }
9498
9499 if (! FLOAT_TYPE_P (type))
9500 {
9501 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9502 (plus (plus (mult) (mult)) (foo)) so that we can
9503 take advantage of the factoring cases below. */
9504 if (ANY_INTEGRAL_TYPE_P (type)
9505 && TYPE_OVERFLOW_WRAPS (type)
9506 && (((TREE_CODE (arg0) == PLUS_EXPR
9507 || TREE_CODE (arg0) == MINUS_EXPR)
9508 && TREE_CODE (arg1) == MULT_EXPR)
9509 || ((TREE_CODE (arg1) == PLUS_EXPR
9510 || TREE_CODE (arg1) == MINUS_EXPR)
9511 && TREE_CODE (arg0) == MULT_EXPR)))
9512 {
9513 tree parg0, parg1, parg, marg;
9514 enum tree_code pcode;
9515
9516 if (TREE_CODE (arg1) == MULT_EXPR)
9517 parg = arg0, marg = arg1;
9518 else
9519 parg = arg1, marg = arg0;
9520 pcode = TREE_CODE (parg);
9521 parg0 = TREE_OPERAND (parg, 0);
9522 parg1 = TREE_OPERAND (parg, 1);
9523 STRIP_NOPS (parg0);
9524 STRIP_NOPS (parg1);
9525
9526 if (TREE_CODE (parg0) == MULT_EXPR
9527 && TREE_CODE (parg1) != MULT_EXPR)
9528 return fold_build2_loc (loc, pcode, type,
9529 fold_build2_loc (loc, PLUS_EXPR, type,
9530 fold_convert_loc (loc, type,
9531 parg0),
9532 fold_convert_loc (loc, type,
9533 marg)),
9534 fold_convert_loc (loc, type, parg1));
9535 if (TREE_CODE (parg0) != MULT_EXPR
9536 && TREE_CODE (parg1) == MULT_EXPR)
9537 return
9538 fold_build2_loc (loc, PLUS_EXPR, type,
9539 fold_convert_loc (loc, type, parg0),
9540 fold_build2_loc (loc, pcode, type,
9541 fold_convert_loc (loc, type, marg),
9542 fold_convert_loc (loc, type,
9543 parg1)));
9544 }
9545 }
9546 else
9547 {
9548 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9549 to __complex__ ( x, y ). This is not the same for SNaNs or
9550 if signed zeros are involved. */
9551 if (!HONOR_SNANS (element_mode (arg0))
9552 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9553 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9554 {
9555 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9556 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9557 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9558 bool arg0rz = false, arg0iz = false;
9559 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9560 || (arg0i && (arg0iz = real_zerop (arg0i))))
9561 {
9562 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9563 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9564 if (arg0rz && arg1i && real_zerop (arg1i))
9565 {
9566 tree rp = arg1r ? arg1r
9567 : build1 (REALPART_EXPR, rtype, arg1);
9568 tree ip = arg0i ? arg0i
9569 : build1 (IMAGPART_EXPR, rtype, arg0);
9570 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9571 }
9572 else if (arg0iz && arg1r && real_zerop (arg1r))
9573 {
9574 tree rp = arg0r ? arg0r
9575 : build1 (REALPART_EXPR, rtype, arg0);
9576 tree ip = arg1i ? arg1i
9577 : build1 (IMAGPART_EXPR, rtype, arg1);
9578 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9579 }
9580 }
9581 }
9582
9583 if (flag_unsafe_math_optimizations
9584 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9585 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9586 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9587 return tem;
9588
9589 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9590 We associate floats only if the user has specified
9591 -fassociative-math. */
9592 if (flag_associative_math
9593 && TREE_CODE (arg1) == PLUS_EXPR
9594 && TREE_CODE (arg0) != MULT_EXPR)
9595 {
9596 tree tree10 = TREE_OPERAND (arg1, 0);
9597 tree tree11 = TREE_OPERAND (arg1, 1);
9598 if (TREE_CODE (tree11) == MULT_EXPR
9599 && TREE_CODE (tree10) == MULT_EXPR)
9600 {
9601 tree tree0;
9602 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9603 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9604 }
9605 }
9606 /* Convert (b*c + d*e) + a into b*c + (d*e + a).
9607 We associate floats only if the user has specified
9608 -fassociative-math. */
9609 if (flag_associative_math
9610 && TREE_CODE (arg0) == PLUS_EXPR
9611 && TREE_CODE (arg1) != MULT_EXPR)
9612 {
9613 tree tree00 = TREE_OPERAND (arg0, 0);
9614 tree tree01 = TREE_OPERAND (arg0, 1);
9615 if (TREE_CODE (tree01) == MULT_EXPR
9616 && TREE_CODE (tree00) == MULT_EXPR)
9617 {
9618 tree tree0;
9619 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9620 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9621 }
9622 }
9623 }
9624
9625 bit_rotate:
9626 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9627 is a rotate of A by C1 bits. */
9628 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9629 is a rotate of A by B bits. */
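/* Illustrative example (added here; not in the original source): for
   32-bit unsigned A, "(A << 3) + (A >> 29)" has C1 + C2 == 32 and
   becomes a rotate of A left by 3; "(A << B) + (A >> (32 - B))"
   likewise becomes a rotate of A by B. */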
9630 {
9631 enum tree_code code0, code1;
9632 tree rtype;
9633 code0 = TREE_CODE (arg0);
9634 code1 = TREE_CODE (arg1);
9635 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9636 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9637 && operand_equal_p (TREE_OPERAND (arg0, 0),
9638 TREE_OPERAND (arg1, 0), 0)
9639 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9640 TYPE_UNSIGNED (rtype))
9641 /* Only create rotates in complete modes. Other cases are not
9642 expanded properly. */
9643 && (element_precision (rtype)
9644 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9645 {
9646 tree tree01, tree11;
9647 enum tree_code code01, code11;
9648
9649 tree01 = TREE_OPERAND (arg0, 1);
9650 tree11 = TREE_OPERAND (arg1, 1);
9651 STRIP_NOPS (tree01);
9652 STRIP_NOPS (tree11);
9653 code01 = TREE_CODE (tree01);
9654 code11 = TREE_CODE (tree11);
9655 if (code01 == INTEGER_CST
9656 && code11 == INTEGER_CST
9657 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9658 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9659 {
9660 tem = build2_loc (loc, LROTATE_EXPR,
9661 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9662 TREE_OPERAND (arg0, 0),
9663 code0 == LSHIFT_EXPR
9664 ? TREE_OPERAND (arg0, 1)
9665 : TREE_OPERAND (arg1, 1));
9666 return fold_convert_loc (loc, type, tem);
9667 }
9668 else if (code11 == MINUS_EXPR)
9669 {
9670 tree tree110, tree111;
9671 tree110 = TREE_OPERAND (tree11, 0);
9672 tree111 = TREE_OPERAND (tree11, 1);
9673 STRIP_NOPS (tree110);
9674 STRIP_NOPS (tree111);
9675 if (TREE_CODE (tree110) == INTEGER_CST
9676 && 0 == compare_tree_int (tree110,
9677 element_precision
9678 (TREE_TYPE (TREE_OPERAND
9679 (arg0, 0))))
9680 && operand_equal_p (tree01, tree111, 0))
9681 return
9682 fold_convert_loc (loc, type,
9683 build2 ((code0 == LSHIFT_EXPR
9684 ? LROTATE_EXPR
9685 : RROTATE_EXPR),
9686 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9687 TREE_OPERAND (arg0, 0),
9688 TREE_OPERAND (arg0, 1)));
9689 }
9690 else if (code01 == MINUS_EXPR)
9691 {
9692 tree tree010, tree011;
9693 tree010 = TREE_OPERAND (tree01, 0);
9694 tree011 = TREE_OPERAND (tree01, 1);
9695 STRIP_NOPS (tree010);
9696 STRIP_NOPS (tree011);
9697 if (TREE_CODE (tree010) == INTEGER_CST
9698 && 0 == compare_tree_int (tree010,
9699 element_precision
9700 (TREE_TYPE (TREE_OPERAND
9701 (arg0, 0))))
9702 && operand_equal_p (tree11, tree011, 0))
9703 return fold_convert_loc
9704 (loc, type,
9705 build2 ((code0 != LSHIFT_EXPR
9706 ? LROTATE_EXPR
9707 : RROTATE_EXPR),
9708 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9709 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9710 }
9711 }
9712 }
9713
9714 associate:
9715 /* In most languages, we can't associate operations on floats through
9716 parentheses. Rather than remember where the parentheses were, we
9717 don't associate floats at all, unless the user has specified
9718 -fassociative-math.
9719 And, we need to make sure type is not saturating. */
9720
9721 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9722 && !TYPE_SATURATING (type))
9723 {
9724 tree var0, con0, lit0, minus_lit0;
9725 tree var1, con1, lit1, minus_lit1;
9726 tree atype = type;
9727 bool ok = true;
9728
9729 /* Split both trees into variables, constants, and literals. Then
9730 associate each group together, the constants with literals,
9731 then the result with variables. This increases the chances of
9732 literals being recombined later and of generating relocatable
9733 expressions for the sum of a constant and literal. */
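/* Illustrative example (added here; not in the original source): for
   unsigned x and y, (x + 3) + (y + 5) splits into variables x, y and
   literals 3, 5, which reassociate to (x + y) + 8 so the literal sum
   folds at compile time. */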
9734 var0 = split_tree (loc, arg0, type, code,
9735 &con0, &lit0, &minus_lit0, 0);
9736 var1 = split_tree (loc, arg1, type, code,
9737 &con1, &lit1, &minus_lit1, code == MINUS_EXPR);
9738
9739 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9740 if (code == MINUS_EXPR)
9741 code = PLUS_EXPR;
9742
9743 /* With undefined overflow prefer doing association in a type
9744 which wraps on overflow, if that is one of the operand types. */
9745 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9746 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9747 {
9748 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9749 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9750 atype = TREE_TYPE (arg0);
9751 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9752 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9753 atype = TREE_TYPE (arg1);
9754 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9755 }
9756
9757 /* With undefined overflow we can only associate constants with one
9758 variable, and constants whose association doesn't overflow. */
9759 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9760 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9761 {
9762 if (var0 && var1)
9763 {
9764 tree tmp0 = var0;
9765 tree tmp1 = var1;
9766 bool one_neg = false;
9767
9768 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9769 {
9770 tmp0 = TREE_OPERAND (tmp0, 0);
9771 one_neg = !one_neg;
9772 }
9773 if (CONVERT_EXPR_P (tmp0)
9774 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9775 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9776 <= TYPE_PRECISION (atype)))
9777 tmp0 = TREE_OPERAND (tmp0, 0);
9778 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9779 {
9780 tmp1 = TREE_OPERAND (tmp1, 0);
9781 one_neg = !one_neg;
9782 }
9783 if (CONVERT_EXPR_P (tmp1)
9784 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9785 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9786 <= TYPE_PRECISION (atype)))
9787 tmp1 = TREE_OPERAND (tmp1, 0);
9788 /* The only case we can still associate with two variables
9789 is if they cancel out. */
9790 if (!one_neg
9791 || !operand_equal_p (tmp0, tmp1, 0))
9792 ok = false;
9793 }
9794 }
9795
9796 /* Only do something if we found more than two objects. Otherwise,
9797 nothing has changed and we risk infinite recursion. */
9798 if (ok
9799 && (2 < ((var0 != 0) + (var1 != 0)
9800 + (con0 != 0) + (con1 != 0)
9801 + (lit0 != 0) + (lit1 != 0)
9802 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9803 {
9804 bool any_overflows = false;
9805 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9806 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9807 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9808 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9809 var0 = associate_trees (loc, var0, var1, code, atype);
9810 con0 = associate_trees (loc, con0, con1, code, atype);
9811 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9812 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9813 code, atype);
9814
9815 /* Preserve the MINUS_EXPR if the negative part of the literal is
9816 greater than the positive part. Otherwise, the multiplicative
9817 folding code (i.e. extract_muldiv) may be fooled when
9818 unsigned constants are subtracted, as in the following
9819 example: ((X*2 + 4) - 8U)/2. */
9820 if (minus_lit0 && lit0)
9821 {
9822 if (TREE_CODE (lit0) == INTEGER_CST
9823 && TREE_CODE (minus_lit0) == INTEGER_CST
9824 && tree_int_cst_lt (lit0, minus_lit0))
9825 {
9826 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9827 MINUS_EXPR, atype);
9828 lit0 = 0;
9829 }
9830 else
9831 {
9832 lit0 = associate_trees (loc, lit0, minus_lit0,
9833 MINUS_EXPR, atype);
9834 minus_lit0 = 0;
9835 }
9836 }
9837
9838 /* Don't introduce overflows through reassociation. */
9839 if (!any_overflows
9840 && ((lit0 && TREE_OVERFLOW_P (lit0))
9841 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9842 return NULL_TREE;
9843
9844 if (minus_lit0)
9845 {
9846 if (con0 == 0)
9847 return
9848 fold_convert_loc (loc, type,
9849 associate_trees (loc, var0, minus_lit0,
9850 MINUS_EXPR, atype));
9851 else
9852 {
9853 con0 = associate_trees (loc, con0, minus_lit0,
9854 MINUS_EXPR, atype);
9855 return
9856 fold_convert_loc (loc, type,
9857 associate_trees (loc, var0, con0,
9858 PLUS_EXPR, atype));
9859 }
9860 }
9861
9862 con0 = associate_trees (loc, con0, lit0, code, atype);
9863 return
9864 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9865 code, atype));
9866 }
9867 }
9868
9869 return NULL_TREE;
9870
9871 case MINUS_EXPR:
9872 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9873 if (TREE_CODE (arg0) == NEGATE_EXPR
9874 && negate_expr_p (op1)
9875 && reorder_operands_p (arg0, arg1))
9876 return fold_build2_loc (loc, MINUS_EXPR, type,
9877 negate_expr (op1),
9878 fold_convert_loc (loc, type,
9879 TREE_OPERAND (arg0, 0)));
9880
9881 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9882 __complex__ ( x, -y ). This is not the same for SNaNs or if
9883 signed zeros are involved. */
9884 if (!HONOR_SNANS (element_mode (arg0))
9885 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9886 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9887 {
9888 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9889 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9890 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9891 bool arg0rz = false, arg0iz = false;
9892 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9893 || (arg0i && (arg0iz = real_zerop (arg0i))))
9894 {
9895 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9896 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9897 if (arg0rz && arg1i && real_zerop (arg1i))
9898 {
9899 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9900 arg1r ? arg1r
9901 : build1 (REALPART_EXPR, rtype, arg1));
9902 tree ip = arg0i ? arg0i
9903 : build1 (IMAGPART_EXPR, rtype, arg0);
9904 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9905 }
9906 else if (arg0iz && arg1r && real_zerop (arg1r))
9907 {
9908 tree rp = arg0r ? arg0r
9909 : build1 (REALPART_EXPR, rtype, arg0);
9910 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9911 arg1i ? arg1i
9912 : build1 (IMAGPART_EXPR, rtype, arg1));
9913 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9914 }
9915 }
9916 }
9917
9918 /* A - B -> A + (-B) if B is easily negatable. */
9919 if (negate_expr_p (op1)
9920 && ! TYPE_OVERFLOW_SANITIZED (type)
9921 && ((FLOAT_TYPE_P (type)
9922 /* Avoid this transformation if B is a positive REAL_CST. */
9923 && (TREE_CODE (op1) != REAL_CST
9924 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9925 || INTEGRAL_TYPE_P (type)))
9926 return fold_build2_loc (loc, PLUS_EXPR, type,
9927 fold_convert_loc (loc, type, arg0),
9928 negate_expr (op1));
9929
9930 /* Fold &a[i] - &a[j] to i-j. */
9931 if (TREE_CODE (arg0) == ADDR_EXPR
9932 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9933 && TREE_CODE (arg1) == ADDR_EXPR
9934 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9935 {
9936 tree tem = fold_addr_of_array_ref_difference (loc, type,
9937 TREE_OPERAND (arg0, 0),
9938 TREE_OPERAND (arg1, 0));
9939 if (tem)
9940 return tem;
9941 }
9942
9943 if (FLOAT_TYPE_P (type)
9944 && flag_unsafe_math_optimizations
9945 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9946 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9947 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9948 return tem;
9949
9950 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9951 one. Make sure the type is not saturating and has the signedness of
9952 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9953 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9954 if ((TREE_CODE (arg0) == MULT_EXPR
9955 || TREE_CODE (arg1) == MULT_EXPR)
9956 && !TYPE_SATURATING (type)
9957 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9958 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9959 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9960 {
9961 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9962 if (tem)
9963 return tem;
9964 }
9965
9966 goto associate;
9967
9968 case MULT_EXPR:
9969 if (! FLOAT_TYPE_P (type))
9970 {
9971 /* Transform x * -C into -x * C if x is easily negatable. */
9972 if (TREE_CODE (op1) == INTEGER_CST
9973 && tree_int_cst_sgn (op1) == -1
9974 && negate_expr_p (op0)
9975 && (tem = negate_expr (op1)) != op1
9976 && ! TREE_OVERFLOW (tem))
9977 return fold_build2_loc (loc, MULT_EXPR, type,
9978 fold_convert_loc (loc, type,
9979 negate_expr (op0)), tem);
9980
9981 strict_overflow_p = false;
9982 if (TREE_CODE (arg1) == INTEGER_CST
9983 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9984 &strict_overflow_p)))
9985 {
9986 if (strict_overflow_p)
9987 fold_overflow_warning (("assuming signed overflow does not "
9988 "occur when simplifying "
9989 "multiplication"),
9990 WARN_STRICT_OVERFLOW_MISC);
9991 return fold_convert_loc (loc, type, tem);
9992 }
9993
9994 /* Optimize z * conj(z) for integer complex numbers. */
9995 if (TREE_CODE (arg0) == CONJ_EXPR
9996 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9997 return fold_mult_zconjz (loc, type, arg1);
9998 if (TREE_CODE (arg1) == CONJ_EXPR
9999 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10000 return fold_mult_zconjz (loc, type, arg0);
10001 }
10002 else
10003 {
10004 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
10005 This is not the same for NaNs or if signed zeros are
10006 involved. */
10007 if (!HONOR_NANS (arg0)
10008 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
10009 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
10010 && TREE_CODE (arg1) == COMPLEX_CST
10011 && real_zerop (TREE_REALPART (arg1)))
10012 {
10013 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
10014 if (real_onep (TREE_IMAGPART (arg1)))
10015 return
10016 fold_build2_loc (loc, COMPLEX_EXPR, type,
10017 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
10018 rtype, arg0)),
10019 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
10020 else if (real_minus_onep (TREE_IMAGPART (arg1)))
10021 return
10022 fold_build2_loc (loc, COMPLEX_EXPR, type,
10023 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
10024 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
10025 rtype, arg0)));
10026 }
10027
10028 /* Optimize z * conj(z) for floating point complex numbers.
10029 Guarded by flag_unsafe_math_optimizations as non-finite
10030 imaginary components don't produce scalar results. */
10031 if (flag_unsafe_math_optimizations
10032 && TREE_CODE (arg0) == CONJ_EXPR
10033 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10034 return fold_mult_zconjz (loc, type, arg1);
10035 if (flag_unsafe_math_optimizations
10036 && TREE_CODE (arg1) == CONJ_EXPR
10037 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10038 return fold_mult_zconjz (loc, type, arg0);
10039 }
10040 goto associate;
10041
10042 case BIT_IOR_EXPR:
10043 /* Canonicalize (X & C1) | C2. */
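/* Illustrative examples (added here; not in the original source),
   for 32-bit X:
     (X & 0x0F) | 0xFF	       -> 0xFF		  ((C1 & C2) == C1)
     (X & 0xFFFFFF00) | 0xFF   -> X | 0xFF	  ((C1 | C2) == ~0)
     (X & 0x3F) | 0x0F	       -> (X & 0x30) | 0x0F  (C1 &= ~C2). */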
10044 if (TREE_CODE (arg0) == BIT_AND_EXPR
10045 && TREE_CODE (arg1) == INTEGER_CST
10046 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10047 {
10048 int width = TYPE_PRECISION (type), w;
10049 wide_int c1 = TREE_OPERAND (arg0, 1);
10050 wide_int c2 = arg1;
10051
10052 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
10053 if ((c1 & c2) == c1)
10054 return omit_one_operand_loc (loc, type, arg1,
10055 TREE_OPERAND (arg0, 0));
10056
10057 wide_int msk = wi::mask (width, false,
10058 TYPE_PRECISION (TREE_TYPE (arg1)));
10059
10060 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
10061 if (msk.and_not (c1 | c2) == 0)
10062 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10063 TREE_OPERAND (arg0, 0), arg1);
10064
10065 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
10066 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
10067 mode which allows further optimizations. */
10068 c1 &= msk;
10069 c2 &= msk;
10070 wide_int c3 = c1.and_not (c2);
10071 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
10072 {
10073 wide_int mask = wi::mask (w, false,
10074 TYPE_PRECISION (type));
10075 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10076 {
10077 c3 = mask;
10078 break;
10079 }
10080 }
10081
10082 if (c3 != c1)
10083 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10084 fold_build2_loc (loc, BIT_AND_EXPR, type,
10085 TREE_OPERAND (arg0, 0),
10086 wide_int_to_tree (type,
10087 c3)),
10088 arg1);
10089 }
10090
10091 /* See if this can be simplified into a rotate first. If that
10092 is unsuccessful continue in the association code. */
10093 goto bit_rotate;
10094
10095 case BIT_XOR_EXPR:
10096 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10097 if (TREE_CODE (arg0) == BIT_AND_EXPR
10098 && INTEGRAL_TYPE_P (type)
10099 && integer_onep (TREE_OPERAND (arg0, 1))
10100 && integer_onep (arg1))
10101 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10102 build_zero_cst (TREE_TYPE (arg0)));
10103
10104 /* See if this can be simplified into a rotate first. If that
10105 is unsuccessful continue in the association code. */
10106 goto bit_rotate;
10107
10108 case BIT_AND_EXPR:
10109 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10110 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10111 && INTEGRAL_TYPE_P (type)
10112 && integer_onep (TREE_OPERAND (arg0, 1))
10113 && integer_onep (arg1))
10114 {
10115 tree tem2;
10116 tem = TREE_OPERAND (arg0, 0);
10117 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10118 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10119 tem, tem2);
10120 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10121 build_zero_cst (TREE_TYPE (tem)));
10122 }
10123 /* Fold ~X & 1 as (X & 1) == 0. */
10124 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10125 && INTEGRAL_TYPE_P (type)
10126 && integer_onep (arg1))
10127 {
10128 tree tem2;
10129 tem = TREE_OPERAND (arg0, 0);
10130 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10131 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10132 tem, tem2);
10133 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10134 build_zero_cst (TREE_TYPE (tem)));
10135 }
10136 /* Fold !X & 1 as X == 0. */
10137 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10138 && integer_onep (arg1))
10139 {
10140 tem = TREE_OPERAND (arg0, 0);
10141 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10142 build_zero_cst (TREE_TYPE (tem)));
10143 }
10144
10145 /* Fold (X ^ Y) & Y as ~X & Y. */
10146 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10147 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10148 {
10149 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10150 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10151 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10152 fold_convert_loc (loc, type, arg1));
10153 }
10154 /* Fold (X ^ Y) & X as ~Y & X. */
10155 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10156 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10157 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10158 {
10159 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10160 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10161 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10162 fold_convert_loc (loc, type, arg1));
10163 }
10164 /* Fold X & (X ^ Y) as X & ~Y. */
10165 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10166 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10167 {
10168 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10169 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10170 fold_convert_loc (loc, type, arg0),
10171 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10172 }
10173 /* Fold X & (Y ^ X) as ~Y & X. */
10174 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10175 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10176 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10177 {
10178 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10179 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10180 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10181 fold_convert_loc (loc, type, arg0));
10182 }
10183
10184 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10185 multiple of 1 << CST. */
10186 if (TREE_CODE (arg1) == INTEGER_CST)
10187 {
10188 wide_int cst1 = arg1;
10189 wide_int ncst1 = -cst1;
10190 if ((cst1 & ncst1) == ncst1
10191 && multiple_of_p (type, arg0,
10192 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10193 return fold_convert_loc (loc, type, arg0);
10194 }
10195
10196 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10197 bits from CST2. */
10198 if (TREE_CODE (arg1) == INTEGER_CST
10199 && TREE_CODE (arg0) == MULT_EXPR
10200 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10201 {
10202 wide_int warg1 = arg1;
10203 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10204
10205 if (masked == 0)
10206 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10207 arg0, arg1);
10208 else if (masked != warg1)
10209 {
10210 /* Avoid the transform if arg1 is a mask of some
10211 mode which allows further optimizations. */
10212 int pop = wi::popcount (warg1);
10213 if (!(pop >= BITS_PER_UNIT
10214 && exact_log2 (pop) != -1
10215 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10216 return fold_build2_loc (loc, code, type, op0,
10217 wide_int_to_tree (type, masked));
10218 }
10219 }
10220
10221 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10222 ((A & N) + B) & M -> (A + B) & M
10223 Similarly if (N & M) == 0,
10224 ((A | N) + B) & M -> (A + B) & M
10225 and for - instead of + (or unary - instead of +)
10226 and/or ^ instead of |.
10227 If B is constant and (B & M) == 0, fold into A & M. */
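/* Illustrative example (added here; not in the original source): with
   M == 7, ((A & 15) + B) & 7 -> (A + B) & 7 because 15 covers all
   three low bits, and ((A | 8) + B) & 7 -> (A + B) & 7 because a
   carry never moves bit 3 down into the low three bits of the sum. */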
10228 if (TREE_CODE (arg1) == INTEGER_CST)
10229 {
10230 wide_int cst1 = arg1;
10231 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10232 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10233 && (TREE_CODE (arg0) == PLUS_EXPR
10234 || TREE_CODE (arg0) == MINUS_EXPR
10235 || TREE_CODE (arg0) == NEGATE_EXPR)
10236 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10237 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10238 {
10239 tree pmop[2];
10240 int which = 0;
10241 wide_int cst0;
10242
10243 /* Now we know that arg0 is (C + D) or (C - D) or
10244 -C, and that arg1 (M) equals (1LL << cst) - 1.
10245 Store C into PMOP[0] and D into PMOP[1]. */
10246 pmop[0] = TREE_OPERAND (arg0, 0);
10247 pmop[1] = NULL;
10248 if (TREE_CODE (arg0) != NEGATE_EXPR)
10249 {
10250 pmop[1] = TREE_OPERAND (arg0, 1);
10251 which = 1;
10252 }
10253
10254 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10255 which = -1;
10256
10257 for (; which >= 0; which--)
10258 switch (TREE_CODE (pmop[which]))
10259 {
10260 case BIT_AND_EXPR:
10261 case BIT_IOR_EXPR:
10262 case BIT_XOR_EXPR:
10263 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10264 != INTEGER_CST)
10265 break;
10266 cst0 = TREE_OPERAND (pmop[which], 1);
10267 cst0 &= cst1;
10268 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10269 {
10270 if (cst0 != cst1)
10271 break;
10272 }
10273 else if (cst0 != 0)
10274 break;
10275 /* If C or D is of the form (A & N) where
10276 (N & M) == M, or of the form (A | N) or
10277 (A ^ N) where (N & M) == 0, replace it with A. */
10278 pmop[which] = TREE_OPERAND (pmop[which], 0);
10279 break;
10280 case INTEGER_CST:
10281 /* If C or D is a constant N where (N & M) == 0, it can be
10282 omitted (assumed 0). */
10283 if ((TREE_CODE (arg0) == PLUS_EXPR
10284 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10285 && (cst1 & pmop[which]) == 0)
10286 pmop[which] = NULL;
10287 break;
10288 default:
10289 break;
10290 }
10291
10292 /* Only build anything new if we optimized one or both arguments
10293 above. */
10294 if (pmop[0] != TREE_OPERAND (arg0, 0)
10295 || (TREE_CODE (arg0) != NEGATE_EXPR
10296 && pmop[1] != TREE_OPERAND (arg0, 1)))
10297 {
10298 tree utype = TREE_TYPE (arg0);
10299 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10300 {
10301 /* Perform the operations in a type that has defined
10302 overflow behavior. */
10303 utype = unsigned_type_for (TREE_TYPE (arg0));
10304 if (pmop[0] != NULL)
10305 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10306 if (pmop[1] != NULL)
10307 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10308 }
10309
10310 if (TREE_CODE (arg0) == NEGATE_EXPR)
10311 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10312 else if (TREE_CODE (arg0) == PLUS_EXPR)
10313 {
10314 if (pmop[0] != NULL && pmop[1] != NULL)
10315 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10316 pmop[0], pmop[1]);
10317 else if (pmop[0] != NULL)
10318 tem = pmop[0];
10319 else if (pmop[1] != NULL)
10320 tem = pmop[1];
10321 else
10322 return build_int_cst (type, 0);
10323 }
10324 else if (pmop[0] == NULL)
10325 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10326 else
10327 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10328 pmop[0], pmop[1]);
10329 /* TEM is now the new binary +, - or unary - replacement. */
10330 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10331 fold_convert_loc (loc, utype, arg1));
10332 return fold_convert_loc (loc, type, tem);
10333 }
10334 }
10335 }
10336
10337 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10338 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10339 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10340 {
10341 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10342
10343 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10344 if (mask == -1)
10345 return
10346 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10347 }
10348
10349 goto associate;
10350
10351 case RDIV_EXPR:
10352 /* Don't touch a floating-point divide by zero unless the mode
10353 of the constant can represent infinity. */
10354 if (TREE_CODE (arg1) == REAL_CST
10355 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10356 && real_zerop (arg1))
10357 return NULL_TREE;
10358
10359 /* (-A) / (-B) -> A / B */
10360 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10361 return fold_build2_loc (loc, RDIV_EXPR, type,
10362 TREE_OPERAND (arg0, 0),
10363 negate_expr (arg1));
10364 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10365 return fold_build2_loc (loc, RDIV_EXPR, type,
10366 negate_expr (arg0),
10367 TREE_OPERAND (arg1, 0));
10368 return NULL_TREE;
10369
10370 case TRUNC_DIV_EXPR:
10371 /* Fall through */
10372
10373 case FLOOR_DIV_EXPR:
10374 /* Simplify A / (B << N) where A and B are positive and B is
10375 a power of 2, to A >> (N + log2(B)). */
10376 strict_overflow_p = false;
10377 if (TREE_CODE (arg1) == LSHIFT_EXPR
10378 && (TYPE_UNSIGNED (type)
10379 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10380 {
10381 tree sval = TREE_OPERAND (arg1, 0);
10382 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10383 {
10384 tree sh_cnt = TREE_OPERAND (arg1, 1);
10385 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10386 wi::exact_log2 (sval));
10387
10388 if (strict_overflow_p)
10389 fold_overflow_warning (("assuming signed overflow does not "
10390 "occur when simplifying A / (B << N)"),
10391 WARN_STRICT_OVERFLOW_MISC);
10392
10393 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10394 sh_cnt, pow2);
10395 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10396 fold_convert_loc (loc, type, arg0), sh_cnt);
10397 }
10398 }
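/* For instance, with B == 4 (log2 (B) == 2), A / (4 << N) becomes
   A >> (N + 2) whenever A is unsigned or known nonnegative; e.g.
   for A == 64 and N == 1 both forms yield 8.  */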
10399
10400 /* Fall through */
10401
10402 case ROUND_DIV_EXPR:
10403 case CEIL_DIV_EXPR:
10404 case EXACT_DIV_EXPR:
10405 if (integer_zerop (arg1))
10406 return NULL_TREE;
10407
10408 /* Convert -A / -B to A / B when the type is signed and overflow is
10409 undefined. */
10410 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10411 && TREE_CODE (arg0) == NEGATE_EXPR
10412 && negate_expr_p (op1))
10413 {
10414 if (INTEGRAL_TYPE_P (type))
10415 fold_overflow_warning (("assuming signed overflow does not occur "
10416 "when distributing negation across "
10417 "division"),
10418 WARN_STRICT_OVERFLOW_MISC);
10419 return fold_build2_loc (loc, code, type,
10420 fold_convert_loc (loc, type,
10421 TREE_OPERAND (arg0, 0)),
10422 negate_expr (op1));
10423 }
10424 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10425 && TREE_CODE (arg1) == NEGATE_EXPR
10426 && negate_expr_p (op0))
10427 {
10428 if (INTEGRAL_TYPE_P (type))
10429 fold_overflow_warning (("assuming signed overflow does not occur "
10430 "when distributing negation across "
10431 "division"),
10432 WARN_STRICT_OVERFLOW_MISC);
10433 return fold_build2_loc (loc, code, type,
10434 negate_expr (op0),
10435 fold_convert_loc (loc, type,
10436 TREE_OPERAND (arg1, 0)));
10437 }
10438
10439 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10440 operation, EXACT_DIV_EXPR.
10441
10442 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10443 At one time others generated faster code, but it's not clear whether
10444 they still do after the last round of changes to the DIV code in expmed.c. */
10445 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10446 && multiple_of_p (type, arg0, arg1))
10447 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10448 fold_convert (type, arg0),
10449 fold_convert (type, arg1));
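/* E.g. (X * 8) CEIL_DIV 4 is rewritten as (X * 8) EXACT_DIV 4 here,
   since multiple_of_p can prove the dividend is a multiple of the
   divisor.  */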
10450
10451 strict_overflow_p = false;
10452 if (TREE_CODE (arg1) == INTEGER_CST
10453 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10454 &strict_overflow_p)))
10455 {
10456 if (strict_overflow_p)
10457 fold_overflow_warning (("assuming signed overflow does not occur "
10458 "when simplifying division"),
10459 WARN_STRICT_OVERFLOW_MISC);
10460 return fold_convert_loc (loc, type, tem);
10461 }
10462
10463 return NULL_TREE;
10464
10465 case CEIL_MOD_EXPR:
10466 case FLOOR_MOD_EXPR:
10467 case ROUND_MOD_EXPR:
10468 case TRUNC_MOD_EXPR:
10469 strict_overflow_p = false;
10470 if (TREE_CODE (arg1) == INTEGER_CST
10471 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10472 &strict_overflow_p)))
10473 {
10474 if (strict_overflow_p)
10475 fold_overflow_warning (("assuming signed overflow does not occur "
10476 "when simplifying modulus"),
10477 WARN_STRICT_OVERFLOW_MISC);
10478 return fold_convert_loc (loc, type, tem);
10479 }
10480
10481 return NULL_TREE;
10482
10483 case LROTATE_EXPR:
10484 case RROTATE_EXPR:
10485 case RSHIFT_EXPR:
10486 case LSHIFT_EXPR:
10487 /* Since a negative shift count is not well-defined,
10488 don't try to compute it in the compiler. */
10489 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10490 return NULL_TREE;
10491
10492 prec = element_precision (type);
10493
10494 /* If we have a rotate of a bit operation with the rotate count and
10495 the second operand of the bit operation both constant,
10496 permute the two operations. */
10497 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10498 && (TREE_CODE (arg0) == BIT_AND_EXPR
10499 || TREE_CODE (arg0) == BIT_IOR_EXPR
10500 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10501 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10502 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10503 fold_build2_loc (loc, code, type,
10504 TREE_OPERAND (arg0, 0), arg1),
10505 fold_build2_loc (loc, code, type,
10506 TREE_OPERAND (arg0, 1), arg1));
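/* E.g. on a 32-bit type, (X & 0xff00) rrotate 8 becomes
   (X rrotate 8) & (0xff00 rrotate 8), i.e. (X rrotate 8) & 0xff.  */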
10507
10508 /* Two consecutive rotates adding up to some integer
10509 multiple of the precision of the type can be ignored. */
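/* E.g. on a 32-bit type, (X rrotate 24) rrotate 8 is just X,
   because 24 + 8 == 32.  */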
10510 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10511 && TREE_CODE (arg0) == RROTATE_EXPR
10512 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10513 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10514 prec) == 0)
10515 return TREE_OPERAND (arg0, 0);
10516
10517 return NULL_TREE;
10518
10519 case MIN_EXPR:
10520 case MAX_EXPR:
10521 goto associate;
10522
10523 case TRUTH_ANDIF_EXPR:
10524 /* Note that the operands of this must be ints
10525 and their values must be 0 or 1.
10526 ("true" is a fixed value perhaps depending on the language.) */
10527 /* If first arg is constant zero, return it. */
10528 if (integer_zerop (arg0))
10529 return fold_convert_loc (loc, type, arg0);
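/* Fall through */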
10530 case TRUTH_AND_EXPR:
10531 /* If either arg is constant true, drop it. */
10532 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10533 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10534 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10535 /* Preserve sequence points. */
10536 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10537 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10538 /* If second arg is constant zero, result is zero, but first arg
10539 must be evaluated. */
10540 if (integer_zerop (arg1))
10541 return omit_one_operand_loc (loc, type, arg1, arg0);
10542 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10543 case will be handled here. */
10544 if (integer_zerop (arg0))
10545 return omit_one_operand_loc (loc, type, arg0, arg1);
10546
10547 /* !X && X is always false. */
10548 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10549 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10550 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10551 /* X && !X is always false. */
10552 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10553 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10554 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10555
10556 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10557 means A >= Y && A != MAX, but in this case we know that
10558 A < X <= MAX. */
10559
10560 if (!TREE_SIDE_EFFECTS (arg0)
10561 && !TREE_SIDE_EFFECTS (arg1))
10562 {
10563 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10564 if (tem && !operand_equal_p (tem, arg0, 0))
10565 return fold_build2_loc (loc, code, type, tem, arg1);
10566
10567 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10568 if (tem && !operand_equal_p (tem, arg1, 0))
10569 return fold_build2_loc (loc, code, type, arg0, tem);
10570 }
10571
10572 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10573 != NULL_TREE)
10574 return tem;
10575
10576 return NULL_TREE;
10577
10578 case TRUTH_ORIF_EXPR:
10579 /* Note that the operands of this must be ints
10580 and their values must be 0 or true.
10581 ("true" is a fixed value perhaps depending on the language.) */
10582 /* If first arg is constant true, return it. */
10583 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10584 return fold_convert_loc (loc, type, arg0);
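/* Fall through */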
10585 case TRUTH_OR_EXPR:
10586 /* If either arg is constant zero, drop it. */
10587 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10588 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10589 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10590 /* Preserve sequence points. */
10591 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10592 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10593 /* If second arg is constant true, result is true, but we must
10594 evaluate first arg. */
10595 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10596 return omit_one_operand_loc (loc, type, arg1, arg0);
10597 /* Likewise for first arg, but note this only occurs here for
10598 TRUTH_OR_EXPR. */
10599 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10600 return omit_one_operand_loc (loc, type, arg0, arg1);
10601
10602 /* !X || X is always true. */
10603 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10604 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10605 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10606 /* X || !X is always true. */
10607 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10608 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10609 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10610
10611 /* (X && !Y) || (!X && Y) is X ^ Y */
10612 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10613 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10614 {
10615 tree a0, a1, l0, l1, n0, n1;
10616
10617 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10618 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10619
10620 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10621 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10622
10623 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10624 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10625
10626 if ((operand_equal_p (n0, a0, 0)
10627 && operand_equal_p (n1, a1, 0))
10628 || (operand_equal_p (n0, a1, 0)
10629 && operand_equal_p (n1, a0, 0)))
10630 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10631 }
10632
10633 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10634 != NULL_TREE)
10635 return tem;
10636
10637 return NULL_TREE;
10638
10639 case TRUTH_XOR_EXPR:
10640 /* If the second arg is constant zero, drop it. */
10641 if (integer_zerop (arg1))
10642 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10643 /* If the second arg is constant true, this is a logical inversion. */
10644 if (integer_onep (arg1))
10645 {
10646 tem = invert_truthvalue_loc (loc, arg0);
10647 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10648 }
10649 /* Identical arguments cancel to zero. */
10650 if (operand_equal_p (arg0, arg1, 0))
10651 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10652
10653 /* !X ^ X is always true. */
10654 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10655 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10656 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10657
10658 /* X ^ !X is always true. */
10659 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10660 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10661 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10662
10663 return NULL_TREE;
10664
10665 case EQ_EXPR:
10666 case NE_EXPR:
10667 STRIP_NOPS (arg0);
10668 STRIP_NOPS (arg1);
10669
10670 tem = fold_comparison (loc, code, type, op0, op1);
10671 if (tem != NULL_TREE)
10672 return tem;
10673
10674 /* bool_var != 1 becomes !bool_var. */
10675 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10676 && code == NE_EXPR)
10677 return fold_convert_loc (loc, type,
10678 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10679 TREE_TYPE (arg0), arg0));
10680
10681 /* bool_var == 0 becomes !bool_var. */
10682 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10683 && code == EQ_EXPR)
10684 return fold_convert_loc (loc, type,
10685 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10686 TREE_TYPE (arg0), arg0));
10687
10688 /* !exp != 0 becomes !exp */
10689 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10690 && code == NE_EXPR)
10691 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10692
10693 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10694 if ((TREE_CODE (arg0) == PLUS_EXPR
10695 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10696 || TREE_CODE (arg0) == MINUS_EXPR)
10697 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10698 0)),
10699 arg1, 0)
10700 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10701 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10702 {
10703 tree val = TREE_OPERAND (arg0, 1);
10704 val = fold_build2_loc (loc, code, type, val,
10705 build_int_cst (TREE_TYPE (val), 0));
10706 return omit_two_operands_loc (loc, type, val,
10707 TREE_OPERAND (arg0, 0), arg1);
10708 }
10709
10710 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10711 if ((TREE_CODE (arg1) == PLUS_EXPR
10712 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10713 || TREE_CODE (arg1) == MINUS_EXPR)
10714 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10715 0)),
10716 arg0, 0)
10717 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10718 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10719 {
10720 tree val = TREE_OPERAND (arg1, 1);
10721 val = fold_build2_loc (loc, code, type, val,
10722 build_int_cst (TREE_TYPE (val), 0));
10723 return omit_two_operands_loc (loc, type, val,
10724 TREE_OPERAND (arg1, 0), arg0);
10725 }
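/* E.g. X + Y == X folds to Y == 0, with X still evaluated for side
   effects via omit_two_operands; this holds even for wrapping
   arithmetic, since adding Y is invertible modulo 2^prec.  */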
10726
10727 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10728 if (TREE_CODE (arg0) == MINUS_EXPR
10729 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10730 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10731 1)),
10732 arg1, 0)
10733 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10734 return omit_two_operands_loc (loc, type,
10735 code == NE_EXPR
10736 ? boolean_true_node : boolean_false_node,
10737 TREE_OPERAND (arg0, 1), arg1);
10738
10739 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10740 if (TREE_CODE (arg1) == MINUS_EXPR
10741 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10742 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10743 1)),
10744 arg0, 0)
10745 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10746 return omit_two_operands_loc (loc, type,
10747 code == NE_EXPR
10748 ? boolean_true_node : boolean_false_node,
10749 TREE_OPERAND (arg1, 1), arg0);
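/* E.g. 7 - X == X is always false: it would need 2*X == 7, and
   2*X is even even with wraparound while 7 is odd; hence the check
   that the low bit of C is 1 via wi::extract_uhwi.  */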
10750
10751 /* If this is an EQ or NE comparison with zero and ARG0 is
10752 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10753 two operations, but the latter can be done in one less insn
10754 on machines that have only two-operand insns or on which a
10755 constant cannot be the first operand. */
10756 if (TREE_CODE (arg0) == BIT_AND_EXPR
10757 && integer_zerop (arg1))
10758 {
10759 tree arg00 = TREE_OPERAND (arg0, 0);
10760 tree arg01 = TREE_OPERAND (arg0, 1);
10761 if (TREE_CODE (arg00) == LSHIFT_EXPR
10762 && integer_onep (TREE_OPERAND (arg00, 0)))
10763 {
10764 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10765 arg01, TREE_OPERAND (arg00, 1));
10766 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10767 build_int_cst (TREE_TYPE (arg0), 1));
10768 return fold_build2_loc (loc, code, type,
10769 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10770 arg1);
10771 }
10772 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10773 && integer_onep (TREE_OPERAND (arg01, 0)))
10774 {
10775 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10776 arg00, TREE_OPERAND (arg01, 1));
10777 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10778 build_int_cst (TREE_TYPE (arg0), 1));
10779 return fold_build2_loc (loc, code, type,
10780 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10781 arg1);
10782 }
10783 }
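/* E.g. ((1 << N) & X) != 0 becomes ((X >> N) & 1) != 0; both test
   bit N of X, but the latter needs one less insn on the machines
   described above.  */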
10784
10785 /* If this is an NE or EQ comparison of zero against the result of a
10786 signed MOD operation whose second operand is a power of 2, make
10787 the MOD operation unsigned since it is simpler and equivalent. */
10788 if (integer_zerop (arg1)
10789 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10790 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10791 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10792 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10793 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10794 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10795 {
10796 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10797 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10798 fold_convert_loc (loc, newtype,
10799 TREE_OPERAND (arg0, 0)),
10800 fold_convert_loc (loc, newtype,
10801 TREE_OPERAND (arg0, 1)));
10802
10803 return fold_build2_loc (loc, code, type, newmod,
10804 fold_convert_loc (loc, newtype, arg1));
10805 }
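/* E.g. for signed int X, X % 4 == 0 becomes (unsigned) X % 4U == 0:
   with a power-of-two modulus the remainder is zero exactly when
   the low bits of X are zero, whatever the signedness.  */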
10806
10807 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10808 C1 is a valid shift constant, and C2 is a power of two, i.e.
10809 a single bit. */
10810 if (TREE_CODE (arg0) == BIT_AND_EXPR
10811 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10812 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10813 == INTEGER_CST
10814 && integer_pow2p (TREE_OPERAND (arg0, 1))
10815 && integer_zerop (arg1))
10816 {
10817 tree itype = TREE_TYPE (arg0);
10818 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10819 prec = TYPE_PRECISION (itype);
10820
10821 /* Check for a valid shift count. */
10822 if (wi::ltu_p (arg001, prec))
10823 {
10824 tree arg01 = TREE_OPERAND (arg0, 1);
10825 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10826 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10827 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10828 can be rewritten as (X & (C2 << C1)) != 0. */
10829 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10830 {
10831 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10832 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10833 return fold_build2_loc (loc, code, type, tem,
10834 fold_convert_loc (loc, itype, arg1));
10835 }
10836 /* Otherwise, for signed (arithmetic) shifts,
10837 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10838 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10839 else if (!TYPE_UNSIGNED (itype))
10840 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10841 arg000, build_int_cst (itype, 0));
10842 /* Otherwise, for unsigned (logical) shifts,
10843 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10844 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10845 else
10846 return omit_one_operand_loc (loc, type,
10847 code == EQ_EXPR ? integer_one_node
10848 : integer_zero_node,
10849 arg000);
10850 }
10851 }
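/* E.g. with C1 == 2 and C2 == 4, ((X >> 2) & 4) != 0 becomes
   (X & 16) != 0, since 4 << 2 == 16 does not overflow.  */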
10852
10853 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10854 Similarly for NE_EXPR. */
10855 if (TREE_CODE (arg0) == BIT_AND_EXPR
10856 && TREE_CODE (arg1) == INTEGER_CST
10857 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10858 {
10859 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10860 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10861 TREE_OPERAND (arg0, 1));
10862 tree dandnotc
10863 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10864 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10865 notc);
10866 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10867 if (integer_nonzerop (dandnotc))
10868 return omit_one_operand_loc (loc, type, rslt, arg0);
10869 }
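/* E.g. (X & 3) == 4 folds to 0 and (X & 3) != 4 folds to 1:
   4 & ~3 is nonzero, so no value of X & 3 can equal 4.  */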
10870
10871 /* If this is a comparison of a field, we may be able to simplify it. */
10872 if ((TREE_CODE (arg0) == COMPONENT_REF
10873 || TREE_CODE (arg0) == BIT_FIELD_REF)
10874 /* Handle the constant case even without -O
10875 to make sure the warnings are given. */
10876 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10877 {
10878 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10879 if (t1)
10880 return t1;
10881 }
10882
10883 /* Optimize comparisons of strlen vs zero to a compare of the
10884 first character of the string vs zero. To wit,
10885 strlen(ptr) == 0 => *ptr == 0
10886 strlen(ptr) != 0 => *ptr != 0
10887 Other cases should reduce to one of these two (or a constant)
10888 due to the return value of strlen being unsigned. */
10889 if (TREE_CODE (arg0) == CALL_EXPR
10890 && integer_zerop (arg1))
10891 {
10892 tree fndecl = get_callee_fndecl (arg0);
10893
10894 if (fndecl
10895 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10896 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10897 && call_expr_nargs (arg0) == 1
10898 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10899 {
10900 tree iref = build_fold_indirect_ref_loc (loc,
10901 CALL_EXPR_ARG (arg0, 0));
10902 return fold_build2_loc (loc, code, type, iref,
10903 build_int_cst (TREE_TYPE (iref), 0));
10904 }
10905 }
10906
10907 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10908 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10909 if (TREE_CODE (arg0) == RSHIFT_EXPR
10910 && integer_zerop (arg1)
10911 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10912 {
10913 tree arg00 = TREE_OPERAND (arg0, 0);
10914 tree arg01 = TREE_OPERAND (arg0, 1);
10915 tree itype = TREE_TYPE (arg00);
10916 if (wi::eq_p (arg01, element_precision (itype) - 1))
10917 {
10918 if (TYPE_UNSIGNED (itype))
10919 {
10920 itype = signed_type_for (itype);
10921 arg00 = fold_convert_loc (loc, itype, arg00);
10922 }
10923 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10924 type, arg00, build_zero_cst (itype));
10925 }
10926 }
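/* E.g. for 32-bit int X, (X >> 31) != 0 becomes X < 0: the
   arithmetic shift by precision - 1 leaves only copies of the
   sign bit.  */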
10927
10928 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10929 (X & C) == 0 when C is a single bit. */
10930 if (TREE_CODE (arg0) == BIT_AND_EXPR
10931 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10932 && integer_zerop (arg1)
10933 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10934 {
10935 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10936 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10937 TREE_OPERAND (arg0, 1));
10938 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10939 type, tem,
10940 fold_convert_loc (loc, TREE_TYPE (arg0),
10941 arg1));
10942 }
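/* E.g. (~X & 8) == 0 becomes (X & 8) != 0: the single bit 8 is
   clear in ~X exactly when it is set in X.  */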
10943
10944 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10945 constant C is a power of two, i.e. a single bit. */
10946 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10947 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10948 && integer_zerop (arg1)
10949 && integer_pow2p (TREE_OPERAND (arg0, 1))
10950 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10951 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10952 {
10953 tree arg00 = TREE_OPERAND (arg0, 0);
10954 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10955 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10956 }
10957
10958 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10959 when C is a power of two, i.e. a single bit.
10960 if (TREE_CODE (arg0) == BIT_AND_EXPR
10961 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10962 && integer_zerop (arg1)
10963 && integer_pow2p (TREE_OPERAND (arg0, 1))
10964 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10965 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10966 {
10967 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10968 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10969 arg000, TREE_OPERAND (arg0, 1));
10970 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10971 tem, build_int_cst (TREE_TYPE (tem), 0));
10972 }
10973
10974 if (integer_zerop (arg1)
10975 && tree_expr_nonzero_p (arg0))
10976 {
10977 tree res = constant_boolean_node (code == NE_EXPR, type);
10978 return omit_one_operand_loc (loc, type, res, arg0);
10979 }
10980
10981 /* Fold (X & C) op (Y & C) as ((X ^ Y) & C) op 0, and symmetries. */
10982 if (TREE_CODE (arg0) == BIT_AND_EXPR
10983 && TREE_CODE (arg1) == BIT_AND_EXPR)
10984 {
10985 tree arg00 = TREE_OPERAND (arg0, 0);
10986 tree arg01 = TREE_OPERAND (arg0, 1);
10987 tree arg10 = TREE_OPERAND (arg1, 0);
10988 tree arg11 = TREE_OPERAND (arg1, 1);
10989 tree itype = TREE_TYPE (arg0);
10990
10991 if (operand_equal_p (arg01, arg11, 0))
10992 return fold_build2_loc (loc, code, type,
10993 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10994 fold_build2_loc (loc,
10995 BIT_XOR_EXPR, itype,
10996 arg00, arg10),
10997 arg01),
10998 build_zero_cst (itype));
10999
11000 if (operand_equal_p (arg01, arg10, 0))
11001 return fold_build2_loc (loc, code, type,
11002 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11003 fold_build2_loc (loc,
11004 BIT_XOR_EXPR, itype,
11005 arg00, arg11),
11006 arg01),
11007 build_zero_cst (itype));
11008
11009 if (operand_equal_p (arg00, arg11, 0))
11010 return fold_build2_loc (loc, code, type,
11011 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11012 fold_build2_loc (loc,
11013 BIT_XOR_EXPR, itype,
11014 arg01, arg10),
11015 arg00),
11016 build_zero_cst (itype));
11017
11018 if (operand_equal_p (arg00, arg10, 0))
11019 return fold_build2_loc (loc, code, type,
11020 fold_build2_loc (loc, BIT_AND_EXPR, itype,
11021 fold_build2_loc (loc,
11022 BIT_XOR_EXPR, itype,
11023 arg01, arg11),
11024 arg00),
11025 build_zero_cst (itype));
11026 }
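/* E.g. (X & 7) == (Y & 7) becomes ((X ^ Y) & 7) == 0: the masked
   values agree exactly when X and Y agree on the masked bits.  */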
11027
11028 if (TREE_CODE (arg0) == BIT_XOR_EXPR
11029 && TREE_CODE (arg1) == BIT_XOR_EXPR)
11030 {
11031 tree arg00 = TREE_OPERAND (arg0, 0);
11032 tree arg01 = TREE_OPERAND (arg0, 1);
11033 tree arg10 = TREE_OPERAND (arg1, 0);
11034 tree arg11 = TREE_OPERAND (arg1, 1);
11035 tree itype = TREE_TYPE (arg0);
11036
11037 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
11038 operand_equal_p guarantees no side-effects so we don't need
11039 to use omit_one_operand on Z. */
11040 if (operand_equal_p (arg01, arg11, 0))
11041 return fold_build2_loc (loc, code, type, arg00,
11042 fold_convert_loc (loc, TREE_TYPE (arg00),
11043 arg10));
11044 if (operand_equal_p (arg01, arg10, 0))
11045 return fold_build2_loc (loc, code, type, arg00,
11046 fold_convert_loc (loc, TREE_TYPE (arg00),
11047 arg11));
11048 if (operand_equal_p (arg00, arg11, 0))
11049 return fold_build2_loc (loc, code, type, arg01,
11050 fold_convert_loc (loc, TREE_TYPE (arg01),
11051 arg10));
11052 if (operand_equal_p (arg00, arg10, 0))
11053 return fold_build2_loc (loc, code, type, arg01,
11054 fold_convert_loc (loc, TREE_TYPE (arg01),
11055 arg11));
11056
11057 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
11058 if (TREE_CODE (arg01) == INTEGER_CST
11059 && TREE_CODE (arg11) == INTEGER_CST)
11060 {
11061 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
11062 fold_convert_loc (loc, itype, arg11));
11063 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
11064 return fold_build2_loc (loc, code, type, tem,
11065 fold_convert_loc (loc, itype, arg10));
11066 }
11067 }
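/* E.g. (X ^ 5) == (Y ^ 3) becomes (X ^ (5 ^ 3)) == Y, i.e.
   (X ^ 6) == Y, by xoring both sides with 3.  */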
11068
11069 /* Attempt to simplify equality/inequality comparisons of complex
11070 values. Only lower the comparison if the result is known or
11071 can be simplified to a single scalar comparison. */
11072 if ((TREE_CODE (arg0) == COMPLEX_EXPR
11073 || TREE_CODE (arg0) == COMPLEX_CST)
11074 && (TREE_CODE (arg1) == COMPLEX_EXPR
11075 || TREE_CODE (arg1) == COMPLEX_CST))
11076 {
11077 tree real0, imag0, real1, imag1;
11078 tree rcond, icond;
11079
11080 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11081 {
11082 real0 = TREE_OPERAND (arg0, 0);
11083 imag0 = TREE_OPERAND (arg0, 1);
11084 }
11085 else
11086 {
11087 real0 = TREE_REALPART (arg0);
11088 imag0 = TREE_IMAGPART (arg0);
11089 }
11090
11091 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11092 {
11093 real1 = TREE_OPERAND (arg1, 0);
11094 imag1 = TREE_OPERAND (arg1, 1);
11095 }
11096 else
11097 {
11098 real1 = TREE_REALPART (arg1);
11099 imag1 = TREE_IMAGPART (arg1);
11100 }
11101
11102 rcond = fold_binary_loc (loc, code, type, real0, real1);
11103 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11104 {
11105 if (integer_zerop (rcond))
11106 {
11107 if (code == EQ_EXPR)
11108 return omit_two_operands_loc (loc, type, boolean_false_node,
11109 imag0, imag1);
11110 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11111 }
11112 else
11113 {
11114 if (code == NE_EXPR)
11115 return omit_two_operands_loc (loc, type, boolean_true_node,
11116 imag0, imag1);
11117 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11118 }
11119 }
11120
11121 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11122 if (icond && TREE_CODE (icond) == INTEGER_CST)
11123 {
11124 if (integer_zerop (icond))
11125 {
11126 if (code == EQ_EXPR)
11127 return omit_two_operands_loc (loc, type, boolean_false_node,
11128 real0, real1);
11129 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11130 }
11131 else
11132 {
11133 if (code == NE_EXPR)
11134 return omit_two_operands_loc (loc, type, boolean_true_node,
11135 real0, real1);
11136 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11137 }
11138 }
11139 }
11140
11141 return NULL_TREE;
11142
11143 case LT_EXPR:
11144 case GT_EXPR:
11145 case LE_EXPR:
11146 case GE_EXPR:
11147 tem = fold_comparison (loc, code, type, op0, op1);
11148 if (tem != NULL_TREE)
11149 return tem;
11150
11151 /* Transform comparisons of the form X +- C CMP X. */
11152 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11153 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11154 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11155 && !HONOR_SNANS (arg0))
11156 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11157 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11158 {
11159 tree arg01 = TREE_OPERAND (arg0, 1);
11160 enum tree_code code0 = TREE_CODE (arg0);
11161 int is_positive;
11162
11163 if (TREE_CODE (arg01) == REAL_CST)
11164 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11165 else
11166 is_positive = tree_int_cst_sgn (arg01);
11167
11168 /* (X - c) > X becomes false. */
11169 if (code == GT_EXPR
11170 && ((code0 == MINUS_EXPR && is_positive >= 0)
11171 || (code0 == PLUS_EXPR && is_positive <= 0)))
11172 {
11173 if (TREE_CODE (arg01) == INTEGER_CST
11174 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11175 fold_overflow_warning (("assuming signed overflow does not "
11176 "occur when assuming that (X - c) > X "
11177 "is always false"),
11178 WARN_STRICT_OVERFLOW_ALL);
11179 return constant_boolean_node (0, type);
11180 }
11181
11182 /* Likewise (X + c) < X becomes false. */
11183 if (code == LT_EXPR
11184 && ((code0 == PLUS_EXPR && is_positive >= 0)
11185 || (code0 == MINUS_EXPR && is_positive <= 0)))
11186 {
11187 if (TREE_CODE (arg01) == INTEGER_CST
11188 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11189 fold_overflow_warning (("assuming signed overflow does not "
11190 "occur when assuming that "
11191 "(X + c) < X is always false"),
11192 WARN_STRICT_OVERFLOW_ALL);
11193 return constant_boolean_node (0, type);
11194 }
11195
11196 /* Convert (X - c) <= X to true. */
11197 if (!HONOR_NANS (arg1)
11198 && code == LE_EXPR
11199 && ((code0 == MINUS_EXPR && is_positive >= 0)
11200 || (code0 == PLUS_EXPR && is_positive <= 0)))
11201 {
11202 if (TREE_CODE (arg01) == INTEGER_CST
11203 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11204 fold_overflow_warning (("assuming signed overflow does not "
11205 "occur when assuming that "
11206 "(X - c) <= X is always true"),
11207 WARN_STRICT_OVERFLOW_ALL);
11208 return constant_boolean_node (1, type);
11209 }
11210
11211 /* Convert (X + c) >= X to true. */
11212 if (!HONOR_NANS (arg1)
11213 && code == GE_EXPR
11214 && ((code0 == PLUS_EXPR && is_positive >= 0)
11215 || (code0 == MINUS_EXPR && is_positive <= 0)))
11216 {
11217 if (TREE_CODE (arg01) == INTEGER_CST
11218 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11219 fold_overflow_warning (("assuming signed overflow does not "
11220 "occur when assuming that "
11221 "(X + c) >= X is always true"),
11222 WARN_STRICT_OVERFLOW_ALL);
11223 return constant_boolean_node (1, type);
11224 }
11225
11226 if (TREE_CODE (arg01) == INTEGER_CST)
11227 {
11228 /* Convert X + c > X and X - c < X to true for integers. */
11229 if (code == GT_EXPR
11230 && ((code0 == PLUS_EXPR && is_positive > 0)
11231 || (code0 == MINUS_EXPR && is_positive < 0)))
11232 {
11233 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11234 fold_overflow_warning (("assuming signed overflow does "
11235 "not occur when assuming that "
11236 "(X + c) > X is always true"),
11237 WARN_STRICT_OVERFLOW_ALL);
11238 return constant_boolean_node (1, type);
11239 }
11240
11241 if (code == LT_EXPR
11242 && ((code0 == MINUS_EXPR && is_positive > 0)
11243 || (code0 == PLUS_EXPR && is_positive < 0)))
11244 {
11245 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11246 fold_overflow_warning (("assuming signed overflow does "
11247 "not occur when assuming that "
11248 "(X - c) < X is always true"),
11249 WARN_STRICT_OVERFLOW_ALL);
11250 return constant_boolean_node (1, type);
11251 }
11252
11253 /* Convert X + c <= X and X - c >= X to false for integers. */
11254 if (code == LE_EXPR
11255 && ((code0 == PLUS_EXPR && is_positive > 0)
11256 || (code0 == MINUS_EXPR && is_positive < 0)))
11257 {
11258 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11259 fold_overflow_warning (("assuming signed overflow does "
11260 "not occur when assuming that "
11261 "(X + c) <= X is always false"),
11262 WARN_STRICT_OVERFLOW_ALL);
11263 return constant_boolean_node (0, type);
11264 }
11265
11266 if (code == GE_EXPR
11267 && ((code0 == MINUS_EXPR && is_positive > 0)
11268 || (code0 == PLUS_EXPR && is_positive < 0)))
11269 {
11270 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11271 fold_overflow_warning (("assuming signed overflow does "
11272 "not occur when assuming that "
11273 "(X - c) >= X is always false"),
11274 WARN_STRICT_OVERFLOW_ALL);
11275 return constant_boolean_node (0, type);
11276 }
11277 }
11278 }
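/* E.g. for signed int X with undefined overflow, X + 1 > X folds
   to true and X + 1 <= X folds to false, each after noting via
   fold_overflow_warning that signed overflow is assumed not to
   occur.  */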
11279
11280 /* If we are comparing an ABS_EXPR with a constant, we can
11281 convert all the cases into explicit comparisons, but they may
11282 well not be faster than doing the ABS and one comparison.
11283 But ABS (X) <= C is a range comparison, which becomes a subtraction
11284 and a comparison, and is probably faster. */
11285 if (code == LE_EXPR
11286 && TREE_CODE (arg1) == INTEGER_CST
11287 && TREE_CODE (arg0) == ABS_EXPR
11288 && ! TREE_SIDE_EFFECTS (arg0)
11289 && (0 != (tem = negate_expr (arg1)))
11290 && TREE_CODE (tem) == INTEGER_CST
11291 && !TREE_OVERFLOW (tem))
11292 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11293 build2 (GE_EXPR, type,
11294 TREE_OPERAND (arg0, 0), tem),
11295 build2 (LE_EXPR, type,
11296 TREE_OPERAND (arg0, 0), arg1));
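/* E.g. ABS (X) <= 5 becomes X >= -5 && X <= 5, trading the ABS
   for a second comparison.  */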
11297
11298 /* Convert ABS_EXPR<x> >= 0 to true. */
11299 strict_overflow_p = false;
11300 if (code == GE_EXPR
11301 && (integer_zerop (arg1)
11302 || (! HONOR_NANS (arg0)
11303 && real_zerop (arg1)))
11304 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11305 {
11306 if (strict_overflow_p)
11307 fold_overflow_warning (("assuming signed overflow does not occur "
11308 "when simplifying comparison of "
11309 "absolute value and zero"),
11310 WARN_STRICT_OVERFLOW_CONDITIONAL);
11311 return omit_one_operand_loc (loc, type,
11312 constant_boolean_node (true, type),
11313 arg0);
11314 }
11315
11316 /* Convert ABS_EXPR<x> < 0 to false. */
11317 strict_overflow_p = false;
11318 if (code == LT_EXPR
11319 && (integer_zerop (arg1) || real_zerop (arg1))
11320 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11321 {
11322 if (strict_overflow_p)
11323 fold_overflow_warning (("assuming signed overflow does not occur "
11324 "when simplifying comparison of "
11325 "absolute value and zero"),
11326 WARN_STRICT_OVERFLOW_CONDITIONAL);
11327 return omit_one_operand_loc (loc, type,
11328 constant_boolean_node (false, type),
11329 arg0);
11330 }
11331
11332 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11333 and similarly for >= into !=. */
11334 if ((code == LT_EXPR || code == GE_EXPR)
11335 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11336 && TREE_CODE (arg1) == LSHIFT_EXPR
11337 && integer_onep (TREE_OPERAND (arg1, 0)))
11338 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11339 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11340 TREE_OPERAND (arg1, 1)),
11341 build_zero_cst (TREE_TYPE (arg0)));
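/* E.g. for unsigned X, X < (1 << Y) becomes (X >> Y) == 0: X has
   a bit at position Y or above exactly when X >> Y is nonzero.  */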
11342
11343 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11344 otherwise Y might be >= # of bits in X's type and thus e.g.
11345 (unsigned char) (1 << Y) for Y == 15 might be 0.
11346 If the cast is widening, then 1 << Y should have unsigned type,
11347 otherwise if Y is number of bits in the signed shift type minus 1,
11348 we can't optimize this. E.g. (unsigned long long) (1 << Y) for
11349 Y == 31 might be 0xffffffff80000000. */
11350 if ((code == LT_EXPR || code == GE_EXPR)
11351 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11352 && CONVERT_EXPR_P (arg1)
11353 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11354 && (element_precision (TREE_TYPE (arg1))
11355 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11356 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11357 || (element_precision (TREE_TYPE (arg1))
11358 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11359 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11360 {
11361 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11362 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11363 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11364 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11365 build_zero_cst (TREE_TYPE (arg0)));
11366 }
11367
11368 return NULL_TREE;
11369
11370 case UNORDERED_EXPR:
11371 case ORDERED_EXPR:
11372 case UNLT_EXPR:
11373 case UNLE_EXPR:
11374 case UNGT_EXPR:
11375 case UNGE_EXPR:
11376 case UNEQ_EXPR:
11377 case LTGT_EXPR:
11378 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11379 {
11380 tree targ0 = strip_float_extensions (arg0);
11381 tree targ1 = strip_float_extensions (arg1);
11382 tree newtype = TREE_TYPE (targ0);
11383
11384 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11385 newtype = TREE_TYPE (targ1);
11386
11387 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11388 return fold_build2_loc (loc, code, type,
11389 fold_convert_loc (loc, newtype, targ0),
11390 fold_convert_loc (loc, newtype, targ1));
11391 }
11392
11393 return NULL_TREE;
11394
11395 case COMPOUND_EXPR:
11396 /* When pedantic, a compound expression can be neither an lvalue
11397 nor an integer constant expression. */
11398 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11399 return NULL_TREE;
11400 /* Don't let (0, 0) be a null pointer constant. */
11401 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11402 : fold_convert_loc (loc, type, arg1);
11403 return pedantic_non_lvalue_loc (loc, tem);
11404
11405 case ASSERT_EXPR:
11406 /* An ASSERT_EXPR should never be passed to fold_binary. */
11407 gcc_unreachable ();
11408
11409 default:
11410 return NULL_TREE;
11411 } /* switch (code) */
11412 }
11413
11414 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11415 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11416 of GOTO_EXPR. */
11417
11418 static tree
11419 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11420 {
11421 switch (TREE_CODE (*tp))
11422 {
11423 case LABEL_EXPR:
11424 return *tp;
11425
11426 case GOTO_EXPR:
11427 *walk_subtrees = 0;
11428
11429 /* ... fall through ... */
11430
11431 default:
11432 return NULL_TREE;
11433 }
11434 }
11435
11436 /* Return whether the sub-tree ST contains a label which is accessible from
11437 outside the sub-tree. */
11438
11439 static bool
11440 contains_label_p (tree st)
11441 {
11442 return
11443 (walk_tree_without_duplicates (&st, contains_label_1, NULL) != NULL_TREE);
11444 }
11445
11446 /* Fold a ternary expression of code CODE and type TYPE with operands
11447 OP0, OP1, and OP2. Return the folded expression if folding is
11448 successful. Otherwise, return NULL_TREE. */
11449
11450 tree
11451 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11452 tree op0, tree op1, tree op2)
11453 {
11454 tree tem;
11455 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11456 enum tree_code_class kind = TREE_CODE_CLASS (code);
11457
11458 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11459 && TREE_CODE_LENGTH (code) == 3);
11460
11461 /* If this is a commutative operation, and OP0 is a constant, move it
11462 to OP1 to reduce the number of tests below. */
11463 if (commutative_ternary_tree_code (code)
11464 && tree_swap_operands_p (op0, op1, true))
11465 return fold_build3_loc (loc, code, type, op1, op0, op2);
11466
11467 tem = generic_simplify (loc, code, type, op0, op1, op2);
11468 if (tem)
11469 return tem;
11470
11471 /* Strip any conversions that don't change the mode. This is safe
11472 for every expression, except for a comparison expression because
11473 its signedness is derived from its operands. So, in the latter
11474 case, only strip conversions that don't change the signedness.
11475
11476 Note that this is done as an internal manipulation within the
11477 constant folder, in order to find the simplest representation of
11478 the arguments so that their form can be studied. In any cases,
11479 the appropriate type conversions should be put back in the tree
11480 that will get out of the constant folder. */
11481 if (op0)
11482 {
11483 arg0 = op0;
11484 STRIP_NOPS (arg0);
11485 }
11486
11487 if (op1)
11488 {
11489 arg1 = op1;
11490 STRIP_NOPS (arg1);
11491 }
11492
11493 if (op2)
11494 {
11495 arg2 = op2;
11496 STRIP_NOPS (arg2);
11497 }
11498
11499 switch (code)
11500 {
11501 case COMPONENT_REF:
11502 if (TREE_CODE (arg0) == CONSTRUCTOR
11503 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11504 {
11505 unsigned HOST_WIDE_INT idx;
11506 tree field, value;
11507 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11508 if (field == arg1)
11509 return value;
11510 }
11511 return NULL_TREE;
11512
11513 case COND_EXPR:
11514 case VEC_COND_EXPR:
11515 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11516 so all simple results must be passed through pedantic_non_lvalue. */
11517 if (TREE_CODE (arg0) == INTEGER_CST)
11518 {
11519 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11520 tem = integer_zerop (arg0) ? op2 : op1;
11521 /* Only optimize constant conditions when the selected branch
11522 has the same type as the COND_EXPR. This avoids optimizing
11523 away "c ? x : throw", where the throw has a void type.
11524 Avoid throwing away an operand that contains a label. */
11525 if ((!TREE_SIDE_EFFECTS (unused_op)
11526 || !contains_label_p (unused_op))
11527 && (! VOID_TYPE_P (TREE_TYPE (tem))
11528 || VOID_TYPE_P (type)))
11529 return pedantic_non_lvalue_loc (loc, tem);
11530 return NULL_TREE;
11531 }
11532 else if (TREE_CODE (arg0) == VECTOR_CST)
11533 {
11534 if ((TREE_CODE (arg1) == VECTOR_CST
11535 || TREE_CODE (arg1) == CONSTRUCTOR)
11536 && (TREE_CODE (arg2) == VECTOR_CST
11537 || TREE_CODE (arg2) == CONSTRUCTOR))
11538 {
11539 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11540 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11541 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11542 for (i = 0; i < nelts; i++)
11543 {
11544 tree val = VECTOR_CST_ELT (arg0, i);
11545 if (integer_all_onesp (val))
11546 sel[i] = i;
11547 else if (integer_zerop (val))
11548 sel[i] = nelts + i;
11549 else /* Currently unreachable. */
11550 return NULL_TREE;
11551 }
11552 tree t = fold_vec_perm (type, arg1, arg2, sel);
11553 if (t != NULL_TREE)
11554 return t;
11555 }
11556 }
11557
11558 /* If we have A op B ? A : C, we may be able to convert this to a
11559 simpler expression, depending on the operation and the values
11560 of B and C. Signed zeros prevent all of these transformations,
11561 for reasons given above each one.
11562
11563 Also try swapping the arguments and inverting the conditional. */
11564 if (COMPARISON_CLASS_P (arg0)
11565 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11566 arg1, TREE_OPERAND (arg0, 1))
11567 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11568 {
11569 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11570 if (tem)
11571 return tem;
11572 }
11573
11574 if (COMPARISON_CLASS_P (arg0)
11575 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11576 op2,
11577 TREE_OPERAND (arg0, 1))
11578 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11579 {
11580 location_t loc0 = expr_location_or (arg0, loc);
11581 tem = fold_invert_truthvalue (loc0, arg0);
11582 if (tem && COMPARISON_CLASS_P (tem))
11583 {
11584 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11585 if (tem)
11586 return tem;
11587 }
11588 }
11589
11590 /* If the second operand is simpler than the third, swap them
11591 since that produces better jump optimization results. */
11592 if (truth_value_p (TREE_CODE (arg0))
11593 && tree_swap_operands_p (op1, op2, false))
11594 {
11595 location_t loc0 = expr_location_or (arg0, loc);
11596 /* See if this can be inverted. If it can't, possibly because
11597 it was a floating-point inequality comparison, don't do
11598 anything. */
11599 tem = fold_invert_truthvalue (loc0, arg0);
11600 if (tem)
11601 return fold_build3_loc (loc, code, type, tem, op2, op1);
11602 }
11603
11604 /* Convert A ? 1 : 0 to simply A. */
11605 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11606 : (integer_onep (op1)
11607 && !VECTOR_TYPE_P (type)))
11608 && integer_zerop (op2)
11609 /* If we try to convert OP0 to our type, the
11610 call to fold will try to move the conversion inside
11611 a COND, which will recurse. In that case, the COND_EXPR
11612 is probably the best choice, so leave it alone. */
11613 && type == TREE_TYPE (arg0))
11614 return pedantic_non_lvalue_loc (loc, arg0);
11615
11616 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11617 over COND_EXPR in cases such as floating point comparisons. */
11618 if (integer_zerop (op1)
11619 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11620 : (integer_onep (op2)
11621 && !VECTOR_TYPE_P (type)))
11622 && truth_value_p (TREE_CODE (arg0)))
11623 return pedantic_non_lvalue_loc (loc,
11624 fold_convert_loc (loc, type,
11625 invert_truthvalue_loc (loc,
11626 arg0)));
11627
11628 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11629 if (TREE_CODE (arg0) == LT_EXPR
11630 && integer_zerop (TREE_OPERAND (arg0, 1))
11631 && integer_zerop (op2)
11632 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11633 {
11634 /* sign_bit_p looks through both zero and sign extensions,
11635 but for this optimization only sign extensions are
11636 usable. */
11637 tree tem2 = TREE_OPERAND (arg0, 0);
11638 while (tem != tem2)
11639 {
11640 if (TREE_CODE (tem2) != NOP_EXPR
11641 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11642 {
11643 tem = NULL_TREE;
11644 break;
11645 }
11646 tem2 = TREE_OPERAND (tem2, 0);
11647 }
11648 /* sign_bit_p only checks ARG1 bits within A's precision.
11649 If <sign bit of A> has wider type than A, bits outside
11650 of A's precision in <sign bit of A> need to be checked.
11651 If they are all 0, this optimization needs to be done
11652 in unsigned A's type; if they are all 1, in signed A's type;
11653 otherwise this can't be done. */
11654 if (tem
11655 && TYPE_PRECISION (TREE_TYPE (tem))
11656 < TYPE_PRECISION (TREE_TYPE (arg1))
11657 && TYPE_PRECISION (TREE_TYPE (tem))
11658 < TYPE_PRECISION (type))
11659 {
11660 int inner_width, outer_width;
11661 tree tem_type;
11662
11663 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11664 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11665 if (outer_width > TYPE_PRECISION (type))
11666 outer_width = TYPE_PRECISION (type);
11667
11668 wide_int mask = wi::shifted_mask
11669 (inner_width, outer_width - inner_width, false,
11670 TYPE_PRECISION (TREE_TYPE (arg1)));
11671
11672 wide_int common = mask & arg1;
11673 if (common == mask)
11674 {
11675 tem_type = signed_type_for (TREE_TYPE (tem));
11676 tem = fold_convert_loc (loc, tem_type, tem);
11677 }
11678 else if (common == 0)
11679 {
11680 tem_type = unsigned_type_for (TREE_TYPE (tem));
11681 tem = fold_convert_loc (loc, tem_type, tem);
11682 }
11683 else
11684 tem = NULL;
11685 }
11686
11687 if (tem)
11688 return
11689 fold_convert_loc (loc, type,
11690 fold_build2_loc (loc, BIT_AND_EXPR,
11691 TREE_TYPE (tem), tem,
11692 fold_convert_loc (loc,
11693 TREE_TYPE (tem),
11694 arg1)));
11695 }
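/* E.g. for a 32-bit int X, X < 0 ? INT_MIN : 0 folds to
   X & INT_MIN, i.e. the sign bit of X; only sign-extending inner
   conversions are looked through, per the checks above.  */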
11696
11697 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11698 already handled above. */
11699 if (TREE_CODE (arg0) == BIT_AND_EXPR
11700 && integer_onep (TREE_OPERAND (arg0, 1))
11701 && integer_zerop (op2)
11702 && integer_pow2p (arg1))
11703 {
11704 tree tem = TREE_OPERAND (arg0, 0);
11705 STRIP_NOPS (tem);
11706 if (TREE_CODE (tem) == RSHIFT_EXPR
11707 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11708 && (unsigned HOST_WIDE_INT) tree_log2 (arg1)
11709 == tree_to_uhwi (TREE_OPERAND (tem, 1)))
11710 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11711 TREE_OPERAND (tem, 0), arg1);
11712 }
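/* E.g. ((X >> 3) & 1) ? 8 : 0 becomes X & 8: the condition tests
   bit 3 of X, and the selected value is exactly that bit.  */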
11713
11714 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11715 is probably obsolete because the first operand should be a
11716 truth value (that's why we have the two cases above), but let's
11717 leave it in until we can confirm this for all front-ends. */
11718 if (integer_zerop (op2)
11719 && TREE_CODE (arg0) == NE_EXPR
11720 && integer_zerop (TREE_OPERAND (arg0, 1))
11721 && integer_pow2p (arg1)
11722 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11723 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11724 arg1, OEP_ONLY_CONST))
11725 return pedantic_non_lvalue_loc (loc,
11726 fold_convert_loc (loc, type,
11727 TREE_OPERAND (arg0, 0)));
11728
11729 /* Disable the transformations below for vectors, since
11730 fold_binary_op_with_conditional_arg may undo them immediately,
11731 yielding an infinite loop. */
11732 if (code == VEC_COND_EXPR)
11733 return NULL_TREE;
11734
11735 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11736 if (integer_zerop (op2)
11737 && truth_value_p (TREE_CODE (arg0))
11738 && truth_value_p (TREE_CODE (arg1))
11739 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11740 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11741 : TRUTH_ANDIF_EXPR,
11742 type, fold_convert_loc (loc, type, arg0), arg1);
11743
11744 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11745 if ((code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2))
11746 && truth_value_p (TREE_CODE (arg0))
11747 && truth_value_p (TREE_CODE (arg1))
11748 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11749 {
11750 location_t loc0 = expr_location_or (arg0, loc);
11751 /* Only perform transformation if ARG0 is easily inverted. */
11752 tem = fold_invert_truthvalue (loc0, arg0);
11753 if (tem)
11754 return fold_build2_loc (loc, code == VEC_COND_EXPR
11755 ? BIT_IOR_EXPR
11756 : TRUTH_ORIF_EXPR,
11757 type, fold_convert_loc (loc, type, tem),
11758 arg1);
11759 }
11760
11761 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11762 if (integer_zerop (arg1)
11763 && truth_value_p (TREE_CODE (arg0))
11764 && truth_value_p (TREE_CODE (op2))
11765 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11766 {
11767 location_t loc0 = expr_location_or (arg0, loc);
11768 /* Only perform transformation if ARG0 is easily inverted. */
11769 tem = fold_invert_truthvalue (loc0, arg0);
11770 if (tem)
11771 return fold_build2_loc (loc, code == VEC_COND_EXPR
11772 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11773 type, fold_convert_loc (loc, type, tem),
11774 op2);
11775 }
11776
11777 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11778 if ((code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1))
11779 && truth_value_p (TREE_CODE (arg0))
11780 && truth_value_p (TREE_CODE (op2))
11781 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11782 return fold_build2_loc (loc, code == VEC_COND_EXPR
11783 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11784 type, fold_convert_loc (loc, type, arg0), op2);
11785
11786 return NULL_TREE;
11787
11788 case CALL_EXPR:
11789 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11790 of fold_ternary on them. */
11791 gcc_unreachable ();
11792
11793 case BIT_FIELD_REF:
11794 if ((TREE_CODE (arg0) == VECTOR_CST
11795 || (TREE_CODE (arg0) == CONSTRUCTOR
11796 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11797 && (type == TREE_TYPE (TREE_TYPE (arg0))
11798 || (TREE_CODE (type) == VECTOR_TYPE
11799 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11800 {
11801 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11802 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11803 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11804 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11805
11806 if (n != 0
11807 && (idx % width) == 0
11808 && (n % width) == 0
11809 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11810 {
11811 idx = idx / width;
11812 n = n / width;
11813
11814 if (TREE_CODE (arg0) == VECTOR_CST)
11815 {
11816 if (n == 1)
11817 return VECTOR_CST_ELT (arg0, idx);
11818
11819 tree *vals = XALLOCAVEC (tree, n);
11820 for (unsigned i = 0; i < n; ++i)
11821 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11822 return build_vector (type, vals);
11823 }
11824
11825 /* Constructor elements can be subvectors. */
11826 unsigned HOST_WIDE_INT k = 1;
11827 if (CONSTRUCTOR_NELTS (arg0) != 0)
11828 {
11829 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11830 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11831 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11832 }
11833
11834 /* We keep an exact subset of the constructor elements. */
11835 if ((idx % k) == 0 && (n % k) == 0)
11836 {
11837 if (CONSTRUCTOR_NELTS (arg0) == 0)
11838 return build_constructor (type, NULL);
11839 idx /= k;
11840 n /= k;
11841 if (n == 1)
11842 {
11843 if (idx < CONSTRUCTOR_NELTS (arg0))
11844 return CONSTRUCTOR_ELT (arg0, idx)->value;
11845 return build_zero_cst (type);
11846 }
11847
11848 vec<constructor_elt, va_gc> *vals;
11849 vec_alloc (vals, n);
11850 for (unsigned i = 0;
11851 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11852 ++i)
11853 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11854 CONSTRUCTOR_ELT
11855 (arg0, idx + i)->value);
11856 return build_constructor (type, vals);
11857 }
11858 /* The bitfield references a single constructor element. */
11859 else if (idx + n <= (idx / k + 1) * k)
11860 {
11861 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11862 return build_zero_cst (type);
11863 else if (n == k)
11864 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11865 else
11866 return fold_build3_loc (loc, code, type,
11867 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11868 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11869 }
11870 }
11871 }
11872
11873 /* A bit-field-ref that referenced the full argument can be stripped. */
11874 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11875 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11876 && integer_zerop (op2))
11877 return fold_convert_loc (loc, type, arg0);
11878
11879 /* On constants we can use native encode/interpret to constant
11880 fold (nearly) all BIT_FIELD_REFs. */
11881 if (CONSTANT_CLASS_P (arg0)
11882 && can_native_interpret_type_p (type)
11883 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11884 /* This limitation should not be necessary; we just need to
11885 round this up to the mode size. */
11886 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11887 /* Need bit-shifting of the buffer to relax the following. */
11888 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11889 {
11890 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11891 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11892 unsigned HOST_WIDE_INT clen;
11893 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11894 /* ??? We cannot tell native_encode_expr to start at
11895 some random byte only. So limit ourselves to a reasonable amount
11896 of work. */
11897 if (clen <= 4096)
11898 {
11899 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11900 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11901 if (len > 0
11902 && len * BITS_PER_UNIT >= bitpos + bitsize)
11903 {
11904 tree v = native_interpret_expr (type,
11905 b + bitpos / BITS_PER_UNIT,
11906 bitsize / BITS_PER_UNIT);
11907 if (v)
11908 return v;
11909 }
11910 }
11911 }
11912
11913 return NULL_TREE;
11914
11915 case FMA_EXPR:
11916 /* For integers we can decompose the FMA if possible. */
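/* A worked sketch: FMA_EXPR <a, b, c> computes a * b + c, so with constant
   a and b the product folds via const_binop and only the addition remains;
   with c == 0 it reduces to the multiplication alone.  */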
11917 if (TREE_CODE (arg0) == INTEGER_CST
11918 && TREE_CODE (arg1) == INTEGER_CST)
11919 return fold_build2_loc (loc, PLUS_EXPR, type,
11920 const_binop (MULT_EXPR, arg0, arg1), arg2);
11921 if (integer_zerop (arg2))
11922 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11923
11924 return fold_fma (loc, type, arg0, arg1, arg2);
11925
11926 case VEC_PERM_EXPR:
11927 if (TREE_CODE (arg2) == VECTOR_CST)
11928 {
11929 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11930 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11931 unsigned char *sel2 = sel + nelts;
11932 bool need_mask_canon = false;
11933 bool need_mask_canon2 = false;
11934 bool all_in_vec0 = true;
11935 bool all_in_vec1 = true;
11936 bool maybe_identity = true;
11937 bool single_arg = (op0 == op1);
11938 bool changed = false;
11939
11940 mask2 = 2 * nelts - 1;
11941 mask = single_arg ? (nelts - 1) : mask2;
11942 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11943 for (i = 0; i < nelts; i++)
11944 {
11945 tree val = VECTOR_CST_ELT (arg2, i);
11946 if (TREE_CODE (val) != INTEGER_CST)
11947 return NULL_TREE;
11948
11949 /* Make sure that the perm value is in an acceptable
11950 range. */
11951 wide_int t = val;
11952 need_mask_canon |= wi::gtu_p (t, mask);
11953 need_mask_canon2 |= wi::gtu_p (t, mask2);
11954 sel[i] = t.to_uhwi () & mask;
11955 sel2[i] = t.to_uhwi () & mask2;
11956
11957 if (sel[i] < nelts)
11958 all_in_vec1 = false;
11959 else
11960 all_in_vec0 = false;
11961
11962 if ((sel[i] & (nelts-1)) != i)
11963 maybe_identity = false;
11964 }
11965
11966 if (maybe_identity)
11967 {
11968 if (all_in_vec0)
11969 return op0;
11970 if (all_in_vec1)
11971 return op1;
11972 }
11973
11974 if (all_in_vec0)
11975 op1 = op0;
11976 else if (all_in_vec1)
11977 {
11978 op0 = op1;
11979 for (i = 0; i < nelts; i++)
11980 sel[i] -= nelts;
11981 need_mask_canon = true;
11982 }
11983
11984 if ((TREE_CODE (op0) == VECTOR_CST
11985 || TREE_CODE (op0) == CONSTRUCTOR)
11986 && (TREE_CODE (op1) == VECTOR_CST
11987 || TREE_CODE (op1) == CONSTRUCTOR))
11988 {
11989 tree t = fold_vec_perm (type, op0, op1, sel);
11990 if (t != NULL_TREE)
11991 return t;
11992 }
11993
11994 if (op0 == op1 && !single_arg)
11995 changed = true;
11996
11997 /* Some targets are deficient and fail to expand a single
11998 argument permutation while still allowing an equivalent
11999 2-argument version. */
12000 if (need_mask_canon && arg2 == op2
12001 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
12002 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
12003 {
12004 need_mask_canon = need_mask_canon2;
12005 sel = sel2;
12006 }
12007
12008 if (need_mask_canon && arg2 == op2)
12009 {
12010 tree *tsel = XALLOCAVEC (tree, nelts);
12011 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
12012 for (i = 0; i < nelts; i++)
12013 tsel[i] = build_int_cst (eltype, sel[i]);
12014 op2 = build_vector (TREE_TYPE (arg2), tsel);
12015 changed = true;
12016 }
12017
12018 if (changed)
12019 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
12020 }
12021 return NULL_TREE;
12022
12023 default:
12024 return NULL_TREE;
12025 } /* switch (code) */
12026 }
12027
12028 /* Get the element at ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
12029 of an array (or vector). */
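/* For example (an illustrative sketch, not tied to any front end), given
   the CONSTRUCTOR for
     int a[6] = { [0 ... 2] = 7, [3] = 9 };
   an ACCESS_INDEX of 1 matches the RANGE_EXPR element and returns 7, while
   an ACCESS_INDEX of 4 matches no element and yields NULL_TREE.  */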
12030
12031 tree
12032 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
12033 {
12034 tree index_type = NULL_TREE;
12035 offset_int low_bound = 0;
12036
12037 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
12038 {
12039 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
12040 if (domain_type && TYPE_MIN_VALUE (domain_type))
12041 {
12042 /* Static constructors for variably sized objects make no sense. */
12043 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
12044 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
12045 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
12046 }
12047 }
12048
12049 if (index_type)
12050 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
12051 TYPE_SIGN (index_type));
12052
12053 offset_int index = low_bound - 1;
12054 if (index_type)
12055 index = wi::ext (index, TYPE_PRECISION (index_type),
12056 TYPE_SIGN (index_type));
12057
12058 offset_int max_index;
12059 unsigned HOST_WIDE_INT cnt;
12060 tree cfield, cval;
12061
12062 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
12063 {
12064 /* An array constructor might explicitly set the index, specify a
12065 range, or leave the index NULL, meaning that it is the next index
12066 after the previous one. */
12067 if (cfield)
12068 {
12069 if (TREE_CODE (cfield) == INTEGER_CST)
12070 max_index = index = wi::to_offset (cfield);
12071 else
12072 {
12073 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
12074 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12075 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12076 }
12077 }
12078 else
12079 {
12080 index += 1;
12081 if (index_type)
12082 index = wi::ext (index, TYPE_PRECISION (index_type),
12083 TYPE_SIGN (index_type));
12084 max_index = index;
12085 }
12086
12087 /* Do we have a match? */
12088 if (wi::cmpu (access_index, index) >= 0
12089 && wi::cmpu (access_index, max_index) <= 0)
12090 return cval;
12091 }
12092 return NULL_TREE;
12093 }
12094
12095 /* Perform constant folding and related simplification of EXPR.
12096 The related simplifications include x*1 => x, x*0 => 0, etc.,
12097 and application of the associative law.
12098 NOP_EXPR conversions may be removed freely (as long as we
12099 are careful not to change the type of the overall expression).
12100 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12101 but we can constant-fold them if they have constant operands. */
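/* As a concrete illustration, fold applied to the GENERIC tree
     MULT_EXPR <x, 1>
   dispatches to fold_binary_loc, which returns the simplified operand x,
   while fold applied to a tcc_constant node returns it unchanged.  */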
12102
12103 #ifdef ENABLE_FOLD_CHECKING
12104 # define fold(x) fold_1 (x)
12105 static tree fold_1 (tree);
12106 static
12107 #endif
12108 tree
12109 fold (tree expr)
12110 {
12111 const tree t = expr;
12112 enum tree_code code = TREE_CODE (t);
12113 enum tree_code_class kind = TREE_CODE_CLASS (code);
12114 tree tem;
12115 location_t loc = EXPR_LOCATION (expr);
12116
12117 /* Return right away if a constant. */
12118 if (kind == tcc_constant)
12119 return t;
12120
12121 /* CALL_EXPR-like objects with variable numbers of operands are
12122 treated specially. */
12123 if (kind == tcc_vl_exp)
12124 {
12125 if (code == CALL_EXPR)
12126 {
12127 tem = fold_call_expr (loc, expr, false);
12128 return tem ? tem : expr;
12129 }
12130 return expr;
12131 }
12132
12133 if (IS_EXPR_CODE_CLASS (kind))
12134 {
12135 tree type = TREE_TYPE (t);
12136 tree op0, op1, op2;
12137
12138 switch (TREE_CODE_LENGTH (code))
12139 {
12140 case 1:
12141 op0 = TREE_OPERAND (t, 0);
12142 tem = fold_unary_loc (loc, code, type, op0);
12143 return tem ? tem : expr;
12144 case 2:
12145 op0 = TREE_OPERAND (t, 0);
12146 op1 = TREE_OPERAND (t, 1);
12147 tem = fold_binary_loc (loc, code, type, op0, op1);
12148 return tem ? tem : expr;
12149 case 3:
12150 op0 = TREE_OPERAND (t, 0);
12151 op1 = TREE_OPERAND (t, 1);
12152 op2 = TREE_OPERAND (t, 2);
12153 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12154 return tem ? tem : expr;
12155 default:
12156 break;
12157 }
12158 }
12159
12160 switch (code)
12161 {
12162 case ARRAY_REF:
12163 {
12164 tree op0 = TREE_OPERAND (t, 0);
12165 tree op1 = TREE_OPERAND (t, 1);
12166
12167 if (TREE_CODE (op1) == INTEGER_CST
12168 && TREE_CODE (op0) == CONSTRUCTOR
12169 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12170 {
12171 tree val = get_array_ctor_element_at_index (op0,
12172 wi::to_offset (op1));
12173 if (val)
12174 return val;
12175 }
12176
12177 return t;
12178 }
12179
12180 /* Return a VECTOR_CST if possible. */
12181 case CONSTRUCTOR:
12182 {
12183 tree type = TREE_TYPE (t);
12184 if (TREE_CODE (type) != VECTOR_TYPE)
12185 return t;
12186
12187 unsigned i;
12188 tree val;
12189 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12190 if (! CONSTANT_CLASS_P (val))
12191 return t;
12192
12193 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12194 }
12195
12196 case CONST_DECL:
12197 return fold (DECL_INITIAL (t));
12198
12199 default:
12200 return t;
12201 } /* switch (code) */
12202 }
12203
12204 #ifdef ENABLE_FOLD_CHECKING
12205 #undef fold
12206
12207 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12208 hash_table<nofree_ptr_hash<const tree_node> > *);
12209 static void fold_check_failed (const_tree, const_tree);
12210 void print_fold_checksum (const_tree);
12211
12212 /* When configured with --enable-checking=fold, compute a digest of
12213 EXPR before and after the actual fold call to verify that fold did
12214 not accidentally change the original expression. */
12215
12216 tree
12217 fold (tree expr)
12218 {
12219 tree ret;
12220 struct md5_ctx ctx;
12221 unsigned char checksum_before[16], checksum_after[16];
12222 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12223
12224 md5_init_ctx (&ctx);
12225 fold_checksum_tree (expr, &ctx, &ht);
12226 md5_finish_ctx (&ctx, checksum_before);
12227 ht.empty ();
12228
12229 ret = fold_1 (expr);
12230
12231 md5_init_ctx (&ctx);
12232 fold_checksum_tree (expr, &ctx, &ht);
12233 md5_finish_ctx (&ctx, checksum_after);
12234
12235 if (memcmp (checksum_before, checksum_after, 16))
12236 fold_check_failed (expr, ret);
12237
12238 return ret;
12239 }
12240
12241 void
12242 print_fold_checksum (const_tree expr)
12243 {
12244 struct md5_ctx ctx;
12245 unsigned char checksum[16], cnt;
12246 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12247
12248 md5_init_ctx (&ctx);
12249 fold_checksum_tree (expr, &ctx, &ht);
12250 md5_finish_ctx (&ctx, checksum);
12251 for (cnt = 0; cnt < 16; ++cnt)
12252 fprintf (stderr, "%02x", checksum[cnt]);
12253 putc ('\n', stderr);
12254 }
12255
12256 static void
12257 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12258 {
12259 internal_error ("fold check: original tree changed by fold");
12260 }
12261
12262 static void
12263 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12264 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12265 {
12266 const tree_node **slot;
12267 enum tree_code code;
12268 union tree_node buf;
12269 int i, len;
12270
12271 recursive_label:
12272 if (expr == NULL)
12273 return;
12274 slot = ht->find_slot (expr, INSERT);
12275 if (*slot != NULL)
12276 return;
12277 *slot = expr;
12278 code = TREE_CODE (expr);
12279 if (TREE_CODE_CLASS (code) == tcc_declaration
12280 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12281 {
12282 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12283 memcpy ((char *) &buf, expr, tree_size (expr));
12284 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12285 buf.decl_with_vis.symtab_node = NULL;
12286 expr = (tree) &buf;
12287 }
12288 else if (TREE_CODE_CLASS (code) == tcc_type
12289 && (TYPE_POINTER_TO (expr)
12290 || TYPE_REFERENCE_TO (expr)
12291 || TYPE_CACHED_VALUES_P (expr)
12292 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12293 || TYPE_NEXT_VARIANT (expr)))
12294 {
12295 /* Allow these fields to be modified. */
12296 tree tmp;
12297 memcpy ((char *) &buf, expr, tree_size (expr));
12298 expr = tmp = (tree) &buf;
12299 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12300 TYPE_POINTER_TO (tmp) = NULL;
12301 TYPE_REFERENCE_TO (tmp) = NULL;
12302 TYPE_NEXT_VARIANT (tmp) = NULL;
12303 if (TYPE_CACHED_VALUES_P (tmp))
12304 {
12305 TYPE_CACHED_VALUES_P (tmp) = 0;
12306 TYPE_CACHED_VALUES (tmp) = NULL;
12307 }
12308 }
12309 md5_process_bytes (expr, tree_size (expr), ctx);
12310 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12311 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12312 if (TREE_CODE_CLASS (code) != tcc_type
12313 && TREE_CODE_CLASS (code) != tcc_declaration
12314 && code != TREE_LIST
12315 && code != SSA_NAME
12316 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12317 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12318 switch (TREE_CODE_CLASS (code))
12319 {
12320 case tcc_constant:
12321 switch (code)
12322 {
12323 case STRING_CST:
12324 md5_process_bytes (TREE_STRING_POINTER (expr),
12325 TREE_STRING_LENGTH (expr), ctx);
12326 break;
12327 case COMPLEX_CST:
12328 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12329 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12330 break;
12331 case VECTOR_CST:
12332 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12333 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12334 break;
12335 default:
12336 break;
12337 }
12338 break;
12339 case tcc_exceptional:
12340 switch (code)
12341 {
12342 case TREE_LIST:
12343 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12344 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12345 expr = TREE_CHAIN (expr);
12346 goto recursive_label;
12347 break;
12348 case TREE_VEC:
12349 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12350 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12351 break;
12352 default:
12353 break;
12354 }
12355 break;
12356 case tcc_expression:
12357 case tcc_reference:
12358 case tcc_comparison:
12359 case tcc_unary:
12360 case tcc_binary:
12361 case tcc_statement:
12362 case tcc_vl_exp:
12363 len = TREE_OPERAND_LENGTH (expr);
12364 for (i = 0; i < len; ++i)
12365 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12366 break;
12367 case tcc_declaration:
12368 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12369 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12370 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12371 {
12372 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12373 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12374 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12375 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12376 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12377 }
12378
12379 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12380 {
12381 if (TREE_CODE (expr) == FUNCTION_DECL)
12382 {
12383 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12384 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12385 }
12386 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12387 }
12388 break;
12389 case tcc_type:
12390 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12391 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12392 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12393 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12394 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12395 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12396 if (INTEGRAL_TYPE_P (expr)
12397 || SCALAR_FLOAT_TYPE_P (expr))
12398 {
12399 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12400 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12401 }
12402 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12403 if (TREE_CODE (expr) == RECORD_TYPE
12404 || TREE_CODE (expr) == UNION_TYPE
12405 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12406 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12407 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12408 break;
12409 default:
12410 break;
12411 }
12412 }
12413
12414 /* Helper function for outputting the checksum of a tree T. When
12415 debugging with gdb, you can "define mynext" to be "next" followed
12416 by "call debug_fold_checksum (op0)", then just trace down until the
12417 outputs differ. */
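/* A sketch of that gdb macro:
     define mynext
       next
       call debug_fold_checksum (op0)
     end  */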
12418
12419 DEBUG_FUNCTION void
12420 debug_fold_checksum (const_tree t)
12421 {
12422 int i;
12423 unsigned char checksum[16];
12424 struct md5_ctx ctx;
12425 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12426
12427 md5_init_ctx (&ctx);
12428 fold_checksum_tree (t, &ctx, &ht);
12429 md5_finish_ctx (&ctx, checksum);
12430 ht.empty ();
12431
12432 for (i = 0; i < 16; i++)
12433 fprintf (stderr, "%d ", checksum[i]);
12434
12435 fprintf (stderr, "\n");
12436 }
12437
12438 #endif
12439
12440 /* Fold a unary tree expression with code CODE of type TYPE with an
12441 operand OP0. LOC is the location of the resulting expression.
12442 Return a folded expression if successful. Otherwise, return a tree
12443 expression with code CODE of type TYPE with an operand OP0. */
12444
12445 tree
12446 fold_build1_stat_loc (location_t loc,
12447 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12448 {
12449 tree tem;
12450 #ifdef ENABLE_FOLD_CHECKING
12451 unsigned char checksum_before[16], checksum_after[16];
12452 struct md5_ctx ctx;
12453 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12454
12455 md5_init_ctx (&ctx);
12456 fold_checksum_tree (op0, &ctx, &ht);
12457 md5_finish_ctx (&ctx, checksum_before);
12458 ht.empty ();
12459 #endif
12460
12461 tem = fold_unary_loc (loc, code, type, op0);
12462 if (!tem)
12463 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12464
12465 #ifdef ENABLE_FOLD_CHECKING
12466 md5_init_ctx (&ctx);
12467 fold_checksum_tree (op0, &ctx, &ht);
12468 md5_finish_ctx (&ctx, checksum_after);
12469
12470 if (memcmp (checksum_before, checksum_after, 16))
12471 fold_check_failed (op0, tem);
12472 #endif
12473 return tem;
12474 }
12475
12476 /* Fold a binary tree expression with code CODE of type TYPE with
12477 operands OP0 and OP1. LOC is the location of the resulting
12478 expression. Return a folded expression if successful. Otherwise,
12479 return a tree expression with code CODE of type TYPE with operands
12480 OP0 and OP1. */
12481
12482 tree
12483 fold_build2_stat_loc (location_t loc,
12484 enum tree_code code, tree type, tree op0, tree op1
12485 MEM_STAT_DECL)
12486 {
12487 tree tem;
12488 #ifdef ENABLE_FOLD_CHECKING
12489 unsigned char checksum_before_op0[16],
12490 checksum_before_op1[16],
12491 checksum_after_op0[16],
12492 checksum_after_op1[16];
12493 struct md5_ctx ctx;
12494 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12495
12496 md5_init_ctx (&ctx);
12497 fold_checksum_tree (op0, &ctx, &ht);
12498 md5_finish_ctx (&ctx, checksum_before_op0);
12499 ht.empty ();
12500
12501 md5_init_ctx (&ctx);
12502 fold_checksum_tree (op1, &ctx, &ht);
12503 md5_finish_ctx (&ctx, checksum_before_op1);
12504 ht.empty ();
12505 #endif
12506
12507 tem = fold_binary_loc (loc, code, type, op0, op1);
12508 if (!tem)
12509 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12510
12511 #ifdef ENABLE_FOLD_CHECKING
12512 md5_init_ctx (&ctx);
12513 fold_checksum_tree (op0, &ctx, &ht);
12514 md5_finish_ctx (&ctx, checksum_after_op0);
12515 ht.empty ();
12516
12517 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12518 fold_check_failed (op0, tem);
12519
12520 md5_init_ctx (&ctx);
12521 fold_checksum_tree (op1, &ctx, &ht);
12522 md5_finish_ctx (&ctx, checksum_after_op1);
12523
12524 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12525 fold_check_failed (op1, tem);
12526 #endif
12527 return tem;
12528 }
12529
12530 /* Fold a ternary tree expression with code CODE of type TYPE with
12531 operands OP0, OP1, and OP2. Return a folded expression if
12532 successful. Otherwise, return a tree expression with code CODE of
12533 type TYPE with operands OP0, OP1, and OP2. */
12534
12535 tree
12536 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12537 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12538 {
12539 tree tem;
12540 #ifdef ENABLE_FOLD_CHECKING
12541 unsigned char checksum_before_op0[16],
12542 checksum_before_op1[16],
12543 checksum_before_op2[16],
12544 checksum_after_op0[16],
12545 checksum_after_op1[16],
12546 checksum_after_op2[16];
12547 struct md5_ctx ctx;
12548 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12549
12550 md5_init_ctx (&ctx);
12551 fold_checksum_tree (op0, &ctx, &ht);
12552 md5_finish_ctx (&ctx, checksum_before_op0);
12553 ht.empty ();
12554
12555 md5_init_ctx (&ctx);
12556 fold_checksum_tree (op1, &ctx, &ht);
12557 md5_finish_ctx (&ctx, checksum_before_op1);
12558 ht.empty ();
12559
12560 md5_init_ctx (&ctx);
12561 fold_checksum_tree (op2, &ctx, &ht);
12562 md5_finish_ctx (&ctx, checksum_before_op2);
12563 ht.empty ();
12564 #endif
12565
12566 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12567 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12568 if (!tem)
12569 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12570
12571 #ifdef ENABLE_FOLD_CHECKING
12572 md5_init_ctx (&ctx);
12573 fold_checksum_tree (op0, &ctx, &ht);
12574 md5_finish_ctx (&ctx, checksum_after_op0);
12575 ht.empty ();
12576
12577 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12578 fold_check_failed (op0, tem);
12579
12580 md5_init_ctx (&ctx);
12581 fold_checksum_tree (op1, &ctx, &ht);
12582 md5_finish_ctx (&ctx, checksum_after_op1);
12583 ht.empty ();
12584
12585 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12586 fold_check_failed (op1, tem);
12587
12588 md5_init_ctx (&ctx);
12589 fold_checksum_tree (op2, &ctx, &ht);
12590 md5_finish_ctx (&ctx, checksum_after_op2);
12591
12592 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12593 fold_check_failed (op2, tem);
12594 #endif
12595 return tem;
12596 }
12597
12598 /* Fold a CALL_EXPR of type TYPE with function FN, the NARGS
12599 arguments in ARGARRAY, and a null static chain.
12600 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12601 of type TYPE from the given operands as constructed by build_call_array. */
12602
12603 tree
12604 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12605 int nargs, tree *argarray)
12606 {
12607 tree tem;
12608 #ifdef ENABLE_FOLD_CHECKING
12609 unsigned char checksum_before_fn[16],
12610 checksum_before_arglist[16],
12611 checksum_after_fn[16],
12612 checksum_after_arglist[16];
12613 struct md5_ctx ctx;
12614 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12615 int i;
12616
12617 md5_init_ctx (&ctx);
12618 fold_checksum_tree (fn, &ctx, &ht);
12619 md5_finish_ctx (&ctx, checksum_before_fn);
12620 ht.empty ();
12621
12622 md5_init_ctx (&ctx);
12623 for (i = 0; i < nargs; i++)
12624 fold_checksum_tree (argarray[i], &ctx, &ht);
12625 md5_finish_ctx (&ctx, checksum_before_arglist);
12626 ht.empty ();
12627 #endif
12628
12629 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12630 if (!tem)
12631 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12632
12633 #ifdef ENABLE_FOLD_CHECKING
12634 md5_init_ctx (&ctx);
12635 fold_checksum_tree (fn, &ctx, &ht);
12636 md5_finish_ctx (&ctx, checksum_after_fn);
12637 ht.empty ();
12638
12639 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12640 fold_check_failed (fn, tem);
12641
12642 md5_init_ctx (&ctx);
12643 for (i = 0; i < nargs; i++)
12644 fold_checksum_tree (argarray[i], &ctx, &ht);
12645 md5_finish_ctx (&ctx, checksum_after_arglist);
12646
12647 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12648 fold_check_failed (NULL_TREE, tem);
12649 #endif
12650 return tem;
12651 }
12652
12653 /* Perform constant folding and related simplification of initializer
12654 expressions. These behave like "fold_buildN" but ignore potential
12655 run-time traps and exceptions that fold must otherwise preserve. */
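/* For instance, const_binop refuses to fold 1.0 / 0.0 when
   flag_trapping_math is set, since folding would hide a run-time
   exception; within these initializer variants the flag is temporarily
   cleared, so the division folds to +Inf on targets whose mode has
   infinities.  */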
12656
12657 #define START_FOLD_INIT \
12658 int saved_signaling_nans = flag_signaling_nans;\
12659 int saved_trapping_math = flag_trapping_math;\
12660 int saved_rounding_math = flag_rounding_math;\
12661 int saved_trapv = flag_trapv;\
12662 int saved_folding_initializer = folding_initializer;\
12663 flag_signaling_nans = 0;\
12664 flag_trapping_math = 0;\
12665 flag_rounding_math = 0;\
12666 flag_trapv = 0;\
12667 folding_initializer = 1;
12668
12669 #define END_FOLD_INIT \
12670 flag_signaling_nans = saved_signaling_nans;\
12671 flag_trapping_math = saved_trapping_math;\
12672 flag_rounding_math = saved_rounding_math;\
12673 flag_trapv = saved_trapv;\
12674 folding_initializer = saved_folding_initializer;
12675
12676 tree
12677 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12678 tree type, tree op)
12679 {
12680 tree result;
12681 START_FOLD_INIT;
12682
12683 result = fold_build1_loc (loc, code, type, op);
12684
12685 END_FOLD_INIT;
12686 return result;
12687 }
12688
12689 tree
12690 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12691 tree type, tree op0, tree op1)
12692 {
12693 tree result;
12694 START_FOLD_INIT;
12695
12696 result = fold_build2_loc (loc, code, type, op0, op1);
12697
12698 END_FOLD_INIT;
12699 return result;
12700 }
12701
12702 tree
12703 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12704 int nargs, tree *argarray)
12705 {
12706 tree result;
12707 START_FOLD_INIT;
12708
12709 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12710
12711 END_FOLD_INIT;
12712 return result;
12713 }
12714
12715 #undef START_FOLD_INIT
12716 #undef END_FOLD_INIT
12717
12718 /* Determine if the first argument is a multiple of the second argument.
12719 Return 0 if it is not, or if we cannot easily determine it to be.
12720
12721 An example of the sort of thing we care about (at this point; this routine
12722 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12723 fold cases do now) is discovering that
12724
12725 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12726
12727 is a multiple of
12728
12729 SAVE_EXPR (J * 8)
12730
12731 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12732
12733 This code also handles discovering that
12734
12735 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12736
12737 is a multiple of 8 so we don't have to worry about dealing with a
12738 possible remainder.
12739
12740 Note that we *look* inside a SAVE_EXPR only to determine how it was
12741 calculated; it is not safe for fold to do much of anything else with the
12742 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12743 at run time. For example, the latter example above *cannot* be implemented
12744 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12745 evaluation time of the original SAVE_EXPR is not necessarily the same at
12746 the time the new expression is evaluated. The only optimization of this
12747 sort that would be valid is changing
12748
12749 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12750
12751 divided by 8 to
12752
12753 SAVE_EXPR (I) * SAVE_EXPR (J)
12754
12755 (where the same SAVE_EXPR (J) is used in the original and the
12756 transformed version). */
12757
12758 int
12759 multiple_of_p (tree type, const_tree top, const_tree bottom)
12760 {
12761 if (operand_equal_p (top, bottom, 0))
12762 return 1;
12763
12764 if (TREE_CODE (type) != INTEGER_TYPE)
12765 return 0;
12766
12767 switch (TREE_CODE (top))
12768 {
12769 case BIT_AND_EXPR:
12770 /* Bitwise and provides a power of two multiple. If the mask is
12771 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
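/* E.g. (x & 0xF0) is a multiple of 16: the constant operand 0xF0 is
   itself a multiple of 16, so the masked value has its low four bits
   clear.  */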
12772 if (!integer_pow2p (bottom))
12773 return 0;
12774 /* FALLTHRU */
12775
12776 case MULT_EXPR:
12777 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12778 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12779
12780 case PLUS_EXPR:
12781 case MINUS_EXPR:
12782 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12783 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12784
12785 case LSHIFT_EXPR:
12786 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12787 {
12788 tree op1, t1;
12789
12790 op1 = TREE_OPERAND (top, 1);
12791 /* const_binop may not detect overflow correctly,
12792 so check for it explicitly here. */
12793 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12794 && 0 != (t1 = fold_convert (type,
12795 const_binop (LSHIFT_EXPR,
12796 size_one_node,
12797 op1)))
12798 && !TREE_OVERFLOW (t1))
12799 return multiple_of_p (type, t1, bottom);
12800 }
12801 return 0;
12802
12803 case NOP_EXPR:
12804 /* Can't handle conversions from non-integral or wider integral type. */
12805 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12806 || (TYPE_PRECISION (type)
12807 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12808 return 0;
12809
12810 /* ... fall through ... */
12811
12812 case SAVE_EXPR:
12813 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12814
12815 case COND_EXPR:
12816 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12817 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12818
12819 case INTEGER_CST:
12820 if (TREE_CODE (bottom) != INTEGER_CST
12821 || integer_zerop (bottom)
12822 || (TYPE_UNSIGNED (type)
12823 && (tree_int_cst_sgn (top) < 0
12824 || tree_int_cst_sgn (bottom) < 0)))
12825 return 0;
12826 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12827 SIGNED);
12828
12829 default:
12830 return 0;
12831 }
12832 }
12833
12834 #define tree_expr_nonnegative_warnv_p(X, Y) \
12835 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12836
12837 #define RECURSE(X) \
12838 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
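/* The #define above turns any direct recursive call into a compile-time
   error; RECURSE instead invokes the real function (the parenthesized name
   suppresses the function-like macro) while threading STRICT_OVERFLOW_P
   and incrementing DEPTH, so every recursion stays depth-limited.  */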
12839
12840 /* Return true if CODE or TYPE is known to be non-negative. */
12841
12842 static bool
12843 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12844 {
12845 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12846 && truth_value_p (code))
12847 /* Truth values evaluate to 0 or 1, which are nonnegative unless we
12848 have a signed:1 type (where the values are -1 and 0). */
12849 return true;
12850 return false;
12851 }
12852
12853 /* Return true if (CODE OP0) is known to be non-negative. If the return
12854 value is based on the assumption that signed overflow is undefined,
12855 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12856 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12857
12858 bool
12859 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12860 bool *strict_overflow_p, int depth)
12861 {
12862 if (TYPE_UNSIGNED (type))
12863 return true;
12864
12865 switch (code)
12866 {
12867 case ABS_EXPR:
12868 /* We can't return 1 if flag_wrapv is set because
12869 ABS_EXPR<INT_MIN> = INT_MIN. */
12870 if (!ANY_INTEGRAL_TYPE_P (type))
12871 return true;
12872 if (TYPE_OVERFLOW_UNDEFINED (type))
12873 {
12874 *strict_overflow_p = true;
12875 return true;
12876 }
12877 break;
12878
12879 case NON_LVALUE_EXPR:
12880 case FLOAT_EXPR:
12881 case FIX_TRUNC_EXPR:
12882 return RECURSE (op0);
12883
12884 CASE_CONVERT:
12885 {
12886 tree inner_type = TREE_TYPE (op0);
12887 tree outer_type = type;
12888
12889 if (TREE_CODE (outer_type) == REAL_TYPE)
12890 {
12891 if (TREE_CODE (inner_type) == REAL_TYPE)
12892 return RECURSE (op0);
12893 if (INTEGRAL_TYPE_P (inner_type))
12894 {
12895 if (TYPE_UNSIGNED (inner_type))
12896 return true;
12897 return RECURSE (op0);
12898 }
12899 }
12900 else if (INTEGRAL_TYPE_P (outer_type))
12901 {
12902 if (TREE_CODE (inner_type) == REAL_TYPE)
12903 return RECURSE (op0);
12904 if (INTEGRAL_TYPE_P (inner_type))
12905 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12906 && TYPE_UNSIGNED (inner_type);
12907 }
12908 }
12909 break;
12910
12911 default:
12912 return tree_simple_nonnegative_warnv_p (code, type);
12913 }
12914
12915 /* We don't know the sign of `t', so be conservative and return false. */
12916 return false;
12917 }
12918
12919 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12920 value is based on the assumption that signed overflow is undefined,
12921 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12922 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12923
12924 bool
12925 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12926 tree op1, bool *strict_overflow_p,
12927 int depth)
12928 {
12929 if (TYPE_UNSIGNED (type))
12930 return true;
12931
12932 switch (code)
12933 {
12934 case POINTER_PLUS_EXPR:
12935 case PLUS_EXPR:
12936 if (FLOAT_TYPE_P (type))
12937 return RECURSE (op0) && RECURSE (op1);
12938
12939 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12940 both unsigned and at least 2 bits shorter than the result. */
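/* Worked sketch: two 8-bit unsigned operands sum to at most
   255 + 255 = 510 < 2^9, so prec below computes to 9 and the result is
   known non-negative in any type wider than 9 bits.  */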
12941 if (TREE_CODE (type) == INTEGER_TYPE
12942 && TREE_CODE (op0) == NOP_EXPR
12943 && TREE_CODE (op1) == NOP_EXPR)
12944 {
12945 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12946 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12947 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12948 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12949 {
12950 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12951 TYPE_PRECISION (inner2)) + 1;
12952 return prec < TYPE_PRECISION (type);
12953 }
12954 }
12955 break;
12956
12957 case MULT_EXPR:
12958 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12959 {
12960 /* x * x is always non-negative for floating point x,
12961 and for integers when overflow is undefined. */
12962 if (operand_equal_p (op0, op1, 0)
12963 || (RECURSE (op0) && RECURSE (op1)))
12964 {
12965 if (ANY_INTEGRAL_TYPE_P (type)
12966 && TYPE_OVERFLOW_UNDEFINED (type))
12967 *strict_overflow_p = true;
12968 return true;
12969 }
12970 }
12971
12972 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12973 both unsigned and their combined precision is less than the result's. */
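/* Worked sketch: two 8-bit unsigned operands multiply to at most
   255 * 255 = 65025 < 2^16, so precision0 + precision1 = 16 and the sign
   bit stays clear in any result type wider than 16 bits.  */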
12974 if (TREE_CODE (type) == INTEGER_TYPE
12975 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12976 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12977 {
12978 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12979 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12980 : TREE_TYPE (op0);
12981 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12982 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12983 : TREE_TYPE (op1);
12984
12985 bool unsigned0 = TYPE_UNSIGNED (inner0);
12986 bool unsigned1 = TYPE_UNSIGNED (inner1);
12987
12988 if (TREE_CODE (op0) == INTEGER_CST)
12989 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12990
12991 if (TREE_CODE (op1) == INTEGER_CST)
12992 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12993
12994 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12995 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12996 {
12997 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12998 ? tree_int_cst_min_precision (op0, UNSIGNED)
12999 : TYPE_PRECISION (inner0);
13000
13001 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
13002 ? tree_int_cst_min_precision (op1, UNSIGNED)
13003 : TYPE_PRECISION (inner1);
13004
13005 return precision0 + precision1 < TYPE_PRECISION (type);
13006 }
13007 }
13008 return false;
13009
13010 case BIT_AND_EXPR:
13011 case MAX_EXPR:
13012 return RECURSE (op0) || RECURSE (op1);
13013
13014 case BIT_IOR_EXPR:
13015 case BIT_XOR_EXPR:
13016 case MIN_EXPR:
13017 case RDIV_EXPR:
13018 case TRUNC_DIV_EXPR:
13019 case CEIL_DIV_EXPR:
13020 case FLOOR_DIV_EXPR:
13021 case ROUND_DIV_EXPR:
13022 return RECURSE (op0) && RECURSE (op1);
13023
13024 case TRUNC_MOD_EXPR:
13025 return RECURSE (op0);
13026
13027 case FLOOR_MOD_EXPR:
13028 return RECURSE (op1);
13029
13030 case CEIL_MOD_EXPR:
13031 case ROUND_MOD_EXPR:
13032 default:
13033 return tree_simple_nonnegative_warnv_p (code, type);
13034 }
13035
13036 /* We don't know the sign of `t', so be conservative and return false. */
13037 return false;
13038 }
13039
13040 /* Return true if T is known to be non-negative. If the return
13041 value is based on the assumption that signed overflow is undefined,
13042 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13043 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13044
13045 bool
13046 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13047 {
13048 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13049 return true;
13050
13051 switch (TREE_CODE (t))
13052 {
13053 case INTEGER_CST:
13054 return tree_int_cst_sgn (t) >= 0;
13055
13056 case REAL_CST:
13057 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
13058
13059 case FIXED_CST:
13060 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
13061
13062 case COND_EXPR:
13063 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13064
13065 case SSA_NAME:
13066 /* Limit the depth of recursion to avoid quadratic behavior.
13067 This is expected to catch almost all occurrences in practice.
13068 If this code misses important cases that unbounded recursion
13069 would not, passes that need this information could be revised
13070 to provide it through dataflow propagation. */
13071 return (!name_registered_for_update_p (t)
13072 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13073 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
13074 strict_overflow_p, depth));
13075
13076 default:
13077 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13078 }
13079 }
13080
13081 /* Return true if T is known to be non-negative. If the return
13082 value is based on the assumption that signed overflow is undefined,
13083 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13084 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13085
13086 bool
13087 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13088 bool *strict_overflow_p, int depth)
13089 {
13090 switch (fn)
13091 {
13092 CASE_CFN_ACOS:
13093 CASE_CFN_ACOSH:
13094 CASE_CFN_CABS:
13095 CASE_CFN_COSH:
13096 CASE_CFN_ERFC:
13097 CASE_CFN_EXP:
13098 CASE_CFN_EXP10:
13099 CASE_CFN_EXP2:
13100 CASE_CFN_FABS:
13101 CASE_CFN_FDIM:
13102 CASE_CFN_HYPOT:
13103 CASE_CFN_POW10:
13104 CASE_CFN_FFS:
13105 CASE_CFN_PARITY:
13106 CASE_CFN_POPCOUNT:
13107 CASE_CFN_CLZ:
13108 CASE_CFN_CLRSB:
13109 case CFN_BUILT_IN_BSWAP32:
13110 case CFN_BUILT_IN_BSWAP64:
13111 /* Always true. */
13112 return true;
13113
13114 CASE_CFN_SQRT:
13115 /* sqrt(-0.0) is -0.0. */
13116 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13117 return true;
13118 return RECURSE (arg0);
13119
13120 CASE_CFN_ASINH:
13121 CASE_CFN_ATAN:
13122 CASE_CFN_ATANH:
13123 CASE_CFN_CBRT:
13124 CASE_CFN_CEIL:
13125 CASE_CFN_ERF:
13126 CASE_CFN_EXPM1:
13127 CASE_CFN_FLOOR:
13128 CASE_CFN_FMOD:
13129 CASE_CFN_FREXP:
13130 CASE_CFN_ICEIL:
13131 CASE_CFN_IFLOOR:
13132 CASE_CFN_IRINT:
13133 CASE_CFN_IROUND:
13134 CASE_CFN_LCEIL:
13135 CASE_CFN_LDEXP:
13136 CASE_CFN_LFLOOR:
13137 CASE_CFN_LLCEIL:
13138 CASE_CFN_LLFLOOR:
13139 CASE_CFN_LLRINT:
13140 CASE_CFN_LLROUND:
13141 CASE_CFN_LRINT:
13142 CASE_CFN_LROUND:
13143 CASE_CFN_MODF:
13144 CASE_CFN_NEARBYINT:
13145 CASE_CFN_RINT:
13146 CASE_CFN_ROUND:
13147 CASE_CFN_SCALB:
13148 CASE_CFN_SCALBLN:
13149 CASE_CFN_SCALBN:
13150 CASE_CFN_SIGNBIT:
13151 CASE_CFN_SIGNIFICAND:
13152 CASE_CFN_SINH:
13153 CASE_CFN_TANH:
13154 CASE_CFN_TRUNC:
13155 /* True if the 1st argument is nonnegative. */
13156 return RECURSE (arg0);
13157
13158 CASE_CFN_FMAX:
13159 /* True if the 1st OR 2nd arguments are nonnegative. */
13160 return RECURSE (arg0) || RECURSE (arg1);
13161
13162 CASE_CFN_FMIN:
13163 /* True if the 1st AND 2nd arguments are nonnegative. */
13164 return RECURSE (arg0) && RECURSE (arg1);
13165
13166 CASE_CFN_COPYSIGN:
13167 /* True if the 2nd argument is nonnegative. */
13168 return RECURSE (arg1);
13169
13170 CASE_CFN_POWI:
13171 /* True if the 1st argument is nonnegative or the second
13172 argument is an even integer. */
13173 if (TREE_CODE (arg1) == INTEGER_CST
13174 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13175 return true;
13176 return RECURSE (arg0);
13177
13178 CASE_CFN_POW:
13179 /* True if the 1st argument is nonnegative or the second
13180 argument is an even integer valued real. */
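/* E.g. pow (x, 2.0) is non-negative for every x since (-x)**2 == x**2,
   whereas an exponent of 2.5 does not qualify because it is not integer
   valued.  */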
13181 if (TREE_CODE (arg1) == REAL_CST)
13182 {
13183 REAL_VALUE_TYPE c;
13184 HOST_WIDE_INT n;
13185
13186 c = TREE_REAL_CST (arg1);
13187 n = real_to_integer (&c);
13188 if ((n & 1) == 0)
13189 {
13190 REAL_VALUE_TYPE cint;
13191 real_from_integer (&cint, VOIDmode, n, SIGNED);
13192 if (real_identical (&c, &cint))
13193 return true;
13194 }
13195 }
13196 return RECURSE (arg0);
13197
13198 default:
13199 break;
13200 }
13201 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13202 }
13203
13204 /* Return true if T is known to be non-negative. If the return
13205 value is based on the assumption that signed overflow is undefined,
13206 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13207 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13208
13209 static bool
13210 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13211 {
13212 enum tree_code code = TREE_CODE (t);
13213 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13214 return true;
13215
13216 switch (code)
13217 {
13218 case TARGET_EXPR:
13219 {
13220 tree temp = TARGET_EXPR_SLOT (t);
13221 t = TARGET_EXPR_INITIAL (t);
13222
13223 /* If the initializer is non-void, then it's a normal expression
13224 that will be assigned to the slot. */
13225 if (!VOID_TYPE_P (t))
13226 return RECURSE (t);
13227
13228 /* Otherwise, the initializer sets the slot in some way. One common
13229 way is an assignment statement at the end of the initializer. */
13230 while (1)
13231 {
13232 if (TREE_CODE (t) == BIND_EXPR)
13233 t = expr_last (BIND_EXPR_BODY (t));
13234 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13235 || TREE_CODE (t) == TRY_CATCH_EXPR)
13236 t = expr_last (TREE_OPERAND (t, 0));
13237 else if (TREE_CODE (t) == STATEMENT_LIST)
13238 t = expr_last (t);
13239 else
13240 break;
13241 }
13242 if (TREE_CODE (t) == MODIFY_EXPR
13243 && TREE_OPERAND (t, 0) == temp)
13244 return RECURSE (TREE_OPERAND (t, 1));
13245
13246 return false;
13247 }
13248
13249 case CALL_EXPR:
13250 {
13251 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13252 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13253
13254 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13255 get_call_combined_fn (t),
13256 arg0,
13257 arg1,
13258 strict_overflow_p, depth);
13259 }
13260 case COMPOUND_EXPR:
13261 case MODIFY_EXPR:
13262 return RECURSE (TREE_OPERAND (t, 1));
13263
13264 case BIND_EXPR:
13265 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13266
13267 case SAVE_EXPR:
13268 return RECURSE (TREE_OPERAND (t, 0));
13269
13270 default:
13271 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13272 }
13273 }
13274
13275 #undef RECURSE
13276 #undef tree_expr_nonnegative_warnv_p
13277
13278 /* Return true if T is known to be non-negative. If the return
13279 value is based on the assumption that signed overflow is undefined,
13280 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13281 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13282
13283 bool
13284 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13285 {
13286 enum tree_code code;
13287 if (t == error_mark_node)
13288 return false;
13289
13290 code = TREE_CODE (t);
13291 switch (TREE_CODE_CLASS (code))
13292 {
13293 case tcc_binary:
13294 case tcc_comparison:
13295 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13296 TREE_TYPE (t),
13297 TREE_OPERAND (t, 0),
13298 TREE_OPERAND (t, 1),
13299 strict_overflow_p, depth);
13300
13301 case tcc_unary:
13302 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13303 TREE_TYPE (t),
13304 TREE_OPERAND (t, 0),
13305 strict_overflow_p, depth);
13306
13307 case tcc_constant:
13308 case tcc_declaration:
13309 case tcc_reference:
13310 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13311
13312 default:
13313 break;
13314 }
13315
13316 switch (code)
13317 {
13318 case TRUTH_AND_EXPR:
13319 case TRUTH_OR_EXPR:
13320 case TRUTH_XOR_EXPR:
13321 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13322 TREE_TYPE (t),
13323 TREE_OPERAND (t, 0),
13324 TREE_OPERAND (t, 1),
13325 strict_overflow_p, depth);
13326 case TRUTH_NOT_EXPR:
13327 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13328 TREE_TYPE (t),
13329 TREE_OPERAND (t, 0),
13330 strict_overflow_p, depth);
13331
13332 case COND_EXPR:
13333 case CONSTRUCTOR:
13334 case OBJ_TYPE_REF:
13335 case ASSERT_EXPR:
13336 case ADDR_EXPR:
13337 case WITH_SIZE_EXPR:
13338 case SSA_NAME:
13339 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13340
13341 default:
13342 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13343 }
13344 }
13345
13346 /* Return true if `t' is known to be non-negative. Handle warnings
13347 about undefined signed overflow. */
13348
13349 bool
13350 tree_expr_nonnegative_p (tree t)
13351 {
13352 bool ret, strict_overflow_p;
13353
13354 strict_overflow_p = false;
13355 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13356 if (strict_overflow_p)
13357 fold_overflow_warning (("assuming signed overflow does not occur when "
13358 "determining that expression is always "
13359 "non-negative"),
13360 WARN_STRICT_OVERFLOW_MISC);
13361 return ret;
13362 }
13363
13364
13365 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13366 For floating point we further ensure that the value is not denormal.
13367 Similar logic is present in nonzero_address in rtlanal.h.
13368
13369 If the return value is based on the assumption that signed overflow
13370 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13371 change *STRICT_OVERFLOW_P. */
13372
13373 bool
13374 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13375 bool *strict_overflow_p)
13376 {
13377 switch (code)
13378 {
13379 case ABS_EXPR:
13380 return tree_expr_nonzero_warnv_p (op0,
13381 strict_overflow_p);
13382
13383 case NOP_EXPR:
13384 {
13385 tree inner_type = TREE_TYPE (op0);
13386 tree outer_type = type;
13387
13388 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13389 && tree_expr_nonzero_warnv_p (op0,
13390 strict_overflow_p));
13391 }
13392 break;
13393
13394 case NON_LVALUE_EXPR:
13395 return tree_expr_nonzero_warnv_p (op0,
13396 strict_overflow_p);
13397
13398 default:
13399 break;
13400 }
13401
13402 return false;
13403 }
13404
13405 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13406 For floating point we further ensure that the value is not denormal.
13407 Similar logic is present in nonzero_address in rtlanal.h.
13408
13409 If the return value is based on the assumption that signed overflow
13410 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13411 change *STRICT_OVERFLOW_P. */
13412
13413 bool
13414 tree_binary_nonzero_warnv_p (enum tree_code code,
13415 tree type,
13416 tree op0,
13417 tree op1, bool *strict_overflow_p)
13418 {
13419 bool sub_strict_overflow_p;
13420 switch (code)
13421 {
13422 case POINTER_PLUS_EXPR:
13423 case PLUS_EXPR:
13424 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13425 {
13426 /* In the presence of negative values it is hard
13427 to say anything definite. */
13428 sub_strict_overflow_p = false;
13429 if (!tree_expr_nonnegative_warnv_p (op0,
13430 &sub_strict_overflow_p)
13431 || !tree_expr_nonnegative_warnv_p (op1,
13432 &sub_strict_overflow_p))
13433 return false;
13434 /* One of the operands must be positive and the other non-negative. */
13435 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13436 overflows, on a twos-complement machine the sum of two
13437 nonnegative numbers can never be zero. */
13438 return (tree_expr_nonzero_warnv_p (op0,
13439 strict_overflow_p)
13440 || tree_expr_nonzero_warnv_p (op1,
13441 strict_overflow_p));
13442 }
13443 break;
13444
13445 case MULT_EXPR:
13446 if (TYPE_OVERFLOW_UNDEFINED (type))
13447 {
13448 if (tree_expr_nonzero_warnv_p (op0,
13449 strict_overflow_p)
13450 && tree_expr_nonzero_warnv_p (op1,
13451 strict_overflow_p))
13452 {
13453 *strict_overflow_p = true;
13454 return true;
13455 }
13456 }
13457 break;
13458
13459 case MIN_EXPR:
13460 sub_strict_overflow_p = false;
13461 if (tree_expr_nonzero_warnv_p (op0,
13462 &sub_strict_overflow_p)
13463 && tree_expr_nonzero_warnv_p (op1,
13464 &sub_strict_overflow_p))
13465 {
13466 if (sub_strict_overflow_p)
13467 *strict_overflow_p = true;
13468 }
13469 break;
13470
13471 case MAX_EXPR:
13472 sub_strict_overflow_p = false;
13473 if (tree_expr_nonzero_warnv_p (op0,
13474 &sub_strict_overflow_p))
13475 {
13476 if (sub_strict_overflow_p)
13477 *strict_overflow_p = true;
13478
13479 /* When both operands are nonzero, MAX must be too. */
13480 if (tree_expr_nonzero_warnv_p (op1,
13481 strict_overflow_p))
13482 return true;
13483
13484 /* MAX where operand 0 is positive is positive. */
13485 return tree_expr_nonnegative_warnv_p (op0,
13486 strict_overflow_p);
13487 }
13488 /* MAX where operand 1 is positive is positive. */
13489 else if (tree_expr_nonzero_warnv_p (op1,
13490 &sub_strict_overflow_p)
13491 && tree_expr_nonnegative_warnv_p (op1,
13492 &sub_strict_overflow_p))
13493 {
13494 if (sub_strict_overflow_p)
13495 *strict_overflow_p = true;
13496 return true;
13497 }
13498 break;
13499
13500 case BIT_IOR_EXPR:
13501 return (tree_expr_nonzero_warnv_p (op1,
13502 strict_overflow_p)
13503 || tree_expr_nonzero_warnv_p (op0,
13504 strict_overflow_p));
13505
13506 default:
13507 break;
13508 }
13509
13510 return false;
13511 }
13512
13513 /* Return true when T is an address and is known to be nonzero.
13514 For floating point we further ensure that T is not denormal.
13515 Similar logic is present in nonzero_address in rtlanal.h.
13516
13517 If the return value is based on the assumption that signed overflow
13518 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13519 change *STRICT_OVERFLOW_P. */
13520
13521 bool
13522 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13523 {
13524 bool sub_strict_overflow_p;
13525 switch (TREE_CODE (t))
13526 {
13527 case INTEGER_CST:
13528 return !integer_zerop (t);
13529
13530 case ADDR_EXPR:
13531 {
13532 tree base = TREE_OPERAND (t, 0);
13533
13534 if (!DECL_P (base))
13535 base = get_base_address (base);
13536
13537 if (!base)
13538 return false;
13539
13540 /* For objects in the symbol table, check whether we know they are
13541 non-zero. Decide nothing for variables and functions before the symtab
13542 is built; it is quite possible that they will be declared weak later. */
13543 int nonzero_addr = maybe_nonzero_address (base);
13544 if (nonzero_addr >= 0)
13545 return nonzero_addr;
13546
13547 /* Function-local objects are never NULL. */
13548 if (DECL_P (base)
13549 && (DECL_CONTEXT (base)
13550 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13551 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13552 return true;
13553
13554 /* Constants are never weak. */
13555 if (CONSTANT_CLASS_P (base))
13556 return true;
13557
13558 return false;
13559 }
13560
13561 case COND_EXPR:
13562 sub_strict_overflow_p = false;
13563 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13564 &sub_strict_overflow_p)
13565 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13566 &sub_strict_overflow_p))
13567 {
13568 if (sub_strict_overflow_p)
13569 *strict_overflow_p = true;
13570 return true;
13571 }
13572 break;
13573
13574 default:
13575 break;
13576 }
13577 return false;
13578 }
13579
13580 #define integer_valued_real_p(X) \
13581 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13582
13583 #define RECURSE(X) \
13584 ((integer_valued_real_p) (X, depth + 1))
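/* As with the RECURSE macro for tree_expr_nonnegative_warnv_p above, the
   parenthesized call bypasses the poisoning #define while DEPTH increases
   on every step.  */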
13585
13586 /* Return true if the floating point result of (CODE OP0) has an
13587 integer value. We also allow +Inf, -Inf and NaN to be considered
13588 integer values. Return false for signaling NaN.
13589
13590 DEPTH is the current nesting depth of the query. */
13591
13592 bool
13593 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13594 {
13595 switch (code)
13596 {
13597 case FLOAT_EXPR:
13598 return true;
13599
13600 case ABS_EXPR:
13601 return RECURSE (op0);
13602
13603 CASE_CONVERT:
13604 {
13605 tree type = TREE_TYPE (op0);
13606 if (TREE_CODE (type) == INTEGER_TYPE)
13607 return true;
13608 if (TREE_CODE (type) == REAL_TYPE)
13609 return RECURSE (op0);
13610 break;
13611 }
13612
13613 default:
13614 break;
13615 }
13616 return false;
13617 }
13618
13619 /* Return true if the floating point result of (CODE OP0 OP1) has an
13620 integer value. We also allow +Inf, -Inf and NaN to be considered
13621 integer values. Return false for signaling NaN.
13622
13623 DEPTH is the current nesting depth of the query. */
13624
13625 bool
13626 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13627 {
13628 switch (code)
13629 {
13630 case PLUS_EXPR:
13631 case MINUS_EXPR:
13632 case MULT_EXPR:
13633 case MIN_EXPR:
13634 case MAX_EXPR:
13635 return RECURSE (op0) && RECURSE (op1);
13636
13637 default:
13638 break;
13639 }
13640 return false;
13641 }
13642
13643 /* Return true if the floating point result of calling FN with arguments
13644 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13645 considered integer values. Return false for signaling NaN. If FN
13646 takes fewer than 2 arguments, the remaining ARGn are null.
13647
13648 DEPTH is the current nesting depth of the query. */
13649
13650 bool
13651 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13652 {
13653 switch (fn)
13654 {
13655 CASE_CFN_CEIL:
13656 CASE_CFN_FLOOR:
13657 CASE_CFN_NEARBYINT:
13658 CASE_CFN_RINT:
13659 CASE_CFN_ROUND:
13660 CASE_CFN_TRUNC:
13661 return true;
13662
13663 CASE_CFN_FMIN:
13664 CASE_CFN_FMAX:
13665 return RECURSE (arg0) && RECURSE (arg1);
13666
13667 default:
13668 break;
13669 }
13670 return false;
13671 }
13672
13673 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13674 has an integer value. We also allow +Inf, -Inf and NaN to be
13675 considered integer values. Return false for signaling NaN.
13676
13677 DEPTH is the current nesting depth of the query. */
13678
13679 bool
13680 integer_valued_real_single_p (tree t, int depth)
13681 {
13682 switch (TREE_CODE (t))
13683 {
13684 case REAL_CST:
13685 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13686
13687 case COND_EXPR:
13688 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13689
13690 case SSA_NAME:
13691 /* Limit the depth of recursion to avoid quadratic behavior.
13692 This is expected to catch almost all occurrences in practice.
13693 If this code misses important cases that unbounded recursion
13694 would not, passes that need this information could be revised
13695 to provide it through dataflow propagation. */
13696 return (!name_registered_for_update_p (t)
13697 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13698 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13699 depth));
13700
13701 default:
13702 break;
13703 }
13704 return false;
13705 }
13706
13707 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13708 has an integer value. We also allow +Inf, -Inf and NaN to be
13709 considered integer values. Return false for signaling NaN.
13710
13711 DEPTH is the current nesting depth of the query. */
13712
13713 static bool
13714 integer_valued_real_invalid_p (tree t, int depth)
13715 {
13716 switch (TREE_CODE (t))
13717 {
13718 case COMPOUND_EXPR:
13719 case MODIFY_EXPR:
13720 case BIND_EXPR:
13721 return RECURSE (TREE_OPERAND (t, 1));
13722
13723 case SAVE_EXPR:
13724 return RECURSE (TREE_OPERAND (t, 0));
13725
13726 default:
13727 break;
13728 }
13729 return false;
13730 }
13731
13732 #undef RECURSE
13733 #undef integer_valued_real_p
13734
13735 /* Return true if the floating point expression T has an integer value.
13736 We also allow +Inf, -Inf and NaN to be considered integer values.
13737 Return false for signaling NaN.
13738
13739 DEPTH is the current nesting depth of the query. */
13740
13741 bool
13742 integer_valued_real_p (tree t, int depth)
13743 {
13744 if (t == error_mark_node)
13745 return false;
13746
13747 tree_code code = TREE_CODE (t);
13748 switch (TREE_CODE_CLASS (code))
13749 {
13750 case tcc_binary:
13751 case tcc_comparison:
13752 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13753 TREE_OPERAND (t, 1), depth);
13754
13755 case tcc_unary:
13756 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13757
13758 case tcc_constant:
13759 case tcc_declaration:
13760 case tcc_reference:
13761 return integer_valued_real_single_p (t, depth);
13762
13763 default:
13764 break;
13765 }
13766
13767 switch (code)
13768 {
13769 case COND_EXPR:
13770 case SSA_NAME:
13771 return integer_valued_real_single_p (t, depth);
13772
13773 case CALL_EXPR:
13774 {
13775 tree arg0 = (call_expr_nargs (t) > 0
13776 ? CALL_EXPR_ARG (t, 0)
13777 : NULL_TREE);
13778 tree arg1 = (call_expr_nargs (t) > 1
13779 ? CALL_EXPR_ARG (t, 1)
13780 : NULL_TREE);
13781 return integer_valued_real_call_p (get_call_combined_fn (t),
13782 arg0, arg1, depth);
13783 }
13784
13785 default:
13786 return integer_valued_real_invalid_p (t, depth);
13787 }
13788 }
13789
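/* Editorial usage sketch (not in the original source; the SSA names below
   are hypothetical).  Given GIMPLE such as

     d_1 = floor (x_2);
     d_3 = d_1 + 1.0;

   integer_valued_real_p (d_3, 0) returns true: the SSA_NAME case walks the
   defining statements (bounded by PARAM_MAX_SSA_NAME_QUERY_DEPTH), floor
   yields an integral value, and adding the integral constant 1.0
   preserves that property.  */
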
13790 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13791 attempt to fold the expression to a constant without modifying TYPE,
13792 OP0 or OP1.
13793
13794 If the expression could be simplified to a constant, then return
13795 the constant. If the expression would not be simplified to a
13796 constant, then return NULL_TREE. */
13797
13798 tree
13799 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13800 {
13801 tree tem = fold_binary (code, type, op0, op1);
13802 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13803 }
13804
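/* Editorial usage sketch (not in the original source):

     tree a = build_int_cst (integer_type_node, 3);
     tree b = build_int_cst (integer_type_node, 4);
     tree sum = fold_binary_to_constant (PLUS_EXPR, integer_type_node, a, b);

   SUM is an INTEGER_CST of value 7.  Had either operand been, say, a
   VAR_DECL, fold_binary would not have produced a TREE_CONSTANT node and
   the result would be NULL_TREE.  */
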
13805 /* Given the components of a unary expression CODE, TYPE and OP0,
13806 attempt to fold the expression to a constant without modifying
13807 TYPE or OP0.
13808
13809 If the expression could be simplified to a constant, then return
13810 the constant. If the expression would not be simplified to a
13811 constant, then return NULL_TREE. */
13812
13813 tree
13814 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13815 {
13816 tree tem = fold_unary (code, type, op0);
13817 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13818 }
13819
13820 /* If EXP represents referencing an element in a constant string
13821 (either via pointer arithmetic or array indexing), return the
13822 tree representing the value accessed, otherwise return NULL. */
13823
13824 tree
13825 fold_read_from_constant_string (tree exp)
13826 {
13827 if ((TREE_CODE (exp) == INDIRECT_REF
13828 || TREE_CODE (exp) == ARRAY_REF)
13829 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13830 {
13831 tree exp1 = TREE_OPERAND (exp, 0);
13832 tree index;
13833 tree string;
13834 location_t loc = EXPR_LOCATION (exp);
13835
13836 if (TREE_CODE (exp) == INDIRECT_REF)
13837 string = string_constant (exp1, &index);
13838 else
13839 {
13840 tree low_bound = array_ref_low_bound (exp);
13841 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13842
13843 	  /* Optimize the special case of a zero lower bound.
13844
13845 	     We convert the low_bound to sizetype to avoid some problems
13846 	     with constant folding.  (E.g. suppose the lower bound is 1,
13847 	     and its mode is QI.  Without the conversion, (ARRAY
13848 	     +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13849 	     +INDEX), which becomes (ARRAY+255+INDEX).  Oops!)  */
13850 if (! integer_zerop (low_bound))
13851 index = size_diffop_loc (loc, index,
13852 fold_convert_loc (loc, sizetype, low_bound));
13853
13854 string = exp1;
13855 }
13856
13857 if (string
13858 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13859 && TREE_CODE (string) == STRING_CST
13860 && TREE_CODE (index) == INTEGER_CST
13861 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13862 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13863 == MODE_INT)
13864 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13865 return build_int_cst_type (TREE_TYPE (exp),
13866 (TREE_STRING_POINTER (string)
13867 [TREE_INT_CST_LOW (index)]));
13868 }
13869 return NULL;
13870 }
13871
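/* Editorial example (not in the original source): for the C expression
   "abc"[1], EXP is an ARRAY_REF of a STRING_CST with index 1, and the
   function returns an INTEGER_CST of value 'b' in the element type.  An
   out-of-range index such as "abc"[7] fails the compare_tree_int check
   and yields NULL.  */
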
13872 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13873    an integer, real, or fixed-point constant.
13874
13875 TYPE is the type of the result. */
13876
13877 static tree
13878 fold_negate_const (tree arg0, tree type)
13879 {
13880 tree t = NULL_TREE;
13881
13882 switch (TREE_CODE (arg0))
13883 {
13884 case INTEGER_CST:
13885 {
13886 bool overflow;
13887 wide_int val = wi::neg (arg0, &overflow);
13888 t = force_fit_type (type, val, 1,
13889 (overflow | TREE_OVERFLOW (arg0))
13890 && !TYPE_UNSIGNED (type));
13891 break;
13892 }
13893
13894 case REAL_CST:
13895 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13896 break;
13897
13898 case FIXED_CST:
13899 {
13900 FIXED_VALUE_TYPE f;
13901 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13902 &(TREE_FIXED_CST (arg0)), NULL,
13903 TYPE_SATURATING (type));
13904 t = build_fixed (type, f);
13905 /* Propagate overflow flags. */
13906 if (overflow_p | TREE_OVERFLOW (arg0))
13907 TREE_OVERFLOW (t) = 1;
13908 break;
13909 }
13910
13911 default:
13912 gcc_unreachable ();
13913 }
13914
13915 return t;
13916 }
13917
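/* Editorial example (not in the original source): negating INT_MIN in a
   signed 32-bit type does not fit, so the wide-int negation reports
   overflow and force_fit_type returns an INTEGER_CST that wraps back to
   INT_MIN with TREE_OVERFLOW set.  For an unsigned type the overflow
   flag is deliberately suppressed.  */
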
13918 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13919 an integer constant or real constant.
13920
13921 TYPE is the type of the result. */
13922
13923 tree
13924 fold_abs_const (tree arg0, tree type)
13925 {
13926 tree t = NULL_TREE;
13927
13928 switch (TREE_CODE (arg0))
13929 {
13930 case INTEGER_CST:
13931 {
13932 /* If the value is unsigned or non-negative, then the absolute value
13933 is the same as the ordinary value. */
13934 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13935 t = arg0;
13936
13937 /* If the value is negative, then the absolute value is
13938 its negation. */
13939 else
13940 {
13941 bool overflow;
13942 wide_int val = wi::neg (arg0, &overflow);
13943 t = force_fit_type (type, val, -1,
13944 overflow | TREE_OVERFLOW (arg0));
13945 }
13946 }
13947 break;
13948
13949 case REAL_CST:
13950 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13951 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13952 else
13953 t = arg0;
13954 break;
13955
13956 default:
13957 gcc_unreachable ();
13958 }
13959
13960 return t;
13961 }
13962
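/* Editorial example (not in the original source): as with negation,
   fold_abs_const on INT_MIN in a signed 32-bit type overflows, so the
   result carries TREE_OVERFLOW.  For REAL_CSTs only the sign is
   stripped; abs (-0.0) folds to 0.0.  */
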
13963 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13964 constant. TYPE is the type of the result. */
13965
13966 static tree
13967 fold_not_const (const_tree arg0, tree type)
13968 {
13969 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13970
13971 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13972 }
13973
13974 /* Given CODE, a relational operator, the target type, TYPE and two
13975 constant operands OP0 and OP1, return the result of the
13976 relational operation. If the result is not a compile time
13977 constant, then return NULL_TREE. */
13978
13979 static tree
13980 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13981 {
13982 int result, invert;
13983
13984 /* From here on, the only cases we handle are when the result is
13985 known to be a constant. */
13986
13987 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13988 {
13989 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13990 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13991
13992 /* Handle the cases where either operand is a NaN. */
13993 if (real_isnan (c0) || real_isnan (c1))
13994 {
13995 switch (code)
13996 {
13997 case EQ_EXPR:
13998 case ORDERED_EXPR:
13999 result = 0;
14000 break;
14001
14002 case NE_EXPR:
14003 case UNORDERED_EXPR:
14004 case UNLT_EXPR:
14005 case UNLE_EXPR:
14006 case UNGT_EXPR:
14007 case UNGE_EXPR:
14008 case UNEQ_EXPR:
14009 result = 1;
14010 break;
14011
14012 case LT_EXPR:
14013 case LE_EXPR:
14014 case GT_EXPR:
14015 case GE_EXPR:
14016 case LTGT_EXPR:
14017 if (flag_trapping_math)
14018 return NULL_TREE;
14019 result = 0;
14020 break;
14021
14022 default:
14023 gcc_unreachable ();
14024 }
14025
14026 return constant_boolean_node (result, type);
14027 }
14028
14029 return constant_boolean_node (real_compare (code, c0, c1), type);
14030 }
14031
14032 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
14033 {
14034 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
14035 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
14036 return constant_boolean_node (fixed_compare (code, c0, c1), type);
14037 }
14038
14039 /* Handle equality/inequality of complex constants. */
14040 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
14041 {
14042 tree rcond = fold_relational_const (code, type,
14043 TREE_REALPART (op0),
14044 TREE_REALPART (op1));
14045 tree icond = fold_relational_const (code, type,
14046 TREE_IMAGPART (op0),
14047 TREE_IMAGPART (op1));
14048 if (code == EQ_EXPR)
14049 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
14050 else if (code == NE_EXPR)
14051 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
14052 else
14053 return NULL_TREE;
14054 }
14055
14056 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
14057 {
14058 if (!VECTOR_TYPE_P (type))
14059 {
14060 	  /* Handle a vector comparison with a scalar boolean result.  */
14061 bool result = true;
14062 gcc_assert ((code == EQ_EXPR || code == NE_EXPR)
14063 && VECTOR_CST_NELTS (op0) == VECTOR_CST_NELTS (op1));
14064 for (unsigned i = 0; i < VECTOR_CST_NELTS (op0); i++)
14065 {
14066 tree elem0 = VECTOR_CST_ELT (op0, i);
14067 tree elem1 = VECTOR_CST_ELT (op1, i);
14068 tree tmp = fold_relational_const (code, type, elem0, elem1);
14069 result &= integer_onep (tmp);
14070 }
14071 if (code == NE_EXPR)
14072 result = !result;
14073 return constant_boolean_node (result, type);
14074 }
14075 unsigned count = VECTOR_CST_NELTS (op0);
14076 tree *elts = XALLOCAVEC (tree, count);
14077 gcc_assert (VECTOR_CST_NELTS (op1) == count
14078 && TYPE_VECTOR_SUBPARTS (type) == count);
14079
14080 for (unsigned i = 0; i < count; i++)
14081 {
14082 tree elem_type = TREE_TYPE (type);
14083 tree elem0 = VECTOR_CST_ELT (op0, i);
14084 tree elem1 = VECTOR_CST_ELT (op1, i);
14085
14086 tree tem = fold_relational_const (code, elem_type,
14087 elem0, elem1);
14088
14089 if (tem == NULL_TREE)
14090 return NULL_TREE;
14091
14092 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14093 }
14094
14095 return build_vector (type, elts);
14096 }
14097
14098 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14099
14100 To compute GT, swap the arguments and do LT.
14101 To compute GE, do LT and invert the result.
14102 To compute LE, swap the arguments, do LT and invert the result.
14103 To compute NE, do EQ and invert the result.
14104
14105 Therefore, the code below must handle only EQ and LT. */
14106
14107 if (code == LE_EXPR || code == GT_EXPR)
14108 {
14109 std::swap (op0, op1);
14110 code = swap_tree_comparison (code);
14111 }
14112
14113   /* Note that it is safe to invert for real values here because we
14114      have already handled the one case in which it matters.  */
14115
14116 invert = 0;
14117 if (code == NE_EXPR || code == GE_EXPR)
14118 {
14119 invert = 1;
14120 code = invert_tree_comparison (code, false);
14121 }
14122
14123   /* Compute a result for LT or EQ if args permit;
14124      otherwise return NULL_TREE.  */
14125 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14126 {
14127 if (code == EQ_EXPR)
14128 result = tree_int_cst_equal (op0, op1);
14129 else
14130 result = tree_int_cst_lt (op0, op1);
14131 }
14132 else
14133 return NULL_TREE;
14134
14135 if (invert)
14136 result ^= 1;
14137 return constant_boolean_node (result, type);
14138 }
14139
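/* Editorial example (not in the original source), illustrating the NaN
   handling above.  With C0 = NaN and C1 = 1.0:

     EQ_EXPR   -> constant_boolean_node (0, type)  (NaN compares unequal)
     UNGE_EXPR -> constant_boolean_node (1, type)  (unordered is true)
     LT_EXPR   -> NULL_TREE under -ftrapping-math, since folding the
                  comparison away would lose the invalid-operand trap.  */
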
14140 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14141 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14142 itself. */
14143
14144 tree
14145 fold_build_cleanup_point_expr (tree type, tree expr)
14146 {
14147 /* If the expression does not have side effects then we don't have to wrap
14148 it with a cleanup point expression. */
14149 if (!TREE_SIDE_EFFECTS (expr))
14150 return expr;
14151
14152   /* If the expression is a return, check whether the expression inside
14153      the return, or the right-hand side of the modify expression inside
14154      the return, has no side effects.  If either has none, we don't need
14155      to wrap the expression in a cleanup point expression.  Note we don't
14156      check the left-hand side of the modify; it should always be a return decl.  */
14157 if (TREE_CODE (expr) == RETURN_EXPR)
14158 {
14159 tree op = TREE_OPERAND (expr, 0);
14160 if (!op || !TREE_SIDE_EFFECTS (op))
14161 return expr;
14162 op = TREE_OPERAND (op, 1);
14163 if (!TREE_SIDE_EFFECTS (op))
14164 return expr;
14165 }
14166
14167 return build1 (CLEANUP_POINT_EXPR, type, expr);
14168 }
14169
14170 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14171 of an indirection through OP0, or NULL_TREE if no simplification is
14172 possible. */
14173
14174 tree
14175 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14176 {
14177 tree sub = op0;
14178 tree subtype;
14179
14180 STRIP_NOPS (sub);
14181 subtype = TREE_TYPE (sub);
14182 if (!POINTER_TYPE_P (subtype))
14183 return NULL_TREE;
14184
14185 if (TREE_CODE (sub) == ADDR_EXPR)
14186 {
14187 tree op = TREE_OPERAND (sub, 0);
14188 tree optype = TREE_TYPE (op);
14189 /* *&CONST_DECL -> to the value of the const decl. */
14190 if (TREE_CODE (op) == CONST_DECL)
14191 return DECL_INITIAL (op);
14192 /* *&p => p; make sure to handle *&"str"[cst] here. */
14193 if (type == optype)
14194 {
14195 tree fop = fold_read_from_constant_string (op);
14196 if (fop)
14197 return fop;
14198 else
14199 return op;
14200 }
14201 /* *(foo *)&fooarray => fooarray[0] */
14202 else if (TREE_CODE (optype) == ARRAY_TYPE
14203 && type == TREE_TYPE (optype)
14204 && (!in_gimple_form
14205 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14206 {
14207 tree type_domain = TYPE_DOMAIN (optype);
14208 tree min_val = size_zero_node;
14209 if (type_domain && TYPE_MIN_VALUE (type_domain))
14210 min_val = TYPE_MIN_VALUE (type_domain);
14211 if (in_gimple_form
14212 && TREE_CODE (min_val) != INTEGER_CST)
14213 return NULL_TREE;
14214 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14215 NULL_TREE, NULL_TREE);
14216 }
14217 /* *(foo *)&complexfoo => __real__ complexfoo */
14218 else if (TREE_CODE (optype) == COMPLEX_TYPE
14219 && type == TREE_TYPE (optype))
14220 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14221 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14222 else if (TREE_CODE (optype) == VECTOR_TYPE
14223 && type == TREE_TYPE (optype))
14224 {
14225 tree part_width = TYPE_SIZE (type);
14226 tree index = bitsize_int (0);
14227 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14228 }
14229 }
14230
14231 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14232 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14233 {
14234 tree op00 = TREE_OPERAND (sub, 0);
14235 tree op01 = TREE_OPERAND (sub, 1);
14236
14237 STRIP_NOPS (op00);
14238 if (TREE_CODE (op00) == ADDR_EXPR)
14239 {
14240 tree op00type;
14241 op00 = TREE_OPERAND (op00, 0);
14242 op00type = TREE_TYPE (op00);
14243
14244 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14245 if (TREE_CODE (op00type) == VECTOR_TYPE
14246 && type == TREE_TYPE (op00type))
14247 {
14248 tree part_width = TYPE_SIZE (type);
14249 unsigned HOST_WIDE_INT max_offset
14250 = (tree_to_uhwi (part_width) / BITS_PER_UNIT
14251 * TYPE_VECTOR_SUBPARTS (op00type));
14252 if (tree_int_cst_sign_bit (op01) == 0
14253 && compare_tree_int (op01, max_offset) == -1)
14254 {
14255 unsigned HOST_WIDE_INT offset = tree_to_uhwi (op01);
14256 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14257 tree index = bitsize_int (indexi);
14258 return fold_build3_loc (loc,
14259 BIT_FIELD_REF, type, op00,
14260 part_width, index);
14261 }
14262 }
14263 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14264 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14265 && type == TREE_TYPE (op00type))
14266 {
14267 tree size = TYPE_SIZE_UNIT (type);
14268 if (tree_int_cst_equal (size, op01))
14269 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14270 }
14271 /* ((foo *)&fooarray)[1] => fooarray[1] */
14272 else if (TREE_CODE (op00type) == ARRAY_TYPE
14273 && type == TREE_TYPE (op00type))
14274 {
14275 tree type_domain = TYPE_DOMAIN (op00type);
14276 tree min_val = size_zero_node;
14277 if (type_domain && TYPE_MIN_VALUE (type_domain))
14278 min_val = TYPE_MIN_VALUE (type_domain);
14279 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14280 TYPE_SIZE_UNIT (type));
14281 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14282 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14283 NULL_TREE, NULL_TREE);
14284 }
14285 }
14286 }
14287
14288 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14289 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14290 && type == TREE_TYPE (TREE_TYPE (subtype))
14291 && (!in_gimple_form
14292 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14293 {
14294 tree type_domain;
14295 tree min_val = size_zero_node;
14296 sub = build_fold_indirect_ref_loc (loc, sub);
14297 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14298 if (type_domain && TYPE_MIN_VALUE (type_domain))
14299 min_val = TYPE_MIN_VALUE (type_domain);
14300 if (in_gimple_form
14301 && TREE_CODE (min_val) != INTEGER_CST)
14302 return NULL_TREE;
14303 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14304 NULL_TREE);
14305 }
14306
14307 return NULL_TREE;
14308 }
14309
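/* Editorial example (not in the original source): with V a vector of four
   32-bit floats, *(float *) &v is rewritten above into
   BIT_FIELD_REF <v, 32, 0>, and an indirection through a
   POINTER_PLUS_EXPR of &v with constant byte offset 4 becomes
   BIT_FIELD_REF <v, 32, 32>; either way the element access avoids a trip
   through memory.  */
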
14310 /* Builds an expression for an indirection through T, simplifying some
14311 cases. */
14312
14313 tree
14314 build_fold_indirect_ref_loc (location_t loc, tree t)
14315 {
14316 tree type = TREE_TYPE (TREE_TYPE (t));
14317 tree sub = fold_indirect_ref_1 (loc, type, t);
14318
14319 if (sub)
14320 return sub;
14321
14322 return build1_loc (loc, INDIRECT_REF, type, t);
14323 }
14324
14325 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14326
14327 tree
14328 fold_indirect_ref_loc (location_t loc, tree t)
14329 {
14330 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14331
14332 if (sub)
14333 return sub;
14334 else
14335 return t;
14336 }
14337
14338 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14339    whose result is ignored.  The type of the returned tree need not be
14340    the same as that of the original expression.  */
14341
14342 tree
14343 fold_ignored_result (tree t)
14344 {
14345 if (!TREE_SIDE_EFFECTS (t))
14346 return integer_zero_node;
14347
14348 for (;;)
14349 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14350 {
14351 case tcc_unary:
14352 t = TREE_OPERAND (t, 0);
14353 break;
14354
14355 case tcc_binary:
14356 case tcc_comparison:
14357 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14358 t = TREE_OPERAND (t, 0);
14359 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14360 t = TREE_OPERAND (t, 1);
14361 else
14362 return t;
14363 break;
14364
14365 case tcc_expression:
14366 switch (TREE_CODE (t))
14367 {
14368 case COMPOUND_EXPR:
14369 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14370 return t;
14371 t = TREE_OPERAND (t, 0);
14372 break;
14373
14374 case COND_EXPR:
14375 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14376 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14377 return t;
14378 t = TREE_OPERAND (t, 0);
14379 break;
14380
14381 default:
14382 return t;
14383 }
14384 break;
14385
14386 default:
14387 return t;
14388 }
14389 }
14390
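/* Editorial example (not in the original source): given the GENERIC tree
   for (x + f ()) with the result unused, the tcc_binary case drops the
   side-effect-free operand X and keeps only the CALL_EXPR f ().  A
   COND_EXPR whose arms both have side effects is returned unchanged.  */
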
14391 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14392
14393 tree
14394 round_up_loc (location_t loc, tree value, unsigned int divisor)
14395 {
14396 tree div = NULL_TREE;
14397
14398 if (divisor == 1)
14399 return value;
14400
14401   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14402      have to do anything.  Only do this check when VALUE is not a
14403      constant; for a constant, the check is more expensive than just
14404      doing the rounding.  */
14405 if (TREE_CODE (value) != INTEGER_CST)
14406 {
14407 div = build_int_cst (TREE_TYPE (value), divisor);
14408
14409 if (multiple_of_p (TREE_TYPE (value), value, div))
14410 return value;
14411 }
14412
14413 /* If divisor is a power of two, simplify this to bit manipulation. */
14414 if (divisor == (divisor & -divisor))
14415 {
14416 if (TREE_CODE (value) == INTEGER_CST)
14417 {
14418 wide_int val = value;
14419 bool overflow_p;
14420
14421 if ((val & (divisor - 1)) == 0)
14422 return value;
14423
14424 overflow_p = TREE_OVERFLOW (value);
14425 val += divisor - 1;
14426 val &= - (int) divisor;
14427 if (val == 0)
14428 overflow_p = true;
14429
14430 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14431 }
14432 else
14433 {
14434 tree t;
14435
14436 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14437 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14438 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14439 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14440 }
14441 }
14442 else
14443 {
14444 if (!div)
14445 div = build_int_cst (TREE_TYPE (value), divisor);
14446 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14447 value = size_binop_loc (loc, MULT_EXPR, value, div);
14448 }
14449
14450 return value;
14451 }
14452
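/* Editorial example (not in the original source): for the power-of-two
   path above, rounding 37 up to a multiple of 8 computes

     (37 + 7) & -8  =  44 & ~7  =  40

   with no division, so round_up_loc (loc, size_int (37), 8) folds
   directly to an INTEGER_CST of value 40.  */
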
14453 /* Likewise, but round down. */
14454
14455 tree
14456 round_down_loc (location_t loc, tree value, int divisor)
14457 {
14458 tree div = NULL_TREE;
14459
14460 gcc_assert (divisor > 0);
14461 if (divisor == 1)
14462 return value;
14463
14464   /* See if VALUE is already a multiple of DIVISOR.  If so, we don't
14465      have to do anything.  Only do this check when VALUE is not a
14466      constant; for a constant, the check is more expensive than just
14467      doing the rounding.  */
14468 if (TREE_CODE (value) != INTEGER_CST)
14469 {
14470 div = build_int_cst (TREE_TYPE (value), divisor);
14471
14472 if (multiple_of_p (TREE_TYPE (value), value, div))
14473 return value;
14474 }
14475
14476 /* If divisor is a power of two, simplify this to bit manipulation. */
14477 if (divisor == (divisor & -divisor))
14478 {
14479 tree t;
14480
14481 t = build_int_cst (TREE_TYPE (value), -divisor);
14482 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14483 }
14484 else
14485 {
14486 if (!div)
14487 div = build_int_cst (TREE_TYPE (value), divisor);
14488 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14489 value = size_binop_loc (loc, MULT_EXPR, value, div);
14490 }
14491
14492 return value;
14493 }
14494
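/* Editorial example (not in the original source): the mirror-image bit
   trick rounds 37 down to a multiple of 8 as 37 & -8 = 32, so
   round_down_loc (loc, size_int (37), 8) folds to 32.  */
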
14495 /* Returns a pointer to the base of the object addressed by EXP,
14496    extracting information about the offset of the access and storing
14497    it in *PBITPOS and *POFFSET.  */
14498
14499 static tree
14500 split_address_to_core_and_offset (tree exp,
14501 HOST_WIDE_INT *pbitpos, tree *poffset)
14502 {
14503 tree core;
14504 machine_mode mode;
14505 int unsignedp, reversep, volatilep;
14506 HOST_WIDE_INT bitsize;
14507 location_t loc = EXPR_LOCATION (exp);
14508
14509 if (TREE_CODE (exp) == ADDR_EXPR)
14510 {
14511 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14512 poffset, &mode, &unsignedp, &reversep,
14513 &volatilep, false);
14514 core = build_fold_addr_expr_loc (loc, core);
14515 }
14516 else
14517 {
14518 core = exp;
14519 *pbitpos = 0;
14520 *poffset = NULL_TREE;
14521 }
14522
14523 return core;
14524 }
14525
14526 /* Returns true if addresses of E1 and E2 differ by a constant, false
14527 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14528
14529 bool
14530 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14531 {
14532 tree core1, core2;
14533 HOST_WIDE_INT bitpos1, bitpos2;
14534 tree toffset1, toffset2, tdiff, type;
14535
14536 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14537 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14538
14539 if (bitpos1 % BITS_PER_UNIT != 0
14540 || bitpos2 % BITS_PER_UNIT != 0
14541 || !operand_equal_p (core1, core2, 0))
14542 return false;
14543
14544 if (toffset1 && toffset2)
14545 {
14546 type = TREE_TYPE (toffset1);
14547 if (type != TREE_TYPE (toffset2))
14548 toffset2 = fold_convert (type, toffset2);
14549
14550 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14551 if (!cst_and_fits_in_hwi (tdiff))
14552 return false;
14553
14554 *diff = int_cst_value (tdiff);
14555 }
14556 else if (toffset1 || toffset2)
14557 {
14558 /* If only one of the offsets is non-constant, the difference cannot
14559 be a constant. */
14560 return false;
14561 }
14562 else
14563 *diff = 0;
14564
14565 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14566 return true;
14567 }
14568
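/* Editorial example (not in the original source): for int A[10], the
   addresses &A[7] and &A[2] share the core &A and differ only in constant
   byte offsets, so ptr_difference_const stores 5 * sizeof (int) in *DIFF
   and returns true.  Comparing &A[i] against &A[2] fails instead, because
   only one of the two offsets is constant.  */
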
14569 /* Return OFF converted to a pointer offset type suitable as offset for
14570 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14571 tree
14572 convert_to_ptrofftype_loc (location_t loc, tree off)
14573 {
14574 return fold_convert_loc (loc, sizetype, off);
14575 }
14576
14577 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14578 tree
14579 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14580 {
14581 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14582 ptr, convert_to_ptrofftype_loc (loc, off));
14583 }
14584
14585 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14586 tree
14587 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14588 {
14589 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14590 ptr, size_int (off));
14591 }
14592
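/* Editorial note (not in the original source): these helpers maintain the
   POINTER_PLUS_EXPR invariant that the offset operand has sizetype.  For
   example, with P of type int *, fold_build_pointer_plus_hwi_loc (loc, p, 4)
   builds the tree for P advanced by 4 bytes, i.e. &p[1] where
   sizeof (int) == 4.  */
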
14593 /* Return a char pointer for a C string if it is a string constant
14594    or the sum of a string constant and an integer constant.  */
14595
14596 const char *
14597 c_getstr (tree src)
14598 {
14599 tree offset_node;
14600
14601 src = string_constant (src, &offset_node);
14602 if (src == 0)
14603 return 0;
14604
14605 if (offset_node == 0)
14606 return TREE_STRING_POINTER (src);
14607 else if (!tree_fits_uhwi_p (offset_node)
14608 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14609 return 0;
14610
14611 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14612 }
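/* Editorial example (not in the original source): for the GENERIC
   equivalent of "hello" + 2, string_constant returns the STRING_CST with
   an offset node of 2, so c_getstr yields the pointer "llo".  An offset
   past the terminating NUL makes the function return 0 instead.  */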