Don't allow latch with phi in try_transform_to_exit_first_loop_alt
[gcc.git] / gcc / fold-const.c
1 /* Fold a constant sub-tree into a single node for C-compiler
2 Copyright (C) 1987-2016 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it under
7 the terms of the GNU General Public License as published by the Free
8 Software Foundation; either version 3, or (at your option) any later
9 version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
12 WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /*@@ This file should be rewritten to use an arbitrary precision
21 @@ representation for "struct tree_int_cst" and "struct tree_real_cst".
22 @@ Perhaps the routines could also be used for bc/dc, and made a lib.
23 @@ The routines that translate from the ap rep should
24 @@ warn if precision et. al. is lost.
25 @@ This would also make life easier when this technology is used
26 @@ for cross-compilers. */
27
28 /* The entry points in this file are fold, size_int_wide and size_binop.
29
30 fold takes a tree as argument and returns a simplified tree.
31
32 size_binop takes a tree code for an arithmetic operation
33 and two operands that are trees, and produces a tree for the
34 result, assuming the type comes from `sizetype'.
35
36 size_int takes an integer value, and creates a tree constant
37 with type from `sizetype'.
38
39 Note: Since the folders get called on non-gimple code as well as
40 gimple code, we need to handle GIMPLE tuples as well as their
41 corresponding tree equivalents. */
42
43 #include "config.h"
44 #include "system.h"
45 #include "coretypes.h"
46 #include "backend.h"
47 #include "target.h"
48 #include "rtl.h"
49 #include "tree.h"
50 #include "gimple.h"
51 #include "predict.h"
52 #include "tm_p.h"
53 #include "tree-ssa-operands.h"
54 #include "optabs-query.h"
55 #include "cgraph.h"
56 #include "diagnostic-core.h"
57 #include "flags.h"
58 #include "alias.h"
59 #include "fold-const.h"
60 #include "fold-const-call.h"
61 #include "stor-layout.h"
62 #include "calls.h"
63 #include "tree-iterator.h"
64 #include "expr.h"
65 #include "intl.h"
66 #include "langhooks.h"
67 #include "tree-eh.h"
68 #include "gimplify.h"
69 #include "tree-dfa.h"
70 #include "builtins.h"
71 #include "generic-match.h"
72 #include "gimple-fold.h"
73 #include "params.h"
74 #include "tree-into-ssa.h"
75 #include "md5.h"
76 #include "case-cfn-macros.h"
77 #include "stringpool.h"
78 #include "tree-ssanames.h"
79
80 #ifndef LOAD_EXTEND_OP
81 #define LOAD_EXTEND_OP(M) UNKNOWN
82 #endif
83
84 /* Nonzero if we are folding constants inside an initializer; zero
85 otherwise. */
86 int folding_initializer = 0;
87
88 /* The following constants represent a bit based encoding of GCC's
89 comparison operators. This encoding simplifies transformations
90 on relational comparison operators, such as AND and OR. */
91 enum comparison_code {
92 COMPCODE_FALSE = 0,
93 COMPCODE_LT = 1,
94 COMPCODE_EQ = 2,
95 COMPCODE_LE = 3,
96 COMPCODE_GT = 4,
97 COMPCODE_LTGT = 5,
98 COMPCODE_GE = 6,
99 COMPCODE_ORD = 7,
100 COMPCODE_UNORD = 8,
101 COMPCODE_UNLT = 9,
102 COMPCODE_UNEQ = 10,
103 COMPCODE_UNLE = 11,
104 COMPCODE_UNGT = 12,
105 COMPCODE_NE = 13,
106 COMPCODE_UNGE = 14,
107 COMPCODE_TRUE = 15
108 };
109
110 static bool negate_expr_p (tree);
111 static tree negate_expr (tree);
112 static tree split_tree (tree, enum tree_code, tree *, tree *, tree *, int);
113 static tree associate_trees (location_t, tree, tree, enum tree_code, tree);
114 static enum comparison_code comparison_to_compcode (enum tree_code);
115 static enum tree_code compcode_to_comparison (enum comparison_code);
116 static int operand_equal_for_comparison_p (tree, tree, tree);
117 static int twoval_comparison_p (tree, tree *, tree *, int *);
118 static tree eval_subst (location_t, tree, tree, tree, tree, tree);
119 static tree make_bit_field_ref (location_t, tree, tree,
120 HOST_WIDE_INT, HOST_WIDE_INT, int, int);
121 static tree optimize_bit_field_compare (location_t, enum tree_code,
122 tree, tree, tree);
123 static tree decode_field_reference (location_t, tree, HOST_WIDE_INT *,
124 HOST_WIDE_INT *,
125 machine_mode *, int *, int *, int *,
126 tree *, tree *);
127 static int simple_operand_p (const_tree);
128 static bool simple_operand_p_2 (tree);
129 static tree range_binop (enum tree_code, tree, tree, int, tree, int);
130 static tree range_predecessor (tree);
131 static tree range_successor (tree);
132 static tree fold_range_test (location_t, enum tree_code, tree, tree, tree);
133 static tree fold_cond_expr_with_comparison (location_t, tree, tree, tree, tree);
134 static tree unextend (tree, int, int, tree);
135 static tree optimize_minmax_comparison (location_t, enum tree_code,
136 tree, tree, tree);
137 static tree extract_muldiv (tree, tree, enum tree_code, tree, bool *);
138 static tree extract_muldiv_1 (tree, tree, enum tree_code, tree, bool *);
139 static tree fold_binary_op_with_conditional_arg (location_t,
140 enum tree_code, tree,
141 tree, tree,
142 tree, tree, int);
143 static tree fold_div_compare (location_t, enum tree_code, tree, tree, tree);
144 static bool reorder_operands_p (const_tree, const_tree);
145 static tree fold_negate_const (tree, tree);
146 static tree fold_not_const (const_tree, tree);
147 static tree fold_relational_const (enum tree_code, tree, tree, tree);
148 static tree fold_convert_const (enum tree_code, tree, tree);
149 static tree fold_view_convert_expr (tree, tree);
150 static bool vec_cst_ctor_to_array (tree, tree *);
151
152
153 /* Return EXPR_LOCATION of T if it is not UNKNOWN_LOCATION.
154 Otherwise, return LOC. */
155
156 static location_t
157 expr_location_or (tree t, location_t loc)
158 {
159 location_t tloc = EXPR_LOCATION (t);
160 return tloc == UNKNOWN_LOCATION ? loc : tloc;
161 }
162
163 /* Similar to protected_set_expr_location, but never modify x in place,
164 if location can and needs to be set, unshare it. */
165
166 static inline tree
167 protected_set_expr_location_unshare (tree x, location_t loc)
168 {
169 if (CAN_HAVE_LOCATION_P (x)
170 && EXPR_LOCATION (x) != loc
171 && !(TREE_CODE (x) == SAVE_EXPR
172 || TREE_CODE (x) == TARGET_EXPR
173 || TREE_CODE (x) == BIND_EXPR))
174 {
175 x = copy_node (x);
176 SET_EXPR_LOCATION (x, loc);
177 }
178 return x;
179 }
180 \f
181 /* If ARG2 divides ARG1 with zero remainder, carries out the exact
182 division and returns the quotient. Otherwise returns
183 NULL_TREE. */
184
185 tree
186 div_if_zero_remainder (const_tree arg1, const_tree arg2)
187 {
188 widest_int quo;
189
190 if (wi::multiple_of_p (wi::to_widest (arg1), wi::to_widest (arg2),
191 SIGNED, &quo))
192 return wide_int_to_tree (TREE_TYPE (arg1), quo);
193
194 return NULL_TREE;
195 }
196 \f
197 /* This is nonzero if we should defer warnings about undefined
198 overflow. This facility exists because these warnings are a
199 special case. The code to estimate loop iterations does not want
200 to issue any warnings, since it works with expressions which do not
201 occur in user code. Various bits of cleanup code call fold(), but
202 only use the result if it has certain characteristics (e.g., is a
203 constant); that code only wants to issue a warning if the result is
204 used. */
205
206 static int fold_deferring_overflow_warnings;
207
208 /* If a warning about undefined overflow is deferred, this is the
209 warning. Note that this may cause us to turn two warnings into
210 one, but that is fine since it is sufficient to only give one
211 warning per expression. */
212
213 static const char* fold_deferred_overflow_warning;
214
215 /* If a warning about undefined overflow is deferred, this is the
216 level at which the warning should be emitted. */
217
218 static enum warn_strict_overflow_code fold_deferred_overflow_code;
219
220 /* Start deferring overflow warnings. We could use a stack here to
221 permit nested calls, but at present it is not necessary. */
222
223 void
224 fold_defer_overflow_warnings (void)
225 {
226 ++fold_deferring_overflow_warnings;
227 }
228
229 /* Stop deferring overflow warnings. If there is a pending warning,
230 and ISSUE is true, then issue the warning if appropriate. STMT is
231 the statement with which the warning should be associated (used for
232 location information); STMT may be NULL. CODE is the level of the
233 warning--a warn_strict_overflow_code value. This function will use
234 the smaller of CODE and the deferred code when deciding whether to
235 issue the warning. CODE may be zero to mean to always use the
236 deferred code. */
237
238 void
239 fold_undefer_overflow_warnings (bool issue, const gimple *stmt, int code)
240 {
241 const char *warnmsg;
242 location_t locus;
243
244 gcc_assert (fold_deferring_overflow_warnings > 0);
245 --fold_deferring_overflow_warnings;
246 if (fold_deferring_overflow_warnings > 0)
247 {
248 if (fold_deferred_overflow_warning != NULL
249 && code != 0
250 && code < (int) fold_deferred_overflow_code)
251 fold_deferred_overflow_code = (enum warn_strict_overflow_code) code;
252 return;
253 }
254
255 warnmsg = fold_deferred_overflow_warning;
256 fold_deferred_overflow_warning = NULL;
257
258 if (!issue || warnmsg == NULL)
259 return;
260
261 if (gimple_no_warning_p (stmt))
262 return;
263
264 /* Use the smallest code level when deciding to issue the
265 warning. */
266 if (code == 0 || code > (int) fold_deferred_overflow_code)
267 code = fold_deferred_overflow_code;
268
269 if (!issue_strict_overflow_warning (code))
270 return;
271
272 if (stmt == NULL)
273 locus = input_location;
274 else
275 locus = gimple_location (stmt);
276 warning_at (locus, OPT_Wstrict_overflow, "%s", warnmsg);
277 }
278
279 /* Stop deferring overflow warnings, ignoring any deferred
280 warnings. */
281
282 void
283 fold_undefer_and_ignore_overflow_warnings (void)
284 {
285 fold_undefer_overflow_warnings (false, NULL, 0);
286 }
287
288 /* Whether we are deferring overflow warnings. */
289
290 bool
291 fold_deferring_overflow_warnings_p (void)
292 {
293 return fold_deferring_overflow_warnings > 0;
294 }
295
296 /* This is called when we fold something based on the fact that signed
297 overflow is undefined. */
298
299 static void
300 fold_overflow_warning (const char* gmsgid, enum warn_strict_overflow_code wc)
301 {
302 if (fold_deferring_overflow_warnings > 0)
303 {
304 if (fold_deferred_overflow_warning == NULL
305 || wc < fold_deferred_overflow_code)
306 {
307 fold_deferred_overflow_warning = gmsgid;
308 fold_deferred_overflow_code = wc;
309 }
310 }
311 else if (issue_strict_overflow_warning (wc))
312 warning (OPT_Wstrict_overflow, gmsgid);
313 }
314 \f
315 /* Return true if the built-in mathematical function specified by CODE
316 is odd, i.e. -f(x) == f(-x). */
317
318 bool
319 negate_mathfn_p (combined_fn fn)
320 {
321 switch (fn)
322 {
323 CASE_CFN_ASIN:
324 CASE_CFN_ASINH:
325 CASE_CFN_ATAN:
326 CASE_CFN_ATANH:
327 CASE_CFN_CASIN:
328 CASE_CFN_CASINH:
329 CASE_CFN_CATAN:
330 CASE_CFN_CATANH:
331 CASE_CFN_CBRT:
332 CASE_CFN_CPROJ:
333 CASE_CFN_CSIN:
334 CASE_CFN_CSINH:
335 CASE_CFN_CTAN:
336 CASE_CFN_CTANH:
337 CASE_CFN_ERF:
338 CASE_CFN_LLROUND:
339 CASE_CFN_LROUND:
340 CASE_CFN_ROUND:
341 CASE_CFN_SIN:
342 CASE_CFN_SINH:
343 CASE_CFN_TAN:
344 CASE_CFN_TANH:
345 CASE_CFN_TRUNC:
346 return true;
347
348 CASE_CFN_LLRINT:
349 CASE_CFN_LRINT:
350 CASE_CFN_NEARBYINT:
351 CASE_CFN_RINT:
352 return !flag_rounding_math;
353
354 default:
355 break;
356 }
357 return false;
358 }
359
360 /* Check whether we may negate an integer constant T without causing
361 overflow. */
362
363 bool
364 may_negate_without_overflow_p (const_tree t)
365 {
366 tree type;
367
368 gcc_assert (TREE_CODE (t) == INTEGER_CST);
369
370 type = TREE_TYPE (t);
371 if (TYPE_UNSIGNED (type))
372 return false;
373
374 return !wi::only_sign_bit_p (t);
375 }
376
377 /* Determine whether an expression T can be cheaply negated using
378 the function negate_expr without introducing undefined overflow. */
379
380 static bool
381 negate_expr_p (tree t)
382 {
383 tree type;
384
385 if (t == 0)
386 return false;
387
388 type = TREE_TYPE (t);
389
390 STRIP_SIGN_NOPS (t);
391 switch (TREE_CODE (t))
392 {
393 case INTEGER_CST:
394 if (INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_WRAPS (type))
395 return true;
396
397 /* Check that -CST will not overflow type. */
398 return may_negate_without_overflow_p (t);
399 case BIT_NOT_EXPR:
400 return (INTEGRAL_TYPE_P (type)
401 && TYPE_OVERFLOW_WRAPS (type));
402
403 case FIXED_CST:
404 return true;
405
406 case NEGATE_EXPR:
407 return !TYPE_OVERFLOW_SANITIZED (type);
408
409 case REAL_CST:
410 /* We want to canonicalize to positive real constants. Pretend
411 that only negative ones can be easily negated. */
412 return REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
413
414 case COMPLEX_CST:
415 return negate_expr_p (TREE_REALPART (t))
416 && negate_expr_p (TREE_IMAGPART (t));
417
418 case VECTOR_CST:
419 {
420 if (FLOAT_TYPE_P (TREE_TYPE (type)) || TYPE_OVERFLOW_WRAPS (type))
421 return true;
422
423 int count = TYPE_VECTOR_SUBPARTS (type), i;
424
425 for (i = 0; i < count; i++)
426 if (!negate_expr_p (VECTOR_CST_ELT (t, i)))
427 return false;
428
429 return true;
430 }
431
432 case COMPLEX_EXPR:
433 return negate_expr_p (TREE_OPERAND (t, 0))
434 && negate_expr_p (TREE_OPERAND (t, 1));
435
436 case CONJ_EXPR:
437 return negate_expr_p (TREE_OPERAND (t, 0));
438
439 case PLUS_EXPR:
440 if (HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
441 || HONOR_SIGNED_ZEROS (element_mode (type))
442 || (INTEGRAL_TYPE_P (type)
443 && ! TYPE_OVERFLOW_WRAPS (type)))
444 return false;
445 /* -(A + B) -> (-B) - A. */
446 if (negate_expr_p (TREE_OPERAND (t, 1))
447 && reorder_operands_p (TREE_OPERAND (t, 0),
448 TREE_OPERAND (t, 1)))
449 return true;
450 /* -(A + B) -> (-A) - B. */
451 return negate_expr_p (TREE_OPERAND (t, 0));
452
453 case MINUS_EXPR:
454 /* We can't turn -(A-B) into B-A when we honor signed zeros. */
455 return !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
456 && !HONOR_SIGNED_ZEROS (element_mode (type))
457 && (! INTEGRAL_TYPE_P (type)
458 || TYPE_OVERFLOW_WRAPS (type))
459 && reorder_operands_p (TREE_OPERAND (t, 0),
460 TREE_OPERAND (t, 1));
461
462 case MULT_EXPR:
463 if (TYPE_UNSIGNED (type))
464 break;
465 /* INT_MIN/n * n doesn't overflow while negating one operand it does
466 if n is a power of two. */
467 if (INTEGRAL_TYPE_P (TREE_TYPE (t))
468 && ! TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
469 && ! ((TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST
470 && ! integer_pow2p (TREE_OPERAND (t, 0)))
471 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
472 && ! integer_pow2p (TREE_OPERAND (t, 1)))))
473 break;
474
475 /* Fall through. */
476
477 case RDIV_EXPR:
478 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (TREE_TYPE (t))))
479 return negate_expr_p (TREE_OPERAND (t, 1))
480 || negate_expr_p (TREE_OPERAND (t, 0));
481 break;
482
483 case TRUNC_DIV_EXPR:
484 case ROUND_DIV_EXPR:
485 case EXACT_DIV_EXPR:
486 if (TYPE_UNSIGNED (type))
487 break;
488 if (negate_expr_p (TREE_OPERAND (t, 0)))
489 return true;
490 /* In general we can't negate B in A / B, because if A is INT_MIN and
491 B is 1, we may turn this into INT_MIN / -1 which is undefined
492 and actually traps on some architectures. */
493 if (! INTEGRAL_TYPE_P (TREE_TYPE (t))
494 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
495 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
496 && ! integer_onep (TREE_OPERAND (t, 1))))
497 return negate_expr_p (TREE_OPERAND (t, 1));
498 break;
499
500 case NOP_EXPR:
501 /* Negate -((double)float) as (double)(-float). */
502 if (TREE_CODE (type) == REAL_TYPE)
503 {
504 tree tem = strip_float_extensions (t);
505 if (tem != t)
506 return negate_expr_p (tem);
507 }
508 break;
509
510 case CALL_EXPR:
511 /* Negate -f(x) as f(-x). */
512 if (negate_mathfn_p (get_call_combined_fn (t)))
513 return negate_expr_p (CALL_EXPR_ARG (t, 0));
514 break;
515
516 case RSHIFT_EXPR:
517 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
518 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
519 {
520 tree op1 = TREE_OPERAND (t, 1);
521 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
522 return true;
523 }
524 break;
525
526 default:
527 break;
528 }
529 return false;
530 }
531
532 /* Given T, an expression, return a folded tree for -T or NULL_TREE, if no
533 simplification is possible.
534 If negate_expr_p would return true for T, NULL_TREE will never be
535 returned. */
536
537 static tree
538 fold_negate_expr (location_t loc, tree t)
539 {
540 tree type = TREE_TYPE (t);
541 tree tem;
542
543 switch (TREE_CODE (t))
544 {
545 /* Convert - (~A) to A + 1. */
546 case BIT_NOT_EXPR:
547 if (INTEGRAL_TYPE_P (type))
548 return fold_build2_loc (loc, PLUS_EXPR, type, TREE_OPERAND (t, 0),
549 build_one_cst (type));
550 break;
551
552 case INTEGER_CST:
553 tem = fold_negate_const (t, type);
554 if (TREE_OVERFLOW (tem) == TREE_OVERFLOW (t)
555 || (ANY_INTEGRAL_TYPE_P (type)
556 && !TYPE_OVERFLOW_TRAPS (type)
557 && TYPE_OVERFLOW_WRAPS (type))
558 || (flag_sanitize & SANITIZE_SI_OVERFLOW) == 0)
559 return tem;
560 break;
561
562 case REAL_CST:
563 tem = fold_negate_const (t, type);
564 return tem;
565
566 case FIXED_CST:
567 tem = fold_negate_const (t, type);
568 return tem;
569
570 case COMPLEX_CST:
571 {
572 tree rpart = fold_negate_expr (loc, TREE_REALPART (t));
573 tree ipart = fold_negate_expr (loc, TREE_IMAGPART (t));
574 if (rpart && ipart)
575 return build_complex (type, rpart, ipart);
576 }
577 break;
578
579 case VECTOR_CST:
580 {
581 int count = TYPE_VECTOR_SUBPARTS (type), i;
582 tree *elts = XALLOCAVEC (tree, count);
583
584 for (i = 0; i < count; i++)
585 {
586 elts[i] = fold_negate_expr (loc, VECTOR_CST_ELT (t, i));
587 if (elts[i] == NULL_TREE)
588 return NULL_TREE;
589 }
590
591 return build_vector (type, elts);
592 }
593
594 case COMPLEX_EXPR:
595 if (negate_expr_p (t))
596 return fold_build2_loc (loc, COMPLEX_EXPR, type,
597 fold_negate_expr (loc, TREE_OPERAND (t, 0)),
598 fold_negate_expr (loc, TREE_OPERAND (t, 1)));
599 break;
600
601 case CONJ_EXPR:
602 if (negate_expr_p (t))
603 return fold_build1_loc (loc, CONJ_EXPR, type,
604 fold_negate_expr (loc, TREE_OPERAND (t, 0)));
605 break;
606
607 case NEGATE_EXPR:
608 if (!TYPE_OVERFLOW_SANITIZED (type))
609 return TREE_OPERAND (t, 0);
610 break;
611
612 case PLUS_EXPR:
613 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
614 && !HONOR_SIGNED_ZEROS (element_mode (type)))
615 {
616 /* -(A + B) -> (-B) - A. */
617 if (negate_expr_p (TREE_OPERAND (t, 1))
618 && reorder_operands_p (TREE_OPERAND (t, 0),
619 TREE_OPERAND (t, 1)))
620 {
621 tem = negate_expr (TREE_OPERAND (t, 1));
622 return fold_build2_loc (loc, MINUS_EXPR, type,
623 tem, TREE_OPERAND (t, 0));
624 }
625
626 /* -(A + B) -> (-A) - B. */
627 if (negate_expr_p (TREE_OPERAND (t, 0)))
628 {
629 tem = negate_expr (TREE_OPERAND (t, 0));
630 return fold_build2_loc (loc, MINUS_EXPR, type,
631 tem, TREE_OPERAND (t, 1));
632 }
633 }
634 break;
635
636 case MINUS_EXPR:
637 /* - (A - B) -> B - A */
638 if (!HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type))
639 && !HONOR_SIGNED_ZEROS (element_mode (type))
640 && reorder_operands_p (TREE_OPERAND (t, 0), TREE_OPERAND (t, 1)))
641 return fold_build2_loc (loc, MINUS_EXPR, type,
642 TREE_OPERAND (t, 1), TREE_OPERAND (t, 0));
643 break;
644
645 case MULT_EXPR:
646 if (TYPE_UNSIGNED (type))
647 break;
648
649 /* Fall through. */
650
651 case RDIV_EXPR:
652 if (! HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type)))
653 {
654 tem = TREE_OPERAND (t, 1);
655 if (negate_expr_p (tem))
656 return fold_build2_loc (loc, TREE_CODE (t), type,
657 TREE_OPERAND (t, 0), negate_expr (tem));
658 tem = TREE_OPERAND (t, 0);
659 if (negate_expr_p (tem))
660 return fold_build2_loc (loc, TREE_CODE (t), type,
661 negate_expr (tem), TREE_OPERAND (t, 1));
662 }
663 break;
664
665 case TRUNC_DIV_EXPR:
666 case ROUND_DIV_EXPR:
667 case EXACT_DIV_EXPR:
668 if (TYPE_UNSIGNED (type))
669 break;
670 if (negate_expr_p (TREE_OPERAND (t, 0)))
671 return fold_build2_loc (loc, TREE_CODE (t), type,
672 negate_expr (TREE_OPERAND (t, 0)),
673 TREE_OPERAND (t, 1));
674 /* In general we can't negate B in A / B, because if A is INT_MIN and
675 B is 1, we may turn this into INT_MIN / -1 which is undefined
676 and actually traps on some architectures. */
677 if ((! INTEGRAL_TYPE_P (TREE_TYPE (t))
678 || TYPE_OVERFLOW_WRAPS (TREE_TYPE (t))
679 || (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
680 && ! integer_onep (TREE_OPERAND (t, 1))))
681 && negate_expr_p (TREE_OPERAND (t, 1)))
682 return fold_build2_loc (loc, TREE_CODE (t), type,
683 TREE_OPERAND (t, 0),
684 negate_expr (TREE_OPERAND (t, 1)));
685 break;
686
687 case NOP_EXPR:
688 /* Convert -((double)float) into (double)(-float). */
689 if (TREE_CODE (type) == REAL_TYPE)
690 {
691 tem = strip_float_extensions (t);
692 if (tem != t && negate_expr_p (tem))
693 return fold_convert_loc (loc, type, negate_expr (tem));
694 }
695 break;
696
697 case CALL_EXPR:
698 /* Negate -f(x) as f(-x). */
699 if (negate_mathfn_p (get_call_combined_fn (t))
700 && negate_expr_p (CALL_EXPR_ARG (t, 0)))
701 {
702 tree fndecl, arg;
703
704 fndecl = get_callee_fndecl (t);
705 arg = negate_expr (CALL_EXPR_ARG (t, 0));
706 return build_call_expr_loc (loc, fndecl, 1, arg);
707 }
708 break;
709
710 case RSHIFT_EXPR:
711 /* Optimize -((int)x >> 31) into (unsigned)x >> 31 for int. */
712 if (TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST)
713 {
714 tree op1 = TREE_OPERAND (t, 1);
715 if (wi::eq_p (op1, TYPE_PRECISION (type) - 1))
716 {
717 tree ntype = TYPE_UNSIGNED (type)
718 ? signed_type_for (type)
719 : unsigned_type_for (type);
720 tree temp = fold_convert_loc (loc, ntype, TREE_OPERAND (t, 0));
721 temp = fold_build2_loc (loc, RSHIFT_EXPR, ntype, temp, op1);
722 return fold_convert_loc (loc, type, temp);
723 }
724 }
725 break;
726
727 default:
728 break;
729 }
730
731 return NULL_TREE;
732 }
733
734 /* Like fold_negate_expr, but return a NEGATE_EXPR tree, if T can not be
735 negated in a simpler way. Also allow for T to be NULL_TREE, in which case
736 return NULL_TREE. */
737
738 static tree
739 negate_expr (tree t)
740 {
741 tree type, tem;
742 location_t loc;
743
744 if (t == NULL_TREE)
745 return NULL_TREE;
746
747 loc = EXPR_LOCATION (t);
748 type = TREE_TYPE (t);
749 STRIP_SIGN_NOPS (t);
750
751 tem = fold_negate_expr (loc, t);
752 if (!tem)
753 tem = build1_loc (loc, NEGATE_EXPR, TREE_TYPE (t), t);
754 return fold_convert_loc (loc, type, tem);
755 }
756 \f
757 /* Split a tree IN into a constant, literal and variable parts that could be
758 combined with CODE to make IN. "constant" means an expression with
759 TREE_CONSTANT but that isn't an actual constant. CODE must be a
760 commutative arithmetic operation. Store the constant part into *CONP,
761 the literal in *LITP and return the variable part. If a part isn't
762 present, set it to null. If the tree does not decompose in this way,
763 return the entire tree as the variable part and the other parts as null.
764
765 If CODE is PLUS_EXPR we also split trees that use MINUS_EXPR. In that
766 case, we negate an operand that was subtracted. Except if it is a
767 literal for which we use *MINUS_LITP instead.
768
769 If NEGATE_P is true, we are negating all of IN, again except a literal
770 for which we use *MINUS_LITP instead.
771
772 If IN is itself a literal or constant, return it as appropriate.
773
774 Note that we do not guarantee that any of the three values will be the
775 same type as IN, but they will have the same signedness and mode. */
776
777 static tree
778 split_tree (tree in, enum tree_code code, tree *conp, tree *litp,
779 tree *minus_litp, int negate_p)
780 {
781 tree var = 0;
782
783 *conp = 0;
784 *litp = 0;
785 *minus_litp = 0;
786
787 /* Strip any conversions that don't change the machine mode or signedness. */
788 STRIP_SIGN_NOPS (in);
789
790 if (TREE_CODE (in) == INTEGER_CST || TREE_CODE (in) == REAL_CST
791 || TREE_CODE (in) == FIXED_CST)
792 *litp = in;
793 else if (TREE_CODE (in) == code
794 || ((! FLOAT_TYPE_P (TREE_TYPE (in)) || flag_associative_math)
795 && ! SAT_FIXED_POINT_TYPE_P (TREE_TYPE (in))
796 /* We can associate addition and subtraction together (even
797 though the C standard doesn't say so) for integers because
798 the value is not affected. For reals, the value might be
799 affected, so we can't. */
800 && ((code == PLUS_EXPR && TREE_CODE (in) == MINUS_EXPR)
801 || (code == MINUS_EXPR && TREE_CODE (in) == PLUS_EXPR))))
802 {
803 tree op0 = TREE_OPERAND (in, 0);
804 tree op1 = TREE_OPERAND (in, 1);
805 int neg1_p = TREE_CODE (in) == MINUS_EXPR;
806 int neg_litp_p = 0, neg_conp_p = 0, neg_var_p = 0;
807
808 /* First see if either of the operands is a literal, then a constant. */
809 if (TREE_CODE (op0) == INTEGER_CST || TREE_CODE (op0) == REAL_CST
810 || TREE_CODE (op0) == FIXED_CST)
811 *litp = op0, op0 = 0;
812 else if (TREE_CODE (op1) == INTEGER_CST || TREE_CODE (op1) == REAL_CST
813 || TREE_CODE (op1) == FIXED_CST)
814 *litp = op1, neg_litp_p = neg1_p, op1 = 0;
815
816 if (op0 != 0 && TREE_CONSTANT (op0))
817 *conp = op0, op0 = 0;
818 else if (op1 != 0 && TREE_CONSTANT (op1))
819 *conp = op1, neg_conp_p = neg1_p, op1 = 0;
820
821 /* If we haven't dealt with either operand, this is not a case we can
822 decompose. Otherwise, VAR is either of the ones remaining, if any. */
823 if (op0 != 0 && op1 != 0)
824 var = in;
825 else if (op0 != 0)
826 var = op0;
827 else
828 var = op1, neg_var_p = neg1_p;
829
830 /* Now do any needed negations. */
831 if (neg_litp_p)
832 *minus_litp = *litp, *litp = 0;
833 if (neg_conp_p)
834 *conp = negate_expr (*conp);
835 if (neg_var_p)
836 var = negate_expr (var);
837 }
838 else if (TREE_CODE (in) == BIT_NOT_EXPR
839 && code == PLUS_EXPR)
840 {
841 /* -X - 1 is folded to ~X, undo that here. */
842 *minus_litp = build_one_cst (TREE_TYPE (in));
843 var = negate_expr (TREE_OPERAND (in, 0));
844 }
845 else if (TREE_CONSTANT (in))
846 *conp = in;
847 else
848 var = in;
849
850 if (negate_p)
851 {
852 if (*litp)
853 *minus_litp = *litp, *litp = 0;
854 else if (*minus_litp)
855 *litp = *minus_litp, *minus_litp = 0;
856 *conp = negate_expr (*conp);
857 var = negate_expr (var);
858 }
859
860 return var;
861 }
862
863 /* Re-associate trees split by the above function. T1 and T2 are
864 either expressions to associate or null. Return the new
865 expression, if any. LOC is the location of the new expression. If
866 we build an operation, do it in TYPE and with CODE. */
867
868 static tree
869 associate_trees (location_t loc, tree t1, tree t2, enum tree_code code, tree type)
870 {
871 if (t1 == 0)
872 return t2;
873 else if (t2 == 0)
874 return t1;
875
876 /* If either input is CODE, a PLUS_EXPR, or a MINUS_EXPR, don't
877 try to fold this since we will have infinite recursion. But do
878 deal with any NEGATE_EXPRs. */
879 if (TREE_CODE (t1) == code || TREE_CODE (t2) == code
880 || TREE_CODE (t1) == MINUS_EXPR || TREE_CODE (t2) == MINUS_EXPR)
881 {
882 if (code == PLUS_EXPR)
883 {
884 if (TREE_CODE (t1) == NEGATE_EXPR)
885 return build2_loc (loc, MINUS_EXPR, type,
886 fold_convert_loc (loc, type, t2),
887 fold_convert_loc (loc, type,
888 TREE_OPERAND (t1, 0)));
889 else if (TREE_CODE (t2) == NEGATE_EXPR)
890 return build2_loc (loc, MINUS_EXPR, type,
891 fold_convert_loc (loc, type, t1),
892 fold_convert_loc (loc, type,
893 TREE_OPERAND (t2, 0)));
894 else if (integer_zerop (t2))
895 return fold_convert_loc (loc, type, t1);
896 }
897 else if (code == MINUS_EXPR)
898 {
899 if (integer_zerop (t2))
900 return fold_convert_loc (loc, type, t1);
901 }
902
903 return build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
904 fold_convert_loc (loc, type, t2));
905 }
906
907 return fold_build2_loc (loc, code, type, fold_convert_loc (loc, type, t1),
908 fold_convert_loc (loc, type, t2));
909 }
910 \f
911 /* Check whether TYPE1 and TYPE2 are equivalent integer types, suitable
912 for use in int_const_binop, size_binop and size_diffop. */
913
914 static bool
915 int_binop_types_match_p (enum tree_code code, const_tree type1, const_tree type2)
916 {
917 if (!INTEGRAL_TYPE_P (type1) && !POINTER_TYPE_P (type1))
918 return false;
919 if (!INTEGRAL_TYPE_P (type2) && !POINTER_TYPE_P (type2))
920 return false;
921
922 switch (code)
923 {
924 case LSHIFT_EXPR:
925 case RSHIFT_EXPR:
926 case LROTATE_EXPR:
927 case RROTATE_EXPR:
928 return true;
929
930 default:
931 break;
932 }
933
934 return TYPE_UNSIGNED (type1) == TYPE_UNSIGNED (type2)
935 && TYPE_PRECISION (type1) == TYPE_PRECISION (type2)
936 && TYPE_MODE (type1) == TYPE_MODE (type2);
937 }
938
939
940 /* Combine two integer constants ARG1 and ARG2 under operation CODE
941 to produce a new constant. Return NULL_TREE if we don't know how
942 to evaluate CODE at compile-time. */
943
944 static tree
945 int_const_binop_1 (enum tree_code code, const_tree arg1, const_tree parg2,
946 int overflowable)
947 {
948 wide_int res;
949 tree t;
950 tree type = TREE_TYPE (arg1);
951 signop sign = TYPE_SIGN (type);
952 bool overflow = false;
953
954 wide_int arg2 = wide_int::from (parg2, TYPE_PRECISION (type),
955 TYPE_SIGN (TREE_TYPE (parg2)));
956
957 switch (code)
958 {
959 case BIT_IOR_EXPR:
960 res = wi::bit_or (arg1, arg2);
961 break;
962
963 case BIT_XOR_EXPR:
964 res = wi::bit_xor (arg1, arg2);
965 break;
966
967 case BIT_AND_EXPR:
968 res = wi::bit_and (arg1, arg2);
969 break;
970
971 case RSHIFT_EXPR:
972 case LSHIFT_EXPR:
973 if (wi::neg_p (arg2))
974 {
975 arg2 = -arg2;
976 if (code == RSHIFT_EXPR)
977 code = LSHIFT_EXPR;
978 else
979 code = RSHIFT_EXPR;
980 }
981
982 if (code == RSHIFT_EXPR)
983 /* It's unclear from the C standard whether shifts can overflow.
984 The following code ignores overflow; perhaps a C standard
985 interpretation ruling is needed. */
986 res = wi::rshift (arg1, arg2, sign);
987 else
988 res = wi::lshift (arg1, arg2);
989 break;
990
991 case RROTATE_EXPR:
992 case LROTATE_EXPR:
993 if (wi::neg_p (arg2))
994 {
995 arg2 = -arg2;
996 if (code == RROTATE_EXPR)
997 code = LROTATE_EXPR;
998 else
999 code = RROTATE_EXPR;
1000 }
1001
1002 if (code == RROTATE_EXPR)
1003 res = wi::rrotate (arg1, arg2);
1004 else
1005 res = wi::lrotate (arg1, arg2);
1006 break;
1007
1008 case PLUS_EXPR:
1009 res = wi::add (arg1, arg2, sign, &overflow);
1010 break;
1011
1012 case MINUS_EXPR:
1013 res = wi::sub (arg1, arg2, sign, &overflow);
1014 break;
1015
1016 case MULT_EXPR:
1017 res = wi::mul (arg1, arg2, sign, &overflow);
1018 break;
1019
1020 case MULT_HIGHPART_EXPR:
1021 res = wi::mul_high (arg1, arg2, sign);
1022 break;
1023
1024 case TRUNC_DIV_EXPR:
1025 case EXACT_DIV_EXPR:
1026 if (arg2 == 0)
1027 return NULL_TREE;
1028 res = wi::div_trunc (arg1, arg2, sign, &overflow);
1029 break;
1030
1031 case FLOOR_DIV_EXPR:
1032 if (arg2 == 0)
1033 return NULL_TREE;
1034 res = wi::div_floor (arg1, arg2, sign, &overflow);
1035 break;
1036
1037 case CEIL_DIV_EXPR:
1038 if (arg2 == 0)
1039 return NULL_TREE;
1040 res = wi::div_ceil (arg1, arg2, sign, &overflow);
1041 break;
1042
1043 case ROUND_DIV_EXPR:
1044 if (arg2 == 0)
1045 return NULL_TREE;
1046 res = wi::div_round (arg1, arg2, sign, &overflow);
1047 break;
1048
1049 case TRUNC_MOD_EXPR:
1050 if (arg2 == 0)
1051 return NULL_TREE;
1052 res = wi::mod_trunc (arg1, arg2, sign, &overflow);
1053 break;
1054
1055 case FLOOR_MOD_EXPR:
1056 if (arg2 == 0)
1057 return NULL_TREE;
1058 res = wi::mod_floor (arg1, arg2, sign, &overflow);
1059 break;
1060
1061 case CEIL_MOD_EXPR:
1062 if (arg2 == 0)
1063 return NULL_TREE;
1064 res = wi::mod_ceil (arg1, arg2, sign, &overflow);
1065 break;
1066
1067 case ROUND_MOD_EXPR:
1068 if (arg2 == 0)
1069 return NULL_TREE;
1070 res = wi::mod_round (arg1, arg2, sign, &overflow);
1071 break;
1072
1073 case MIN_EXPR:
1074 res = wi::min (arg1, arg2, sign);
1075 break;
1076
1077 case MAX_EXPR:
1078 res = wi::max (arg1, arg2, sign);
1079 break;
1080
1081 default:
1082 return NULL_TREE;
1083 }
1084
1085 t = force_fit_type (type, res, overflowable,
1086 (((sign == SIGNED || overflowable == -1)
1087 && overflow)
1088 | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (parg2)));
1089
1090 return t;
1091 }
1092
1093 tree
1094 int_const_binop (enum tree_code code, const_tree arg1, const_tree arg2)
1095 {
1096 return int_const_binop_1 (code, arg1, arg2, 1);
1097 }
1098
1099 /* Combine two constants ARG1 and ARG2 under operation CODE to produce a new
1100 constant. We assume ARG1 and ARG2 have the same data type, or at least
1101 are the same kind of constant and the same machine mode. Return zero if
1102 combining the constants is not allowed in the current operating mode. */
1103
1104 static tree
1105 const_binop (enum tree_code code, tree arg1, tree arg2)
1106 {
1107 /* Sanity check for the recursive cases. */
1108 if (!arg1 || !arg2)
1109 return NULL_TREE;
1110
1111 STRIP_NOPS (arg1);
1112 STRIP_NOPS (arg2);
1113
1114 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg2) == INTEGER_CST)
1115 {
1116 if (code == POINTER_PLUS_EXPR)
1117 return int_const_binop (PLUS_EXPR,
1118 arg1, fold_convert (TREE_TYPE (arg1), arg2));
1119
1120 return int_const_binop (code, arg1, arg2);
1121 }
1122
1123 if (TREE_CODE (arg1) == REAL_CST && TREE_CODE (arg2) == REAL_CST)
1124 {
1125 machine_mode mode;
1126 REAL_VALUE_TYPE d1;
1127 REAL_VALUE_TYPE d2;
1128 REAL_VALUE_TYPE value;
1129 REAL_VALUE_TYPE result;
1130 bool inexact;
1131 tree t, type;
1132
1133 /* The following codes are handled by real_arithmetic. */
1134 switch (code)
1135 {
1136 case PLUS_EXPR:
1137 case MINUS_EXPR:
1138 case MULT_EXPR:
1139 case RDIV_EXPR:
1140 case MIN_EXPR:
1141 case MAX_EXPR:
1142 break;
1143
1144 default:
1145 return NULL_TREE;
1146 }
1147
1148 d1 = TREE_REAL_CST (arg1);
1149 d2 = TREE_REAL_CST (arg2);
1150
1151 type = TREE_TYPE (arg1);
1152 mode = TYPE_MODE (type);
1153
1154 /* Don't perform operation if we honor signaling NaNs and
1155 either operand is a signaling NaN. */
1156 if (HONOR_SNANS (mode)
1157 && (REAL_VALUE_ISSIGNALING_NAN (d1)
1158 || REAL_VALUE_ISSIGNALING_NAN (d2)))
1159 return NULL_TREE;
1160
1161 /* Don't perform operation if it would raise a division
1162 by zero exception. */
1163 if (code == RDIV_EXPR
1164 && real_equal (&d2, &dconst0)
1165 && (flag_trapping_math || ! MODE_HAS_INFINITIES (mode)))
1166 return NULL_TREE;
1167
1168 /* If either operand is a NaN, just return it. Otherwise, set up
1169 for floating-point trap; we return an overflow. */
1170 if (REAL_VALUE_ISNAN (d1))
1171 {
1172 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1173 is off. */
1174 d1.signalling = 0;
1175 t = build_real (type, d1);
1176 return t;
1177 }
1178 else if (REAL_VALUE_ISNAN (d2))
1179 {
1180 /* Make resulting NaN value to be qNaN when flag_signaling_nans
1181 is off. */
1182 d2.signalling = 0;
1183 t = build_real (type, d2);
1184 return t;
1185 }
1186
1187 inexact = real_arithmetic (&value, code, &d1, &d2);
1188 real_convert (&result, mode, &value);
1189
1190 /* Don't constant fold this floating point operation if
1191 the result has overflowed and flag_trapping_math. */
1192 if (flag_trapping_math
1193 && MODE_HAS_INFINITIES (mode)
1194 && REAL_VALUE_ISINF (result)
1195 && !REAL_VALUE_ISINF (d1)
1196 && !REAL_VALUE_ISINF (d2))
1197 return NULL_TREE;
1198
1199 /* Don't constant fold this floating point operation if the
1200 result may dependent upon the run-time rounding mode and
1201 flag_rounding_math is set, or if GCC's software emulation
1202 is unable to accurately represent the result. */
1203 if ((flag_rounding_math
1204 || (MODE_COMPOSITE_P (mode) && !flag_unsafe_math_optimizations))
1205 && (inexact || !real_identical (&result, &value)))
1206 return NULL_TREE;
1207
1208 t = build_real (type, result);
1209
1210 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2);
1211 return t;
1212 }
1213
1214 if (TREE_CODE (arg1) == FIXED_CST)
1215 {
1216 FIXED_VALUE_TYPE f1;
1217 FIXED_VALUE_TYPE f2;
1218 FIXED_VALUE_TYPE result;
1219 tree t, type;
1220 int sat_p;
1221 bool overflow_p;
1222
1223 /* The following codes are handled by fixed_arithmetic. */
1224 switch (code)
1225 {
1226 case PLUS_EXPR:
1227 case MINUS_EXPR:
1228 case MULT_EXPR:
1229 case TRUNC_DIV_EXPR:
1230 if (TREE_CODE (arg2) != FIXED_CST)
1231 return NULL_TREE;
1232 f2 = TREE_FIXED_CST (arg2);
1233 break;
1234
1235 case LSHIFT_EXPR:
1236 case RSHIFT_EXPR:
1237 {
1238 if (TREE_CODE (arg2) != INTEGER_CST)
1239 return NULL_TREE;
1240 wide_int w2 = arg2;
1241 f2.data.high = w2.elt (1);
1242 f2.data.low = w2.elt (0);
1243 f2.mode = SImode;
1244 }
1245 break;
1246
1247 default:
1248 return NULL_TREE;
1249 }
1250
1251 f1 = TREE_FIXED_CST (arg1);
1252 type = TREE_TYPE (arg1);
1253 sat_p = TYPE_SATURATING (type);
1254 overflow_p = fixed_arithmetic (&result, code, &f1, &f2, sat_p);
1255 t = build_fixed (type, result);
1256 /* Propagate overflow flags. */
1257 if (overflow_p | TREE_OVERFLOW (arg1) | TREE_OVERFLOW (arg2))
1258 TREE_OVERFLOW (t) = 1;
1259 return t;
1260 }
1261
1262 if (TREE_CODE (arg1) == COMPLEX_CST && TREE_CODE (arg2) == COMPLEX_CST)
1263 {
1264 tree type = TREE_TYPE (arg1);
1265 tree r1 = TREE_REALPART (arg1);
1266 tree i1 = TREE_IMAGPART (arg1);
1267 tree r2 = TREE_REALPART (arg2);
1268 tree i2 = TREE_IMAGPART (arg2);
1269 tree real, imag;
1270
1271 switch (code)
1272 {
1273 case PLUS_EXPR:
1274 case MINUS_EXPR:
1275 real = const_binop (code, r1, r2);
1276 imag = const_binop (code, i1, i2);
1277 break;
1278
1279 case MULT_EXPR:
1280 if (COMPLEX_FLOAT_TYPE_P (type))
1281 return do_mpc_arg2 (arg1, arg2, type,
1282 /* do_nonfinite= */ folding_initializer,
1283 mpc_mul);
1284
1285 real = const_binop (MINUS_EXPR,
1286 const_binop (MULT_EXPR, r1, r2),
1287 const_binop (MULT_EXPR, i1, i2));
1288 imag = const_binop (PLUS_EXPR,
1289 const_binop (MULT_EXPR, r1, i2),
1290 const_binop (MULT_EXPR, i1, r2));
1291 break;
1292
1293 case RDIV_EXPR:
1294 if (COMPLEX_FLOAT_TYPE_P (type))
1295 return do_mpc_arg2 (arg1, arg2, type,
1296 /* do_nonfinite= */ folding_initializer,
1297 mpc_div);
1298 /* Fallthru ... */
1299 case TRUNC_DIV_EXPR:
1300 case CEIL_DIV_EXPR:
1301 case FLOOR_DIV_EXPR:
1302 case ROUND_DIV_EXPR:
1303 if (flag_complex_method == 0)
1304 {
1305 /* Keep this algorithm in sync with
1306 tree-complex.c:expand_complex_div_straight().
1307
1308 Expand complex division to scalars, straightforward algorithm.
1309 a / b = ((ar*br + ai*bi)/t) + i((ai*br - ar*bi)/t)
1310 t = br*br + bi*bi
1311 */
1312 tree magsquared
1313 = const_binop (PLUS_EXPR,
1314 const_binop (MULT_EXPR, r2, r2),
1315 const_binop (MULT_EXPR, i2, i2));
1316 tree t1
1317 = const_binop (PLUS_EXPR,
1318 const_binop (MULT_EXPR, r1, r2),
1319 const_binop (MULT_EXPR, i1, i2));
1320 tree t2
1321 = const_binop (MINUS_EXPR,
1322 const_binop (MULT_EXPR, i1, r2),
1323 const_binop (MULT_EXPR, r1, i2));
1324
1325 real = const_binop (code, t1, magsquared);
1326 imag = const_binop (code, t2, magsquared);
1327 }
1328 else
1329 {
1330 /* Keep this algorithm in sync with
1331 tree-complex.c:expand_complex_div_wide().
1332
1333 Expand complex division to scalars, modified algorithm to minimize
1334 overflow with wide input ranges. */
1335 tree compare = fold_build2 (LT_EXPR, boolean_type_node,
1336 fold_abs_const (r2, TREE_TYPE (type)),
1337 fold_abs_const (i2, TREE_TYPE (type)));
1338
1339 if (integer_nonzerop (compare))
1340 {
1341 /* In the TRUE branch, we compute
1342 ratio = br/bi;
1343 div = (br * ratio) + bi;
1344 tr = (ar * ratio) + ai;
1345 ti = (ai * ratio) - ar;
1346 tr = tr / div;
1347 ti = ti / div; */
1348 tree ratio = const_binop (code, r2, i2);
1349 tree div = const_binop (PLUS_EXPR, i2,
1350 const_binop (MULT_EXPR, r2, ratio));
1351 real = const_binop (MULT_EXPR, r1, ratio);
1352 real = const_binop (PLUS_EXPR, real, i1);
1353 real = const_binop (code, real, div);
1354
1355 imag = const_binop (MULT_EXPR, i1, ratio);
1356 imag = const_binop (MINUS_EXPR, imag, r1);
1357 imag = const_binop (code, imag, div);
1358 }
1359 else
1360 {
1361 /* In the FALSE branch, we compute
1362 ratio = d/c;
1363 divisor = (d * ratio) + c;
1364 tr = (b * ratio) + a;
1365 ti = b - (a * ratio);
1366 tr = tr / div;
1367 ti = ti / div; */
1368 tree ratio = const_binop (code, i2, r2);
1369 tree div = const_binop (PLUS_EXPR, r2,
1370 const_binop (MULT_EXPR, i2, ratio));
1371
1372 real = const_binop (MULT_EXPR, i1, ratio);
1373 real = const_binop (PLUS_EXPR, real, r1);
1374 real = const_binop (code, real, div);
1375
1376 imag = const_binop (MULT_EXPR, r1, ratio);
1377 imag = const_binop (MINUS_EXPR, i1, imag);
1378 imag = const_binop (code, imag, div);
1379 }
1380 }
1381 break;
1382
1383 default:
1384 return NULL_TREE;
1385 }
1386
1387 if (real && imag)
1388 return build_complex (type, real, imag);
1389 }
1390
1391 if (TREE_CODE (arg1) == VECTOR_CST
1392 && TREE_CODE (arg2) == VECTOR_CST)
1393 {
1394 tree type = TREE_TYPE (arg1);
1395 int count = TYPE_VECTOR_SUBPARTS (type), i;
1396 tree *elts = XALLOCAVEC (tree, count);
1397
1398 for (i = 0; i < count; i++)
1399 {
1400 tree elem1 = VECTOR_CST_ELT (arg1, i);
1401 tree elem2 = VECTOR_CST_ELT (arg2, i);
1402
1403 elts[i] = const_binop (code, elem1, elem2);
1404
1405 /* It is possible that const_binop cannot handle the given
1406 code and return NULL_TREE */
1407 if (elts[i] == NULL_TREE)
1408 return NULL_TREE;
1409 }
1410
1411 return build_vector (type, elts);
1412 }
1413
1414 /* Shifts allow a scalar offset for a vector. */
1415 if (TREE_CODE (arg1) == VECTOR_CST
1416 && TREE_CODE (arg2) == INTEGER_CST)
1417 {
1418 tree type = TREE_TYPE (arg1);
1419 int count = TYPE_VECTOR_SUBPARTS (type), i;
1420 tree *elts = XALLOCAVEC (tree, count);
1421
1422 for (i = 0; i < count; i++)
1423 {
1424 tree elem1 = VECTOR_CST_ELT (arg1, i);
1425
1426 elts[i] = const_binop (code, elem1, arg2);
1427
1428 /* It is possible that const_binop cannot handle the given
1429 code and return NULL_TREE. */
1430 if (elts[i] == NULL_TREE)
1431 return NULL_TREE;
1432 }
1433
1434 return build_vector (type, elts);
1435 }
1436 return NULL_TREE;
1437 }
1438
1439 /* Overload that adds a TYPE parameter to be able to dispatch
1440 to fold_relational_const. */
1441
1442 tree
1443 const_binop (enum tree_code code, tree type, tree arg1, tree arg2)
1444 {
1445 if (TREE_CODE_CLASS (code) == tcc_comparison)
1446 return fold_relational_const (code, type, arg1, arg2);
1447
1448 /* ??? Until we make the const_binop worker take the type of the
1449 result as argument put those cases that need it here. */
1450 switch (code)
1451 {
1452 case COMPLEX_EXPR:
1453 if ((TREE_CODE (arg1) == REAL_CST
1454 && TREE_CODE (arg2) == REAL_CST)
1455 || (TREE_CODE (arg1) == INTEGER_CST
1456 && TREE_CODE (arg2) == INTEGER_CST))
1457 return build_complex (type, arg1, arg2);
1458 return NULL_TREE;
1459
1460 case VEC_PACK_TRUNC_EXPR:
1461 case VEC_PACK_FIX_TRUNC_EXPR:
1462 {
1463 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1464 tree *elts;
1465
1466 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts / 2
1467 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts / 2);
1468 if (TREE_CODE (arg1) != VECTOR_CST
1469 || TREE_CODE (arg2) != VECTOR_CST)
1470 return NULL_TREE;
1471
1472 elts = XALLOCAVEC (tree, nelts);
1473 if (!vec_cst_ctor_to_array (arg1, elts)
1474 || !vec_cst_ctor_to_array (arg2, elts + nelts / 2))
1475 return NULL_TREE;
1476
1477 for (i = 0; i < nelts; i++)
1478 {
1479 elts[i] = fold_convert_const (code == VEC_PACK_TRUNC_EXPR
1480 ? NOP_EXPR : FIX_TRUNC_EXPR,
1481 TREE_TYPE (type), elts[i]);
1482 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1483 return NULL_TREE;
1484 }
1485
1486 return build_vector (type, elts);
1487 }
1488
1489 case VEC_WIDEN_MULT_LO_EXPR:
1490 case VEC_WIDEN_MULT_HI_EXPR:
1491 case VEC_WIDEN_MULT_EVEN_EXPR:
1492 case VEC_WIDEN_MULT_ODD_EXPR:
1493 {
1494 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type);
1495 unsigned int out, ofs, scale;
1496 tree *elts;
1497
1498 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts * 2
1499 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg2)) == nelts * 2);
1500 if (TREE_CODE (arg1) != VECTOR_CST || TREE_CODE (arg2) != VECTOR_CST)
1501 return NULL_TREE;
1502
1503 elts = XALLOCAVEC (tree, nelts * 4);
1504 if (!vec_cst_ctor_to_array (arg1, elts)
1505 || !vec_cst_ctor_to_array (arg2, elts + nelts * 2))
1506 return NULL_TREE;
1507
1508 if (code == VEC_WIDEN_MULT_LO_EXPR)
1509 scale = 0, ofs = BYTES_BIG_ENDIAN ? nelts : 0;
1510 else if (code == VEC_WIDEN_MULT_HI_EXPR)
1511 scale = 0, ofs = BYTES_BIG_ENDIAN ? 0 : nelts;
1512 else if (code == VEC_WIDEN_MULT_EVEN_EXPR)
1513 scale = 1, ofs = 0;
1514 else /* if (code == VEC_WIDEN_MULT_ODD_EXPR) */
1515 scale = 1, ofs = 1;
1516
1517 for (out = 0; out < nelts; out++)
1518 {
1519 unsigned int in1 = (out << scale) + ofs;
1520 unsigned int in2 = in1 + nelts * 2;
1521 tree t1, t2;
1522
1523 t1 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in1]);
1524 t2 = fold_convert_const (NOP_EXPR, TREE_TYPE (type), elts[in2]);
1525
1526 if (t1 == NULL_TREE || t2 == NULL_TREE)
1527 return NULL_TREE;
1528 elts[out] = const_binop (MULT_EXPR, t1, t2);
1529 if (elts[out] == NULL_TREE || !CONSTANT_CLASS_P (elts[out]))
1530 return NULL_TREE;
1531 }
1532
1533 return build_vector (type, elts);
1534 }
1535
1536 default:;
1537 }
1538
1539 if (TREE_CODE_CLASS (code) != tcc_binary)
1540 return NULL_TREE;
1541
1542 /* Make sure type and arg0 have the same saturating flag. */
1543 gcc_checking_assert (TYPE_SATURATING (type)
1544 == TYPE_SATURATING (TREE_TYPE (arg1)));
1545
1546 return const_binop (code, arg1, arg2);
1547 }
1548
1549 /* Compute CODE ARG1 with resulting type TYPE with ARG1 being constant.
1550 Return zero if computing the constants is not possible. */
1551
1552 tree
1553 const_unop (enum tree_code code, tree type, tree arg0)
1554 {
1555 /* Don't perform the operation, other than NEGATE and ABS, if
1556 flag_signaling_nans is on and the operand is a signaling NaN. */
1557 if (TREE_CODE (arg0) == REAL_CST
1558 && HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg0)))
1559 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg0))
1560 && code != NEGATE_EXPR
1561 && code != ABS_EXPR)
1562 return NULL_TREE;
1563
1564 switch (code)
1565 {
1566 CASE_CONVERT:
1567 case FLOAT_EXPR:
1568 case FIX_TRUNC_EXPR:
1569 case FIXED_CONVERT_EXPR:
1570 return fold_convert_const (code, type, arg0);
1571
1572 case ADDR_SPACE_CONVERT_EXPR:
1573 /* If the source address is 0, and the source address space
1574 cannot have a valid object at 0, fold to dest type null. */
1575 if (integer_zerop (arg0)
1576 && !(targetm.addr_space.zero_address_valid
1577 (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0))))))
1578 return fold_convert_const (code, type, arg0);
1579 break;
1580
1581 case VIEW_CONVERT_EXPR:
1582 return fold_view_convert_expr (type, arg0);
1583
1584 case NEGATE_EXPR:
1585 {
1586 /* Can't call fold_negate_const directly here as that doesn't
1587 handle all cases and we might not be able to negate some
1588 constants. */
1589 tree tem = fold_negate_expr (UNKNOWN_LOCATION, arg0);
1590 if (tem && CONSTANT_CLASS_P (tem))
1591 return tem;
1592 break;
1593 }
1594
1595 case ABS_EXPR:
1596 if (TREE_CODE (arg0) == INTEGER_CST || TREE_CODE (arg0) == REAL_CST)
1597 return fold_abs_const (arg0, type);
1598 break;
1599
1600 case CONJ_EXPR:
1601 if (TREE_CODE (arg0) == COMPLEX_CST)
1602 {
1603 tree ipart = fold_negate_const (TREE_IMAGPART (arg0),
1604 TREE_TYPE (type));
1605 return build_complex (type, TREE_REALPART (arg0), ipart);
1606 }
1607 break;
1608
1609 case BIT_NOT_EXPR:
1610 if (TREE_CODE (arg0) == INTEGER_CST)
1611 return fold_not_const (arg0, type);
1612 /* Perform BIT_NOT_EXPR on each element individually. */
1613 else if (TREE_CODE (arg0) == VECTOR_CST)
1614 {
1615 tree *elements;
1616 tree elem;
1617 unsigned count = VECTOR_CST_NELTS (arg0), i;
1618
1619 elements = XALLOCAVEC (tree, count);
1620 for (i = 0; i < count; i++)
1621 {
1622 elem = VECTOR_CST_ELT (arg0, i);
1623 elem = const_unop (BIT_NOT_EXPR, TREE_TYPE (type), elem);
1624 if (elem == NULL_TREE)
1625 break;
1626 elements[i] = elem;
1627 }
1628 if (i == count)
1629 return build_vector (type, elements);
1630 }
1631 break;
1632
1633 case TRUTH_NOT_EXPR:
1634 if (TREE_CODE (arg0) == INTEGER_CST)
1635 return constant_boolean_node (integer_zerop (arg0), type);
1636 break;
1637
1638 case REALPART_EXPR:
1639 if (TREE_CODE (arg0) == COMPLEX_CST)
1640 return fold_convert (type, TREE_REALPART (arg0));
1641 break;
1642
1643 case IMAGPART_EXPR:
1644 if (TREE_CODE (arg0) == COMPLEX_CST)
1645 return fold_convert (type, TREE_IMAGPART (arg0));
1646 break;
1647
1648 case VEC_UNPACK_LO_EXPR:
1649 case VEC_UNPACK_HI_EXPR:
1650 case VEC_UNPACK_FLOAT_LO_EXPR:
1651 case VEC_UNPACK_FLOAT_HI_EXPR:
1652 {
1653 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
1654 tree *elts;
1655 enum tree_code subcode;
1656
1657 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts * 2);
1658 if (TREE_CODE (arg0) != VECTOR_CST)
1659 return NULL_TREE;
1660
1661 elts = XALLOCAVEC (tree, nelts * 2);
1662 if (!vec_cst_ctor_to_array (arg0, elts))
1663 return NULL_TREE;
1664
1665 if ((!BYTES_BIG_ENDIAN) ^ (code == VEC_UNPACK_LO_EXPR
1666 || code == VEC_UNPACK_FLOAT_LO_EXPR))
1667 elts += nelts;
1668
1669 if (code == VEC_UNPACK_LO_EXPR || code == VEC_UNPACK_HI_EXPR)
1670 subcode = NOP_EXPR;
1671 else
1672 subcode = FLOAT_EXPR;
1673
1674 for (i = 0; i < nelts; i++)
1675 {
1676 elts[i] = fold_convert_const (subcode, TREE_TYPE (type), elts[i]);
1677 if (elts[i] == NULL_TREE || !CONSTANT_CLASS_P (elts[i]))
1678 return NULL_TREE;
1679 }
1680
1681 return build_vector (type, elts);
1682 }
1683
1684 case REDUC_MIN_EXPR:
1685 case REDUC_MAX_EXPR:
1686 case REDUC_PLUS_EXPR:
1687 {
1688 unsigned int nelts, i;
1689 tree *elts;
1690 enum tree_code subcode;
1691
1692 if (TREE_CODE (arg0) != VECTOR_CST)
1693 return NULL_TREE;
1694 nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0));
1695
1696 elts = XALLOCAVEC (tree, nelts);
1697 if (!vec_cst_ctor_to_array (arg0, elts))
1698 return NULL_TREE;
1699
1700 switch (code)
1701 {
1702 case REDUC_MIN_EXPR: subcode = MIN_EXPR; break;
1703 case REDUC_MAX_EXPR: subcode = MAX_EXPR; break;
1704 case REDUC_PLUS_EXPR: subcode = PLUS_EXPR; break;
1705 default: gcc_unreachable ();
1706 }
1707
1708 for (i = 1; i < nelts; i++)
1709 {
1710 elts[0] = const_binop (subcode, elts[0], elts[i]);
1711 if (elts[0] == NULL_TREE || !CONSTANT_CLASS_P (elts[0]))
1712 return NULL_TREE;
1713 }
1714
1715 return elts[0];
1716 }
1717
1718 default:
1719 break;
1720 }
1721
1722 return NULL_TREE;
1723 }
1724
1725 /* Create a sizetype INT_CST node with NUMBER sign extended. KIND
1726 indicates which particular sizetype to create. */
1727
1728 tree
1729 size_int_kind (HOST_WIDE_INT number, enum size_type_kind kind)
1730 {
1731 return build_int_cst (sizetype_tab[(int) kind], number);
1732 }
1733 \f
1734 /* Combine operands OP1 and OP2 with arithmetic operation CODE. CODE
1735 is a tree code. The type of the result is taken from the operands.
1736 Both must be equivalent integer types, ala int_binop_types_match_p.
1737 If the operands are constant, so is the result. */
1738
1739 tree
1740 size_binop_loc (location_t loc, enum tree_code code, tree arg0, tree arg1)
1741 {
1742 tree type = TREE_TYPE (arg0);
1743
1744 if (arg0 == error_mark_node || arg1 == error_mark_node)
1745 return error_mark_node;
1746
1747 gcc_assert (int_binop_types_match_p (code, TREE_TYPE (arg0),
1748 TREE_TYPE (arg1)));
1749
1750 /* Handle the special case of two integer constants faster. */
1751 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
1752 {
1753 /* And some specific cases even faster than that. */
1754 if (code == PLUS_EXPR)
1755 {
1756 if (integer_zerop (arg0) && !TREE_OVERFLOW (arg0))
1757 return arg1;
1758 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1759 return arg0;
1760 }
1761 else if (code == MINUS_EXPR)
1762 {
1763 if (integer_zerop (arg1) && !TREE_OVERFLOW (arg1))
1764 return arg0;
1765 }
1766 else if (code == MULT_EXPR)
1767 {
1768 if (integer_onep (arg0) && !TREE_OVERFLOW (arg0))
1769 return arg1;
1770 }
1771
1772 /* Handle general case of two integer constants. For sizetype
1773 constant calculations we always want to know about overflow,
1774 even in the unsigned case. */
1775 return int_const_binop_1 (code, arg0, arg1, -1);
1776 }
1777
1778 return fold_build2_loc (loc, code, type, arg0, arg1);
1779 }
1780
1781 /* Given two values, either both of sizetype or both of bitsizetype,
1782 compute the difference between the two values. Return the value
1783 in signed type corresponding to the type of the operands. */
1784
1785 tree
1786 size_diffop_loc (location_t loc, tree arg0, tree arg1)
1787 {
1788 tree type = TREE_TYPE (arg0);
1789 tree ctype;
1790
1791 gcc_assert (int_binop_types_match_p (MINUS_EXPR, TREE_TYPE (arg0),
1792 TREE_TYPE (arg1)));
1793
1794 /* If the type is already signed, just do the simple thing. */
1795 if (!TYPE_UNSIGNED (type))
1796 return size_binop_loc (loc, MINUS_EXPR, arg0, arg1);
1797
1798 if (type == sizetype)
1799 ctype = ssizetype;
1800 else if (type == bitsizetype)
1801 ctype = sbitsizetype;
1802 else
1803 ctype = signed_type_for (type);
1804
1805 /* If either operand is not a constant, do the conversions to the signed
1806 type and subtract. The hardware will do the right thing with any
1807 overflow in the subtraction. */
1808 if (TREE_CODE (arg0) != INTEGER_CST || TREE_CODE (arg1) != INTEGER_CST)
1809 return size_binop_loc (loc, MINUS_EXPR,
1810 fold_convert_loc (loc, ctype, arg0),
1811 fold_convert_loc (loc, ctype, arg1));
1812
1813 /* If ARG0 is larger than ARG1, subtract and return the result in CTYPE.
1814 Otherwise, subtract the other way, convert to CTYPE (we know that can't
1815 overflow) and negate (which can't either). Special-case a result
1816 of zero while we're here. */
1817 if (tree_int_cst_equal (arg0, arg1))
1818 return build_int_cst (ctype, 0);
1819 else if (tree_int_cst_lt (arg1, arg0))
1820 return fold_convert_loc (loc, ctype,
1821 size_binop_loc (loc, MINUS_EXPR, arg0, arg1));
1822 else
1823 return size_binop_loc (loc, MINUS_EXPR, build_int_cst (ctype, 0),
1824 fold_convert_loc (loc, ctype,
1825 size_binop_loc (loc,
1826 MINUS_EXPR,
1827 arg1, arg0)));
1828 }
1829 \f
1830 /* A subroutine of fold_convert_const handling conversions of an
1831 INTEGER_CST to another integer type. */
1832
1833 static tree
1834 fold_convert_const_int_from_int (tree type, const_tree arg1)
1835 {
1836 /* Given an integer constant, make new constant with new type,
1837 appropriately sign-extended or truncated. Use widest_int
1838 so that any extension is done according ARG1's type. */
1839 return force_fit_type (type, wi::to_widest (arg1),
1840 !POINTER_TYPE_P (TREE_TYPE (arg1)),
1841 TREE_OVERFLOW (arg1));
1842 }
1843
1844 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1845 to an integer type. */
1846
1847 static tree
1848 fold_convert_const_int_from_real (enum tree_code code, tree type, const_tree arg1)
1849 {
1850 bool overflow = false;
1851 tree t;
1852
1853 /* The following code implements the floating point to integer
1854 conversion rules required by the Java Language Specification,
1855 that IEEE NaNs are mapped to zero and values that overflow
1856 the target precision saturate, i.e. values greater than
1857 INT_MAX are mapped to INT_MAX, and values less than INT_MIN
1858 are mapped to INT_MIN. These semantics are allowed by the
1859 C and C++ standards that simply state that the behavior of
1860 FP-to-integer conversion is unspecified upon overflow. */
1861
1862 wide_int val;
1863 REAL_VALUE_TYPE r;
1864 REAL_VALUE_TYPE x = TREE_REAL_CST (arg1);
1865
1866 switch (code)
1867 {
1868 case FIX_TRUNC_EXPR:
1869 real_trunc (&r, VOIDmode, &x);
1870 break;
1871
1872 default:
1873 gcc_unreachable ();
1874 }
1875
1876 /* If R is NaN, return zero and show we have an overflow. */
1877 if (REAL_VALUE_ISNAN (r))
1878 {
1879 overflow = true;
1880 val = wi::zero (TYPE_PRECISION (type));
1881 }
1882
1883 /* See if R is less than the lower bound or greater than the
1884 upper bound. */
1885
1886 if (! overflow)
1887 {
1888 tree lt = TYPE_MIN_VALUE (type);
1889 REAL_VALUE_TYPE l = real_value_from_int_cst (NULL_TREE, lt);
1890 if (real_less (&r, &l))
1891 {
1892 overflow = true;
1893 val = lt;
1894 }
1895 }
1896
1897 if (! overflow)
1898 {
1899 tree ut = TYPE_MAX_VALUE (type);
1900 if (ut)
1901 {
1902 REAL_VALUE_TYPE u = real_value_from_int_cst (NULL_TREE, ut);
1903 if (real_less (&u, &r))
1904 {
1905 overflow = true;
1906 val = ut;
1907 }
1908 }
1909 }
1910
1911 if (! overflow)
1912 val = real_to_integer (&r, &overflow, TYPE_PRECISION (type));
1913
1914 t = force_fit_type (type, val, -1, overflow | TREE_OVERFLOW (arg1));
1915 return t;
1916 }
1917
1918 /* A subroutine of fold_convert_const handling conversions of a
1919 FIXED_CST to an integer type. */
1920
1921 static tree
1922 fold_convert_const_int_from_fixed (tree type, const_tree arg1)
1923 {
1924 tree t;
1925 double_int temp, temp_trunc;
1926 unsigned int mode;
1927
1928 /* Right shift FIXED_CST to temp by fbit. */
1929 temp = TREE_FIXED_CST (arg1).data;
1930 mode = TREE_FIXED_CST (arg1).mode;
1931 if (GET_MODE_FBIT (mode) < HOST_BITS_PER_DOUBLE_INT)
1932 {
1933 temp = temp.rshift (GET_MODE_FBIT (mode),
1934 HOST_BITS_PER_DOUBLE_INT,
1935 SIGNED_FIXED_POINT_MODE_P (mode));
1936
1937 /* Left shift temp to temp_trunc by fbit. */
1938 temp_trunc = temp.lshift (GET_MODE_FBIT (mode),
1939 HOST_BITS_PER_DOUBLE_INT,
1940 SIGNED_FIXED_POINT_MODE_P (mode));
1941 }
1942 else
1943 {
1944 temp = double_int_zero;
1945 temp_trunc = double_int_zero;
1946 }
1947
1948 /* If FIXED_CST is negative, we need to round the value toward 0.
1949 By checking if the fractional bits are not zero to add 1 to temp. */
1950 if (SIGNED_FIXED_POINT_MODE_P (mode)
1951 && temp_trunc.is_negative ()
1952 && TREE_FIXED_CST (arg1).data != temp_trunc)
1953 temp += double_int_one;
1954
1955 /* Given a fixed-point constant, make new constant with new type,
1956 appropriately sign-extended or truncated. */
1957 t = force_fit_type (type, temp, -1,
1958 (temp.is_negative ()
1959 && (TYPE_UNSIGNED (type)
1960 < TYPE_UNSIGNED (TREE_TYPE (arg1))))
1961 | TREE_OVERFLOW (arg1));
1962
1963 return t;
1964 }
1965
1966 /* A subroutine of fold_convert_const handling conversions a REAL_CST
1967 to another floating point type. */
1968
1969 static tree
1970 fold_convert_const_real_from_real (tree type, const_tree arg1)
1971 {
1972 REAL_VALUE_TYPE value;
1973 tree t;
1974
1975 /* Don't perform the operation if flag_signaling_nans is on
1976 and the operand is a signaling NaN. */
1977 if (HONOR_SNANS (TYPE_MODE (TREE_TYPE (arg1)))
1978 && REAL_VALUE_ISSIGNALING_NAN (TREE_REAL_CST (arg1)))
1979 return NULL_TREE;
1980
1981 real_convert (&value, TYPE_MODE (type), &TREE_REAL_CST (arg1));
1982 t = build_real (type, value);
1983
1984 /* If converting an infinity or NAN to a representation that doesn't
1985 have one, set the overflow bit so that we can produce some kind of
1986 error message at the appropriate point if necessary. It's not the
1987 most user-friendly message, but it's better than nothing. */
1988 if (REAL_VALUE_ISINF (TREE_REAL_CST (arg1))
1989 && !MODE_HAS_INFINITIES (TYPE_MODE (type)))
1990 TREE_OVERFLOW (t) = 1;
1991 else if (REAL_VALUE_ISNAN (TREE_REAL_CST (arg1))
1992 && !MODE_HAS_NANS (TYPE_MODE (type)))
1993 TREE_OVERFLOW (t) = 1;
1994 /* Regular overflow, conversion produced an infinity in a mode that
1995 can't represent them. */
1996 else if (!MODE_HAS_INFINITIES (TYPE_MODE (type))
1997 && REAL_VALUE_ISINF (value)
1998 && !REAL_VALUE_ISINF (TREE_REAL_CST (arg1)))
1999 TREE_OVERFLOW (t) = 1;
2000 else
2001 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2002 return t;
2003 }
2004
2005 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2006 to a floating point type. */
2007
2008 static tree
2009 fold_convert_const_real_from_fixed (tree type, const_tree arg1)
2010 {
2011 REAL_VALUE_TYPE value;
2012 tree t;
2013
2014 real_convert_from_fixed (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1));
2015 t = build_real (type, value);
2016
2017 TREE_OVERFLOW (t) = TREE_OVERFLOW (arg1);
2018 return t;
2019 }
2020
2021 /* A subroutine of fold_convert_const handling conversions a FIXED_CST
2022 to another fixed-point type. */
2023
2024 static tree
2025 fold_convert_const_fixed_from_fixed (tree type, const_tree arg1)
2026 {
2027 FIXED_VALUE_TYPE value;
2028 tree t;
2029 bool overflow_p;
2030
2031 overflow_p = fixed_convert (&value, TYPE_MODE (type), &TREE_FIXED_CST (arg1),
2032 TYPE_SATURATING (type));
2033 t = build_fixed (type, value);
2034
2035 /* Propagate overflow flags. */
2036 if (overflow_p | TREE_OVERFLOW (arg1))
2037 TREE_OVERFLOW (t) = 1;
2038 return t;
2039 }
2040
2041 /* A subroutine of fold_convert_const handling conversions an INTEGER_CST
2042 to a fixed-point type. */
2043
2044 static tree
2045 fold_convert_const_fixed_from_int (tree type, const_tree arg1)
2046 {
2047 FIXED_VALUE_TYPE value;
2048 tree t;
2049 bool overflow_p;
2050 double_int di;
2051
2052 gcc_assert (TREE_INT_CST_NUNITS (arg1) <= 2);
2053
2054 di.low = TREE_INT_CST_ELT (arg1, 0);
2055 if (TREE_INT_CST_NUNITS (arg1) == 1)
2056 di.high = (HOST_WIDE_INT) di.low < 0 ? (HOST_WIDE_INT) -1 : 0;
2057 else
2058 di.high = TREE_INT_CST_ELT (arg1, 1);
2059
2060 overflow_p = fixed_convert_from_int (&value, TYPE_MODE (type), di,
2061 TYPE_UNSIGNED (TREE_TYPE (arg1)),
2062 TYPE_SATURATING (type));
2063 t = build_fixed (type, value);
2064
2065 /* Propagate overflow flags. */
2066 if (overflow_p | TREE_OVERFLOW (arg1))
2067 TREE_OVERFLOW (t) = 1;
2068 return t;
2069 }
2070
2071 /* A subroutine of fold_convert_const handling conversions a REAL_CST
2072 to a fixed-point type. */
2073
2074 static tree
2075 fold_convert_const_fixed_from_real (tree type, const_tree arg1)
2076 {
2077 FIXED_VALUE_TYPE value;
2078 tree t;
2079 bool overflow_p;
2080
2081 overflow_p = fixed_convert_from_real (&value, TYPE_MODE (type),
2082 &TREE_REAL_CST (arg1),
2083 TYPE_SATURATING (type));
2084 t = build_fixed (type, value);
2085
2086 /* Propagate overflow flags. */
2087 if (overflow_p | TREE_OVERFLOW (arg1))
2088 TREE_OVERFLOW (t) = 1;
2089 return t;
2090 }
2091
2092 /* Attempt to fold type conversion operation CODE of expression ARG1 to
2093 type TYPE. If no simplification can be done return NULL_TREE. */
2094
2095 static tree
2096 fold_convert_const (enum tree_code code, tree type, tree arg1)
2097 {
2098 if (TREE_TYPE (arg1) == type)
2099 return arg1;
2100
2101 if (POINTER_TYPE_P (type) || INTEGRAL_TYPE_P (type)
2102 || TREE_CODE (type) == OFFSET_TYPE)
2103 {
2104 if (TREE_CODE (arg1) == INTEGER_CST)
2105 return fold_convert_const_int_from_int (type, arg1);
2106 else if (TREE_CODE (arg1) == REAL_CST)
2107 return fold_convert_const_int_from_real (code, type, arg1);
2108 else if (TREE_CODE (arg1) == FIXED_CST)
2109 return fold_convert_const_int_from_fixed (type, arg1);
2110 }
2111 else if (TREE_CODE (type) == REAL_TYPE)
2112 {
2113 if (TREE_CODE (arg1) == INTEGER_CST)
2114 return build_real_from_int_cst (type, arg1);
2115 else if (TREE_CODE (arg1) == REAL_CST)
2116 return fold_convert_const_real_from_real (type, arg1);
2117 else if (TREE_CODE (arg1) == FIXED_CST)
2118 return fold_convert_const_real_from_fixed (type, arg1);
2119 }
2120 else if (TREE_CODE (type) == FIXED_POINT_TYPE)
2121 {
2122 if (TREE_CODE (arg1) == FIXED_CST)
2123 return fold_convert_const_fixed_from_fixed (type, arg1);
2124 else if (TREE_CODE (arg1) == INTEGER_CST)
2125 return fold_convert_const_fixed_from_int (type, arg1);
2126 else if (TREE_CODE (arg1) == REAL_CST)
2127 return fold_convert_const_fixed_from_real (type, arg1);
2128 }
2129 else if (TREE_CODE (type) == VECTOR_TYPE)
2130 {
2131 if (TREE_CODE (arg1) == VECTOR_CST
2132 && TYPE_VECTOR_SUBPARTS (type) == VECTOR_CST_NELTS (arg1))
2133 {
2134 int len = TYPE_VECTOR_SUBPARTS (type);
2135 tree elttype = TREE_TYPE (type);
2136 tree *v = XALLOCAVEC (tree, len);
2137 for (int i = 0; i < len; ++i)
2138 {
2139 tree elt = VECTOR_CST_ELT (arg1, i);
2140 tree cvt = fold_convert_const (code, elttype, elt);
2141 if (cvt == NULL_TREE)
2142 return NULL_TREE;
2143 v[i] = cvt;
2144 }
2145 return build_vector (type, v);
2146 }
2147 }
2148 return NULL_TREE;
2149 }
2150
2151 /* Construct a vector of zero elements of vector type TYPE. */
2152
2153 static tree
2154 build_zero_vector (tree type)
2155 {
2156 tree t;
2157
2158 t = fold_convert_const (NOP_EXPR, TREE_TYPE (type), integer_zero_node);
2159 return build_vector_from_val (type, t);
2160 }
2161
2162 /* Returns true, if ARG is convertible to TYPE using a NOP_EXPR. */
2163
2164 bool
2165 fold_convertible_p (const_tree type, const_tree arg)
2166 {
2167 tree orig = TREE_TYPE (arg);
2168
2169 if (type == orig)
2170 return true;
2171
2172 if (TREE_CODE (arg) == ERROR_MARK
2173 || TREE_CODE (type) == ERROR_MARK
2174 || TREE_CODE (orig) == ERROR_MARK)
2175 return false;
2176
2177 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2178 return true;
2179
2180 switch (TREE_CODE (type))
2181 {
2182 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2183 case POINTER_TYPE: case REFERENCE_TYPE:
2184 case OFFSET_TYPE:
2185 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2186 || TREE_CODE (orig) == OFFSET_TYPE)
2187 return true;
2188 return (TREE_CODE (orig) == VECTOR_TYPE
2189 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2190
2191 case REAL_TYPE:
2192 case FIXED_POINT_TYPE:
2193 case COMPLEX_TYPE:
2194 case VECTOR_TYPE:
2195 case VOID_TYPE:
2196 return TREE_CODE (type) == TREE_CODE (orig);
2197
2198 default:
2199 return false;
2200 }
2201 }
2202
2203 /* Convert expression ARG to type TYPE. Used by the middle-end for
2204 simple conversions in preference to calling the front-end's convert. */
2205
2206 tree
2207 fold_convert_loc (location_t loc, tree type, tree arg)
2208 {
2209 tree orig = TREE_TYPE (arg);
2210 tree tem;
2211
2212 if (type == orig)
2213 return arg;
2214
2215 if (TREE_CODE (arg) == ERROR_MARK
2216 || TREE_CODE (type) == ERROR_MARK
2217 || TREE_CODE (orig) == ERROR_MARK)
2218 return error_mark_node;
2219
2220 switch (TREE_CODE (type))
2221 {
2222 case POINTER_TYPE:
2223 case REFERENCE_TYPE:
2224 /* Handle conversions between pointers to different address spaces. */
2225 if (POINTER_TYPE_P (orig)
2226 && (TYPE_ADDR_SPACE (TREE_TYPE (type))
2227 != TYPE_ADDR_SPACE (TREE_TYPE (orig))))
2228 return fold_build1_loc (loc, ADDR_SPACE_CONVERT_EXPR, type, arg);
2229 /* fall through */
2230
2231 case INTEGER_TYPE: case ENUMERAL_TYPE: case BOOLEAN_TYPE:
2232 case OFFSET_TYPE:
2233 if (TREE_CODE (arg) == INTEGER_CST)
2234 {
2235 tem = fold_convert_const (NOP_EXPR, type, arg);
2236 if (tem != NULL_TREE)
2237 return tem;
2238 }
2239 if (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2240 || TREE_CODE (orig) == OFFSET_TYPE)
2241 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2242 if (TREE_CODE (orig) == COMPLEX_TYPE)
2243 return fold_convert_loc (loc, type,
2244 fold_build1_loc (loc, REALPART_EXPR,
2245 TREE_TYPE (orig), arg));
2246 gcc_assert (TREE_CODE (orig) == VECTOR_TYPE
2247 && tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2248 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2249
2250 case REAL_TYPE:
2251 if (TREE_CODE (arg) == INTEGER_CST)
2252 {
2253 tem = fold_convert_const (FLOAT_EXPR, type, arg);
2254 if (tem != NULL_TREE)
2255 return tem;
2256 }
2257 else if (TREE_CODE (arg) == REAL_CST)
2258 {
2259 tem = fold_convert_const (NOP_EXPR, type, arg);
2260 if (tem != NULL_TREE)
2261 return tem;
2262 }
2263 else if (TREE_CODE (arg) == FIXED_CST)
2264 {
2265 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2266 if (tem != NULL_TREE)
2267 return tem;
2268 }
2269
2270 switch (TREE_CODE (orig))
2271 {
2272 case INTEGER_TYPE:
2273 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2274 case POINTER_TYPE: case REFERENCE_TYPE:
2275 return fold_build1_loc (loc, FLOAT_EXPR, type, arg);
2276
2277 case REAL_TYPE:
2278 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2279
2280 case FIXED_POINT_TYPE:
2281 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2282
2283 case COMPLEX_TYPE:
2284 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2285 return fold_convert_loc (loc, type, tem);
2286
2287 default:
2288 gcc_unreachable ();
2289 }
2290
2291 case FIXED_POINT_TYPE:
2292 if (TREE_CODE (arg) == FIXED_CST || TREE_CODE (arg) == INTEGER_CST
2293 || TREE_CODE (arg) == REAL_CST)
2294 {
2295 tem = fold_convert_const (FIXED_CONVERT_EXPR, type, arg);
2296 if (tem != NULL_TREE)
2297 goto fold_convert_exit;
2298 }
2299
2300 switch (TREE_CODE (orig))
2301 {
2302 case FIXED_POINT_TYPE:
2303 case INTEGER_TYPE:
2304 case ENUMERAL_TYPE:
2305 case BOOLEAN_TYPE:
2306 case REAL_TYPE:
2307 return fold_build1_loc (loc, FIXED_CONVERT_EXPR, type, arg);
2308
2309 case COMPLEX_TYPE:
2310 tem = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2311 return fold_convert_loc (loc, type, tem);
2312
2313 default:
2314 gcc_unreachable ();
2315 }
2316
2317 case COMPLEX_TYPE:
2318 switch (TREE_CODE (orig))
2319 {
2320 case INTEGER_TYPE:
2321 case BOOLEAN_TYPE: case ENUMERAL_TYPE:
2322 case POINTER_TYPE: case REFERENCE_TYPE:
2323 case REAL_TYPE:
2324 case FIXED_POINT_TYPE:
2325 return fold_build2_loc (loc, COMPLEX_EXPR, type,
2326 fold_convert_loc (loc, TREE_TYPE (type), arg),
2327 fold_convert_loc (loc, TREE_TYPE (type),
2328 integer_zero_node));
2329 case COMPLEX_TYPE:
2330 {
2331 tree rpart, ipart;
2332
2333 if (TREE_CODE (arg) == COMPLEX_EXPR)
2334 {
2335 rpart = fold_convert_loc (loc, TREE_TYPE (type),
2336 TREE_OPERAND (arg, 0));
2337 ipart = fold_convert_loc (loc, TREE_TYPE (type),
2338 TREE_OPERAND (arg, 1));
2339 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2340 }
2341
2342 arg = save_expr (arg);
2343 rpart = fold_build1_loc (loc, REALPART_EXPR, TREE_TYPE (orig), arg);
2344 ipart = fold_build1_loc (loc, IMAGPART_EXPR, TREE_TYPE (orig), arg);
2345 rpart = fold_convert_loc (loc, TREE_TYPE (type), rpart);
2346 ipart = fold_convert_loc (loc, TREE_TYPE (type), ipart);
2347 return fold_build2_loc (loc, COMPLEX_EXPR, type, rpart, ipart);
2348 }
2349
2350 default:
2351 gcc_unreachable ();
2352 }
2353
2354 case VECTOR_TYPE:
2355 if (integer_zerop (arg))
2356 return build_zero_vector (type);
2357 gcc_assert (tree_int_cst_equal (TYPE_SIZE (type), TYPE_SIZE (orig)));
2358 gcc_assert (INTEGRAL_TYPE_P (orig) || POINTER_TYPE_P (orig)
2359 || TREE_CODE (orig) == VECTOR_TYPE);
2360 return fold_build1_loc (loc, VIEW_CONVERT_EXPR, type, arg);
2361
2362 case VOID_TYPE:
2363 tem = fold_ignored_result (arg);
2364 return fold_build1_loc (loc, NOP_EXPR, type, tem);
2365
2366 default:
2367 if (TYPE_MAIN_VARIANT (type) == TYPE_MAIN_VARIANT (orig))
2368 return fold_build1_loc (loc, NOP_EXPR, type, arg);
2369 gcc_unreachable ();
2370 }
2371 fold_convert_exit:
2372 protected_set_expr_location_unshare (tem, loc);
2373 return tem;
2374 }
2375 \f
2376 /* Return false if expr can be assumed not to be an lvalue, true
2377 otherwise. */
2378
2379 static bool
2380 maybe_lvalue_p (const_tree x)
2381 {
2382 /* We only need to wrap lvalue tree codes. */
2383 switch (TREE_CODE (x))
2384 {
2385 case VAR_DECL:
2386 case PARM_DECL:
2387 case RESULT_DECL:
2388 case LABEL_DECL:
2389 case FUNCTION_DECL:
2390 case SSA_NAME:
2391
2392 case COMPONENT_REF:
2393 case MEM_REF:
2394 case INDIRECT_REF:
2395 case ARRAY_REF:
2396 case ARRAY_RANGE_REF:
2397 case BIT_FIELD_REF:
2398 case OBJ_TYPE_REF:
2399
2400 case REALPART_EXPR:
2401 case IMAGPART_EXPR:
2402 case PREINCREMENT_EXPR:
2403 case PREDECREMENT_EXPR:
2404 case SAVE_EXPR:
2405 case TRY_CATCH_EXPR:
2406 case WITH_CLEANUP_EXPR:
2407 case COMPOUND_EXPR:
2408 case MODIFY_EXPR:
2409 case TARGET_EXPR:
2410 case COND_EXPR:
2411 case BIND_EXPR:
2412 break;
2413
2414 default:
2415 /* Assume the worst for front-end tree codes. */
2416 if ((int)TREE_CODE (x) >= NUM_TREE_CODES)
2417 break;
2418 return false;
2419 }
2420
2421 return true;
2422 }
2423
2424 /* Return an expr equal to X but certainly not valid as an lvalue. */
2425
2426 tree
2427 non_lvalue_loc (location_t loc, tree x)
2428 {
2429 /* While we are in GIMPLE, NON_LVALUE_EXPR doesn't mean anything to
2430 us. */
2431 if (in_gimple_form)
2432 return x;
2433
2434 if (! maybe_lvalue_p (x))
2435 return x;
2436 return build1_loc (loc, NON_LVALUE_EXPR, TREE_TYPE (x), x);
2437 }
2438
2439 /* When pedantic, return an expr equal to X but certainly not valid as a
2440 pedantic lvalue. Otherwise, return X. */
2441
2442 static tree
2443 pedantic_non_lvalue_loc (location_t loc, tree x)
2444 {
2445 return protected_set_expr_location_unshare (x, loc);
2446 }
2447 \f
2448 /* Given a tree comparison code, return the code that is the logical inverse.
2449 It is generally not safe to do this for floating-point comparisons, except
2450 for EQ_EXPR, NE_EXPR, ORDERED_EXPR and UNORDERED_EXPR, so we return
2451 ERROR_MARK in this case. */
2452
2453 enum tree_code
2454 invert_tree_comparison (enum tree_code code, bool honor_nans)
2455 {
2456 if (honor_nans && flag_trapping_math && code != EQ_EXPR && code != NE_EXPR
2457 && code != ORDERED_EXPR && code != UNORDERED_EXPR)
2458 return ERROR_MARK;
2459
2460 switch (code)
2461 {
2462 case EQ_EXPR:
2463 return NE_EXPR;
2464 case NE_EXPR:
2465 return EQ_EXPR;
2466 case GT_EXPR:
2467 return honor_nans ? UNLE_EXPR : LE_EXPR;
2468 case GE_EXPR:
2469 return honor_nans ? UNLT_EXPR : LT_EXPR;
2470 case LT_EXPR:
2471 return honor_nans ? UNGE_EXPR : GE_EXPR;
2472 case LE_EXPR:
2473 return honor_nans ? UNGT_EXPR : GT_EXPR;
2474 case LTGT_EXPR:
2475 return UNEQ_EXPR;
2476 case UNEQ_EXPR:
2477 return LTGT_EXPR;
2478 case UNGT_EXPR:
2479 return LE_EXPR;
2480 case UNGE_EXPR:
2481 return LT_EXPR;
2482 case UNLT_EXPR:
2483 return GE_EXPR;
2484 case UNLE_EXPR:
2485 return GT_EXPR;
2486 case ORDERED_EXPR:
2487 return UNORDERED_EXPR;
2488 case UNORDERED_EXPR:
2489 return ORDERED_EXPR;
2490 default:
2491 gcc_unreachable ();
2492 }
2493 }
2494
2495 /* Similar, but return the comparison that results if the operands are
2496 swapped. This is safe for floating-point. */
2497
2498 enum tree_code
2499 swap_tree_comparison (enum tree_code code)
2500 {
2501 switch (code)
2502 {
2503 case EQ_EXPR:
2504 case NE_EXPR:
2505 case ORDERED_EXPR:
2506 case UNORDERED_EXPR:
2507 case LTGT_EXPR:
2508 case UNEQ_EXPR:
2509 return code;
2510 case GT_EXPR:
2511 return LT_EXPR;
2512 case GE_EXPR:
2513 return LE_EXPR;
2514 case LT_EXPR:
2515 return GT_EXPR;
2516 case LE_EXPR:
2517 return GE_EXPR;
2518 case UNGT_EXPR:
2519 return UNLT_EXPR;
2520 case UNGE_EXPR:
2521 return UNLE_EXPR;
2522 case UNLT_EXPR:
2523 return UNGT_EXPR;
2524 case UNLE_EXPR:
2525 return UNGE_EXPR;
2526 default:
2527 gcc_unreachable ();
2528 }
2529 }
2530
2531
2532 /* Convert a comparison tree code from an enum tree_code representation
2533 into a compcode bit-based encoding. This function is the inverse of
2534 compcode_to_comparison. */
2535
2536 static enum comparison_code
2537 comparison_to_compcode (enum tree_code code)
2538 {
2539 switch (code)
2540 {
2541 case LT_EXPR:
2542 return COMPCODE_LT;
2543 case EQ_EXPR:
2544 return COMPCODE_EQ;
2545 case LE_EXPR:
2546 return COMPCODE_LE;
2547 case GT_EXPR:
2548 return COMPCODE_GT;
2549 case NE_EXPR:
2550 return COMPCODE_NE;
2551 case GE_EXPR:
2552 return COMPCODE_GE;
2553 case ORDERED_EXPR:
2554 return COMPCODE_ORD;
2555 case UNORDERED_EXPR:
2556 return COMPCODE_UNORD;
2557 case UNLT_EXPR:
2558 return COMPCODE_UNLT;
2559 case UNEQ_EXPR:
2560 return COMPCODE_UNEQ;
2561 case UNLE_EXPR:
2562 return COMPCODE_UNLE;
2563 case UNGT_EXPR:
2564 return COMPCODE_UNGT;
2565 case LTGT_EXPR:
2566 return COMPCODE_LTGT;
2567 case UNGE_EXPR:
2568 return COMPCODE_UNGE;
2569 default:
2570 gcc_unreachable ();
2571 }
2572 }
2573
2574 /* Convert a compcode bit-based encoding of a comparison operator back
2575 to GCC's enum tree_code representation. This function is the
2576 inverse of comparison_to_compcode. */
2577
2578 static enum tree_code
2579 compcode_to_comparison (enum comparison_code code)
2580 {
2581 switch (code)
2582 {
2583 case COMPCODE_LT:
2584 return LT_EXPR;
2585 case COMPCODE_EQ:
2586 return EQ_EXPR;
2587 case COMPCODE_LE:
2588 return LE_EXPR;
2589 case COMPCODE_GT:
2590 return GT_EXPR;
2591 case COMPCODE_NE:
2592 return NE_EXPR;
2593 case COMPCODE_GE:
2594 return GE_EXPR;
2595 case COMPCODE_ORD:
2596 return ORDERED_EXPR;
2597 case COMPCODE_UNORD:
2598 return UNORDERED_EXPR;
2599 case COMPCODE_UNLT:
2600 return UNLT_EXPR;
2601 case COMPCODE_UNEQ:
2602 return UNEQ_EXPR;
2603 case COMPCODE_UNLE:
2604 return UNLE_EXPR;
2605 case COMPCODE_UNGT:
2606 return UNGT_EXPR;
2607 case COMPCODE_LTGT:
2608 return LTGT_EXPR;
2609 case COMPCODE_UNGE:
2610 return UNGE_EXPR;
2611 default:
2612 gcc_unreachable ();
2613 }
2614 }
2615
2616 /* Return a tree for the comparison which is the combination of
2617 doing the AND or OR (depending on CODE) of the two operations LCODE
2618 and RCODE on the identical operands LL_ARG and LR_ARG. Take into account
2619 the possibility of trapping if the mode has NaNs, and return NULL_TREE
2620 if this makes the transformation invalid. */
2621
2622 tree
2623 combine_comparisons (location_t loc,
2624 enum tree_code code, enum tree_code lcode,
2625 enum tree_code rcode, tree truth_type,
2626 tree ll_arg, tree lr_arg)
2627 {
2628 bool honor_nans = HONOR_NANS (ll_arg);
2629 enum comparison_code lcompcode = comparison_to_compcode (lcode);
2630 enum comparison_code rcompcode = comparison_to_compcode (rcode);
2631 int compcode;
2632
2633 switch (code)
2634 {
2635 case TRUTH_AND_EXPR: case TRUTH_ANDIF_EXPR:
2636 compcode = lcompcode & rcompcode;
2637 break;
2638
2639 case TRUTH_OR_EXPR: case TRUTH_ORIF_EXPR:
2640 compcode = lcompcode | rcompcode;
2641 break;
2642
2643 default:
2644 return NULL_TREE;
2645 }
2646
2647 if (!honor_nans)
2648 {
2649 /* Eliminate unordered comparisons, as well as LTGT and ORD
2650 which are not used unless the mode has NaNs. */
2651 compcode &= ~COMPCODE_UNORD;
2652 if (compcode == COMPCODE_LTGT)
2653 compcode = COMPCODE_NE;
2654 else if (compcode == COMPCODE_ORD)
2655 compcode = COMPCODE_TRUE;
2656 }
2657 else if (flag_trapping_math)
2658 {
2659 /* Check that the original operation and the optimized ones will trap
2660 under the same condition. */
2661 bool ltrap = (lcompcode & COMPCODE_UNORD) == 0
2662 && (lcompcode != COMPCODE_EQ)
2663 && (lcompcode != COMPCODE_ORD);
2664 bool rtrap = (rcompcode & COMPCODE_UNORD) == 0
2665 && (rcompcode != COMPCODE_EQ)
2666 && (rcompcode != COMPCODE_ORD);
2667 bool trap = (compcode & COMPCODE_UNORD) == 0
2668 && (compcode != COMPCODE_EQ)
2669 && (compcode != COMPCODE_ORD);
2670
2671 /* In a short-circuited boolean expression the LHS might be
2672 such that the RHS, if evaluated, will never trap. For
2673 example, in ORD (x, y) && (x < y), we evaluate the RHS only
2674 if neither x nor y is NaN. (This is a mixed blessing: for
2675 example, the expression above will never trap, hence
2676 optimizing it to x < y would be invalid). */
2677 if ((code == TRUTH_ORIF_EXPR && (lcompcode & COMPCODE_UNORD))
2678 || (code == TRUTH_ANDIF_EXPR && !(lcompcode & COMPCODE_UNORD)))
2679 rtrap = false;
2680
2681 /* If the comparison was short-circuited, and only the RHS
2682 trapped, we may now generate a spurious trap. */
2683 if (rtrap && !ltrap
2684 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
2685 return NULL_TREE;
2686
2687 /* If we changed the conditions that cause a trap, we lose. */
2688 if ((ltrap || rtrap) != trap)
2689 return NULL_TREE;
2690 }
2691
2692 if (compcode == COMPCODE_TRUE)
2693 return constant_boolean_node (true, truth_type);
2694 else if (compcode == COMPCODE_FALSE)
2695 return constant_boolean_node (false, truth_type);
2696 else
2697 {
2698 enum tree_code tcode;
2699
2700 tcode = compcode_to_comparison ((enum comparison_code) compcode);
2701 return fold_build2_loc (loc, tcode, truth_type, ll_arg, lr_arg);
2702 }
2703 }
2704 \f
2705 /* Return nonzero if two operands (typically of the same tree node)
2706 are necessarily equal. FLAGS modifies behavior as follows:
2707
2708 If OEP_ONLY_CONST is set, only return nonzero for constants.
2709 This function tests whether the operands are indistinguishable;
2710 it does not test whether they are equal using C's == operation.
2711 The distinction is important for IEEE floating point, because
2712 (1) -0.0 and 0.0 are distinguishable, but -0.0==0.0, and
2713 (2) two NaNs may be indistinguishable, but NaN!=NaN.
2714
2715 If OEP_ONLY_CONST is unset, a VAR_DECL is considered equal to itself
2716 even though it may hold multiple values during a function.
2717 This is because a GCC tree node guarantees that nothing else is
2718 executed between the evaluation of its "operands" (which may often
2719 be evaluated in arbitrary order). Hence if the operands themselves
2720 don't side-effect, the VAR_DECLs, PARM_DECLs etc... must hold the
2721 same value in each operand/subexpression. Hence leaving OEP_ONLY_CONST
2722 unset means assuming isochronic (or instantaneous) tree equivalence.
2723 Unless comparing arbitrary expression trees, such as from different
2724 statements, this flag can usually be left unset.
2725
2726 If OEP_PURE_SAME is set, then pure functions with identical arguments
2727 are considered the same. It is used when the caller has other ways
2728 to ensure that global memory is unchanged in between.
2729
2730 If OEP_ADDRESS_OF is set, we are actually comparing addresses of objects,
2731 not values of expressions.
2732
2733 Unless OEP_MATCH_SIDE_EFFECTS is set, the function returns false on
2734 any operand with side effect. This is unnecesarily conservative in the
2735 case we know that arg0 and arg1 are in disjoint code paths (such as in
2736 ?: operator). In addition OEP_MATCH_SIDE_EFFECTS is used when comparing
2737 addresses with TREE_CONSTANT flag set so we know that &var == &var
2738 even if var is volatile. */
2739
2740 int
2741 operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
2742 {
2743 /* If either is ERROR_MARK, they aren't equal. */
2744 if (TREE_CODE (arg0) == ERROR_MARK || TREE_CODE (arg1) == ERROR_MARK
2745 || TREE_TYPE (arg0) == error_mark_node
2746 || TREE_TYPE (arg1) == error_mark_node)
2747 return 0;
2748
2749 /* Similar, if either does not have a type (like a released SSA name),
2750 they aren't equal. */
2751 if (!TREE_TYPE (arg0) || !TREE_TYPE (arg1))
2752 return 0;
2753
2754 /* We cannot consider pointers to different address space equal. */
2755 if (POINTER_TYPE_P (TREE_TYPE (arg0))
2756 && POINTER_TYPE_P (TREE_TYPE (arg1))
2757 && (TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg0)))
2758 != TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (arg1)))))
2759 return 0;
2760
2761 /* Check equality of integer constants before bailing out due to
2762 precision differences. */
2763 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
2764 {
2765 /* Address of INTEGER_CST is not defined; check that we did not forget
2766 to drop the OEP_ADDRESS_OF flags. */
2767 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2768 return tree_int_cst_equal (arg0, arg1);
2769 }
2770
2771 if (!(flags & OEP_ADDRESS_OF))
2772 {
2773 /* If both types don't have the same signedness, then we can't consider
2774 them equal. We must check this before the STRIP_NOPS calls
2775 because they may change the signedness of the arguments. As pointers
2776 strictly don't have a signedness, require either two pointers or
2777 two non-pointers as well. */
2778 if (TYPE_UNSIGNED (TREE_TYPE (arg0)) != TYPE_UNSIGNED (TREE_TYPE (arg1))
2779 || POINTER_TYPE_P (TREE_TYPE (arg0))
2780 != POINTER_TYPE_P (TREE_TYPE (arg1)))
2781 return 0;
2782
2783 /* If both types don't have the same precision, then it is not safe
2784 to strip NOPs. */
2785 if (element_precision (TREE_TYPE (arg0))
2786 != element_precision (TREE_TYPE (arg1)))
2787 return 0;
2788
2789 STRIP_NOPS (arg0);
2790 STRIP_NOPS (arg1);
2791 }
2792 #if 0
2793 /* FIXME: Fortran FE currently produce ADDR_EXPR of NOP_EXPR. Enable the
2794 sanity check once the issue is solved. */
2795 else
2796 /* Addresses of conversions and SSA_NAMEs (and many other things)
2797 are not defined. Check that we did not forget to drop the
2798 OEP_ADDRESS_OF/OEP_CONSTANT_ADDRESS_OF flags. */
2799 gcc_checking_assert (!CONVERT_EXPR_P (arg0) && !CONVERT_EXPR_P (arg1)
2800 && TREE_CODE (arg0) != SSA_NAME);
2801 #endif
2802
2803 /* In case both args are comparisons but with different comparison
2804 code, try to swap the comparison operands of one arg to produce
2805 a match and compare that variant. */
2806 if (TREE_CODE (arg0) != TREE_CODE (arg1)
2807 && COMPARISON_CLASS_P (arg0)
2808 && COMPARISON_CLASS_P (arg1))
2809 {
2810 enum tree_code swap_code = swap_tree_comparison (TREE_CODE (arg1));
2811
2812 if (TREE_CODE (arg0) == swap_code)
2813 return operand_equal_p (TREE_OPERAND (arg0, 0),
2814 TREE_OPERAND (arg1, 1), flags)
2815 && operand_equal_p (TREE_OPERAND (arg0, 1),
2816 TREE_OPERAND (arg1, 0), flags);
2817 }
2818
2819 if (TREE_CODE (arg0) != TREE_CODE (arg1))
2820 {
2821 /* NOP_EXPR and CONVERT_EXPR are considered equal. */
2822 if (CONVERT_EXPR_P (arg0) && CONVERT_EXPR_P (arg1))
2823 ;
2824 else if (flags & OEP_ADDRESS_OF)
2825 {
2826 /* If we are interested in comparing addresses ignore
2827 MEM_REF wrappings of the base that can appear just for
2828 TBAA reasons. */
2829 if (TREE_CODE (arg0) == MEM_REF
2830 && DECL_P (arg1)
2831 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ADDR_EXPR
2832 && TREE_OPERAND (TREE_OPERAND (arg0, 0), 0) == arg1
2833 && integer_zerop (TREE_OPERAND (arg0, 1)))
2834 return 1;
2835 else if (TREE_CODE (arg1) == MEM_REF
2836 && DECL_P (arg0)
2837 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ADDR_EXPR
2838 && TREE_OPERAND (TREE_OPERAND (arg1, 0), 0) == arg0
2839 && integer_zerop (TREE_OPERAND (arg1, 1)))
2840 return 1;
2841 return 0;
2842 }
2843 else
2844 return 0;
2845 }
2846
2847 /* When not checking adddresses, this is needed for conversions and for
2848 COMPONENT_REF. Might as well play it safe and always test this. */
2849 if (TREE_CODE (TREE_TYPE (arg0)) == ERROR_MARK
2850 || TREE_CODE (TREE_TYPE (arg1)) == ERROR_MARK
2851 || (TYPE_MODE (TREE_TYPE (arg0)) != TYPE_MODE (TREE_TYPE (arg1))
2852 && !(flags & OEP_ADDRESS_OF)))
2853 return 0;
2854
2855 /* If ARG0 and ARG1 are the same SAVE_EXPR, they are necessarily equal.
2856 We don't care about side effects in that case because the SAVE_EXPR
2857 takes care of that for us. In all other cases, two expressions are
2858 equal if they have no side effects. If we have two identical
2859 expressions with side effects that should be treated the same due
2860 to the only side effects being identical SAVE_EXPR's, that will
2861 be detected in the recursive calls below.
2862 If we are taking an invariant address of two identical objects
2863 they are necessarily equal as well. */
2864 if (arg0 == arg1 && ! (flags & OEP_ONLY_CONST)
2865 && (TREE_CODE (arg0) == SAVE_EXPR
2866 || (flags & OEP_MATCH_SIDE_EFFECTS)
2867 || (! TREE_SIDE_EFFECTS (arg0) && ! TREE_SIDE_EFFECTS (arg1))))
2868 return 1;
2869
2870 /* Next handle constant cases, those for which we can return 1 even
2871 if ONLY_CONST is set. */
2872 if (TREE_CONSTANT (arg0) && TREE_CONSTANT (arg1))
2873 switch (TREE_CODE (arg0))
2874 {
2875 case INTEGER_CST:
2876 return tree_int_cst_equal (arg0, arg1);
2877
2878 case FIXED_CST:
2879 return FIXED_VALUES_IDENTICAL (TREE_FIXED_CST (arg0),
2880 TREE_FIXED_CST (arg1));
2881
2882 case REAL_CST:
2883 if (real_identical (&TREE_REAL_CST (arg0), &TREE_REAL_CST (arg1)))
2884 return 1;
2885
2886
2887 if (!HONOR_SIGNED_ZEROS (arg0))
2888 {
2889 /* If we do not distinguish between signed and unsigned zero,
2890 consider them equal. */
2891 if (real_zerop (arg0) && real_zerop (arg1))
2892 return 1;
2893 }
2894 return 0;
2895
2896 case VECTOR_CST:
2897 {
2898 unsigned i;
2899
2900 if (VECTOR_CST_NELTS (arg0) != VECTOR_CST_NELTS (arg1))
2901 return 0;
2902
2903 for (i = 0; i < VECTOR_CST_NELTS (arg0); ++i)
2904 {
2905 if (!operand_equal_p (VECTOR_CST_ELT (arg0, i),
2906 VECTOR_CST_ELT (arg1, i), flags))
2907 return 0;
2908 }
2909 return 1;
2910 }
2911
2912 case COMPLEX_CST:
2913 return (operand_equal_p (TREE_REALPART (arg0), TREE_REALPART (arg1),
2914 flags)
2915 && operand_equal_p (TREE_IMAGPART (arg0), TREE_IMAGPART (arg1),
2916 flags));
2917
2918 case STRING_CST:
2919 return (TREE_STRING_LENGTH (arg0) == TREE_STRING_LENGTH (arg1)
2920 && ! memcmp (TREE_STRING_POINTER (arg0),
2921 TREE_STRING_POINTER (arg1),
2922 TREE_STRING_LENGTH (arg0)));
2923
2924 case ADDR_EXPR:
2925 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
2926 return operand_equal_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 0),
2927 flags | OEP_ADDRESS_OF
2928 | OEP_MATCH_SIDE_EFFECTS);
2929 case CONSTRUCTOR:
2930 /* In GIMPLE empty constructors are allowed in initializers of
2931 aggregates. */
2932 return (!vec_safe_length (CONSTRUCTOR_ELTS (arg0))
2933 && !vec_safe_length (CONSTRUCTOR_ELTS (arg1)));
2934 default:
2935 break;
2936 }
2937
2938 if (flags & OEP_ONLY_CONST)
2939 return 0;
2940
2941 /* Define macros to test an operand from arg0 and arg1 for equality and a
2942 variant that allows null and views null as being different from any
2943 non-null value. In the latter case, if either is null, the both
2944 must be; otherwise, do the normal comparison. */
2945 #define OP_SAME(N) operand_equal_p (TREE_OPERAND (arg0, N), \
2946 TREE_OPERAND (arg1, N), flags)
2947
2948 #define OP_SAME_WITH_NULL(N) \
2949 ((!TREE_OPERAND (arg0, N) || !TREE_OPERAND (arg1, N)) \
2950 ? TREE_OPERAND (arg0, N) == TREE_OPERAND (arg1, N) : OP_SAME (N))
2951
2952 switch (TREE_CODE_CLASS (TREE_CODE (arg0)))
2953 {
2954 case tcc_unary:
2955 /* Two conversions are equal only if signedness and modes match. */
2956 switch (TREE_CODE (arg0))
2957 {
2958 CASE_CONVERT:
2959 case FIX_TRUNC_EXPR:
2960 if (TYPE_UNSIGNED (TREE_TYPE (arg0))
2961 != TYPE_UNSIGNED (TREE_TYPE (arg1)))
2962 return 0;
2963 break;
2964 default:
2965 break;
2966 }
2967
2968 return OP_SAME (0);
2969
2970
2971 case tcc_comparison:
2972 case tcc_binary:
2973 if (OP_SAME (0) && OP_SAME (1))
2974 return 1;
2975
2976 /* For commutative ops, allow the other order. */
2977 return (commutative_tree_code (TREE_CODE (arg0))
2978 && operand_equal_p (TREE_OPERAND (arg0, 0),
2979 TREE_OPERAND (arg1, 1), flags)
2980 && operand_equal_p (TREE_OPERAND (arg0, 1),
2981 TREE_OPERAND (arg1, 0), flags));
2982
2983 case tcc_reference:
2984 /* If either of the pointer (or reference) expressions we are
2985 dereferencing contain a side effect, these cannot be equal,
2986 but their addresses can be. */
2987 if ((flags & OEP_MATCH_SIDE_EFFECTS) == 0
2988 && (TREE_SIDE_EFFECTS (arg0)
2989 || TREE_SIDE_EFFECTS (arg1)))
2990 return 0;
2991
2992 switch (TREE_CODE (arg0))
2993 {
2994 case INDIRECT_REF:
2995 if (!(flags & OEP_ADDRESS_OF)
2996 && (TYPE_ALIGN (TREE_TYPE (arg0))
2997 != TYPE_ALIGN (TREE_TYPE (arg1))))
2998 return 0;
2999 flags &= ~OEP_ADDRESS_OF;
3000 return OP_SAME (0);
3001
3002 case REALPART_EXPR:
3003 case IMAGPART_EXPR:
3004 case VIEW_CONVERT_EXPR:
3005 return OP_SAME (0);
3006
3007 case TARGET_MEM_REF:
3008 case MEM_REF:
3009 if (!(flags & OEP_ADDRESS_OF))
3010 {
3011 /* Require equal access sizes */
3012 if (TYPE_SIZE (TREE_TYPE (arg0)) != TYPE_SIZE (TREE_TYPE (arg1))
3013 && (!TYPE_SIZE (TREE_TYPE (arg0))
3014 || !TYPE_SIZE (TREE_TYPE (arg1))
3015 || !operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
3016 TYPE_SIZE (TREE_TYPE (arg1)),
3017 flags)))
3018 return 0;
3019 /* Verify that accesses are TBAA compatible. */
3020 if (!alias_ptr_types_compatible_p
3021 (TREE_TYPE (TREE_OPERAND (arg0, 1)),
3022 TREE_TYPE (TREE_OPERAND (arg1, 1)))
3023 || (MR_DEPENDENCE_CLIQUE (arg0)
3024 != MR_DEPENDENCE_CLIQUE (arg1))
3025 || (MR_DEPENDENCE_BASE (arg0)
3026 != MR_DEPENDENCE_BASE (arg1)))
3027 return 0;
3028 /* Verify that alignment is compatible. */
3029 if (TYPE_ALIGN (TREE_TYPE (arg0))
3030 != TYPE_ALIGN (TREE_TYPE (arg1)))
3031 return 0;
3032 }
3033 flags &= ~OEP_ADDRESS_OF;
3034 return (OP_SAME (0) && OP_SAME (1)
3035 /* TARGET_MEM_REF require equal extra operands. */
3036 && (TREE_CODE (arg0) != TARGET_MEM_REF
3037 || (OP_SAME_WITH_NULL (2)
3038 && OP_SAME_WITH_NULL (3)
3039 && OP_SAME_WITH_NULL (4))));
3040
3041 case ARRAY_REF:
3042 case ARRAY_RANGE_REF:
3043 /* Operands 2 and 3 may be null.
3044 Compare the array index by value if it is constant first as we
3045 may have different types but same value here. */
3046 if (!OP_SAME (0))
3047 return 0;
3048 flags &= ~OEP_ADDRESS_OF;
3049 return ((tree_int_cst_equal (TREE_OPERAND (arg0, 1),
3050 TREE_OPERAND (arg1, 1))
3051 || OP_SAME (1))
3052 && OP_SAME_WITH_NULL (2)
3053 && OP_SAME_WITH_NULL (3));
3054
3055 case COMPONENT_REF:
3056 /* Handle operand 2 the same as for ARRAY_REF. Operand 0
3057 may be NULL when we're called to compare MEM_EXPRs. */
3058 if (!OP_SAME_WITH_NULL (0)
3059 || !OP_SAME (1))
3060 return 0;
3061 flags &= ~OEP_ADDRESS_OF;
3062 return OP_SAME_WITH_NULL (2);
3063
3064 case BIT_FIELD_REF:
3065 if (!OP_SAME (0))
3066 return 0;
3067 flags &= ~OEP_ADDRESS_OF;
3068 return OP_SAME (1) && OP_SAME (2);
3069
3070 default:
3071 return 0;
3072 }
3073
3074 case tcc_expression:
3075 switch (TREE_CODE (arg0))
3076 {
3077 case ADDR_EXPR:
3078 /* Be sure we pass right ADDRESS_OF flag. */
3079 gcc_checking_assert (!(flags & OEP_ADDRESS_OF));
3080 return operand_equal_p (TREE_OPERAND (arg0, 0),
3081 TREE_OPERAND (arg1, 0),
3082 flags | OEP_ADDRESS_OF);
3083
3084 case TRUTH_NOT_EXPR:
3085 return OP_SAME (0);
3086
3087 case TRUTH_ANDIF_EXPR:
3088 case TRUTH_ORIF_EXPR:
3089 return OP_SAME (0) && OP_SAME (1);
3090
3091 case FMA_EXPR:
3092 case WIDEN_MULT_PLUS_EXPR:
3093 case WIDEN_MULT_MINUS_EXPR:
3094 if (!OP_SAME (2))
3095 return 0;
3096 /* The multiplcation operands are commutative. */
3097 /* FALLTHRU */
3098
3099 case TRUTH_AND_EXPR:
3100 case TRUTH_OR_EXPR:
3101 case TRUTH_XOR_EXPR:
3102 if (OP_SAME (0) && OP_SAME (1))
3103 return 1;
3104
3105 /* Otherwise take into account this is a commutative operation. */
3106 return (operand_equal_p (TREE_OPERAND (arg0, 0),
3107 TREE_OPERAND (arg1, 1), flags)
3108 && operand_equal_p (TREE_OPERAND (arg0, 1),
3109 TREE_OPERAND (arg1, 0), flags));
3110
3111 case COND_EXPR:
3112 case VEC_COND_EXPR:
3113 case DOT_PROD_EXPR:
3114 return OP_SAME (0) && OP_SAME (1) && OP_SAME (2);
3115
3116 default:
3117 return 0;
3118 }
3119
3120 case tcc_vl_exp:
3121 switch (TREE_CODE (arg0))
3122 {
3123 case CALL_EXPR:
3124 if ((CALL_EXPR_FN (arg0) == NULL_TREE)
3125 != (CALL_EXPR_FN (arg1) == NULL_TREE))
3126 /* If not both CALL_EXPRs are either internal or normal function
3127 functions, then they are not equal. */
3128 return 0;
3129 else if (CALL_EXPR_FN (arg0) == NULL_TREE)
3130 {
3131 /* If the CALL_EXPRs call different internal functions, then they
3132 are not equal. */
3133 if (CALL_EXPR_IFN (arg0) != CALL_EXPR_IFN (arg1))
3134 return 0;
3135 }
3136 else
3137 {
3138 /* If the CALL_EXPRs call different functions, then they are not
3139 equal. */
3140 if (! operand_equal_p (CALL_EXPR_FN (arg0), CALL_EXPR_FN (arg1),
3141 flags))
3142 return 0;
3143 }
3144
3145 /* FIXME: We could skip this test for OEP_MATCH_SIDE_EFFECTS. */
3146 {
3147 unsigned int cef = call_expr_flags (arg0);
3148 if (flags & OEP_PURE_SAME)
3149 cef &= ECF_CONST | ECF_PURE;
3150 else
3151 cef &= ECF_CONST;
3152 if (!cef)
3153 return 0;
3154 }
3155
3156 /* Now see if all the arguments are the same. */
3157 {
3158 const_call_expr_arg_iterator iter0, iter1;
3159 const_tree a0, a1;
3160 for (a0 = first_const_call_expr_arg (arg0, &iter0),
3161 a1 = first_const_call_expr_arg (arg1, &iter1);
3162 a0 && a1;
3163 a0 = next_const_call_expr_arg (&iter0),
3164 a1 = next_const_call_expr_arg (&iter1))
3165 if (! operand_equal_p (a0, a1, flags))
3166 return 0;
3167
3168 /* If we get here and both argument lists are exhausted
3169 then the CALL_EXPRs are equal. */
3170 return ! (a0 || a1);
3171 }
3172 default:
3173 return 0;
3174 }
3175
3176 case tcc_declaration:
3177 /* Consider __builtin_sqrt equal to sqrt. */
3178 return (TREE_CODE (arg0) == FUNCTION_DECL
3179 && DECL_BUILT_IN (arg0) && DECL_BUILT_IN (arg1)
3180 && DECL_BUILT_IN_CLASS (arg0) == DECL_BUILT_IN_CLASS (arg1)
3181 && DECL_FUNCTION_CODE (arg0) == DECL_FUNCTION_CODE (arg1));
3182
3183 case tcc_exceptional:
3184 if (TREE_CODE (arg0) == CONSTRUCTOR)
3185 {
3186 /* In GIMPLE constructors are used only to build vectors from
3187 elements. Individual elements in the constructor must be
3188 indexed in increasing order and form an initial sequence.
3189
3190 We make no effort to compare constructors in generic.
3191 (see sem_variable::equals in ipa-icf which can do so for
3192 constants). */
3193 if (!VECTOR_TYPE_P (TREE_TYPE (arg0))
3194 || !VECTOR_TYPE_P (TREE_TYPE (arg1)))
3195 return 0;
3196
3197 /* Be sure that vectors constructed have the same representation.
3198 We only tested element precision and modes to match.
3199 Vectors may be BLKmode and thus also check that the number of
3200 parts match. */
3201 if (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0))
3202 != TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)))
3203 return 0;
3204
3205 vec<constructor_elt, va_gc> *v0 = CONSTRUCTOR_ELTS (arg0);
3206 vec<constructor_elt, va_gc> *v1 = CONSTRUCTOR_ELTS (arg1);
3207 unsigned int len = vec_safe_length (v0);
3208
3209 if (len != vec_safe_length (v1))
3210 return 0;
3211
3212 for (unsigned int i = 0; i < len; i++)
3213 {
3214 constructor_elt *c0 = &(*v0)[i];
3215 constructor_elt *c1 = &(*v1)[i];
3216
3217 if (!operand_equal_p (c0->value, c1->value, flags)
3218 /* In GIMPLE the indexes can be either NULL or matching i.
3219 Double check this so we won't get false
3220 positives for GENERIC. */
3221 || (c0->index
3222 && (TREE_CODE (c0->index) != INTEGER_CST
3223 || !compare_tree_int (c0->index, i)))
3224 || (c1->index
3225 && (TREE_CODE (c1->index) != INTEGER_CST
3226 || !compare_tree_int (c1->index, i))))
3227 return 0;
3228 }
3229 return 1;
3230 }
3231 return 0;
3232
3233 default:
3234 return 0;
3235 }
3236
3237 #undef OP_SAME
3238 #undef OP_SAME_WITH_NULL
3239 }
3240 \f
3241 /* Similar to operand_equal_p, but see if ARG0 might have been made by
3242 shorten_compare from ARG1 when ARG1 was being compared with OTHER.
3243
3244 When in doubt, return 0. */
3245
3246 static int
3247 operand_equal_for_comparison_p (tree arg0, tree arg1, tree other)
3248 {
3249 int unsignedp1, unsignedpo;
3250 tree primarg0, primarg1, primother;
3251 unsigned int correct_width;
3252
3253 if (operand_equal_p (arg0, arg1, 0))
3254 return 1;
3255
3256 if (! INTEGRAL_TYPE_P (TREE_TYPE (arg0))
3257 || ! INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
3258 return 0;
3259
3260 /* Discard any conversions that don't change the modes of ARG0 and ARG1
3261 and see if the inner values are the same. This removes any
3262 signedness comparison, which doesn't matter here. */
3263 primarg0 = arg0, primarg1 = arg1;
3264 STRIP_NOPS (primarg0);
3265 STRIP_NOPS (primarg1);
3266 if (operand_equal_p (primarg0, primarg1, 0))
3267 return 1;
3268
3269 /* Duplicate what shorten_compare does to ARG1 and see if that gives the
3270 actual comparison operand, ARG0.
3271
3272 First throw away any conversions to wider types
3273 already present in the operands. */
3274
3275 primarg1 = get_narrower (arg1, &unsignedp1);
3276 primother = get_narrower (other, &unsignedpo);
3277
3278 correct_width = TYPE_PRECISION (TREE_TYPE (arg1));
3279 if (unsignedp1 == unsignedpo
3280 && TYPE_PRECISION (TREE_TYPE (primarg1)) < correct_width
3281 && TYPE_PRECISION (TREE_TYPE (primother)) < correct_width)
3282 {
3283 tree type = TREE_TYPE (arg0);
3284
3285 /* Make sure shorter operand is extended the right way
3286 to match the longer operand. */
3287 primarg1 = fold_convert (signed_or_unsigned_type_for
3288 (unsignedp1, TREE_TYPE (primarg1)), primarg1);
3289
3290 if (operand_equal_p (arg0, fold_convert (type, primarg1), 0))
3291 return 1;
3292 }
3293
3294 return 0;
3295 }
3296 \f
3297 /* See if ARG is an expression that is either a comparison or is performing
3298 arithmetic on comparisons. The comparisons must only be comparing
3299 two different values, which will be stored in *CVAL1 and *CVAL2; if
3300 they are nonzero it means that some operands have already been found.
3301 No variables may be used anywhere else in the expression except in the
3302 comparisons. If SAVE_P is true it means we removed a SAVE_EXPR around
3303 the expression and save_expr needs to be called with CVAL1 and CVAL2.
3304
3305 If this is true, return 1. Otherwise, return zero. */
3306
3307 static int
3308 twoval_comparison_p (tree arg, tree *cval1, tree *cval2, int *save_p)
3309 {
3310 enum tree_code code = TREE_CODE (arg);
3311 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3312
3313 /* We can handle some of the tcc_expression cases here. */
3314 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3315 tclass = tcc_unary;
3316 else if (tclass == tcc_expression
3317 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR
3318 || code == COMPOUND_EXPR))
3319 tclass = tcc_binary;
3320
3321 else if (tclass == tcc_expression && code == SAVE_EXPR
3322 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg, 0)))
3323 {
3324 /* If we've already found a CVAL1 or CVAL2, this expression is
3325 two complex to handle. */
3326 if (*cval1 || *cval2)
3327 return 0;
3328
3329 tclass = tcc_unary;
3330 *save_p = 1;
3331 }
3332
3333 switch (tclass)
3334 {
3335 case tcc_unary:
3336 return twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p);
3337
3338 case tcc_binary:
3339 return (twoval_comparison_p (TREE_OPERAND (arg, 0), cval1, cval2, save_p)
3340 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3341 cval1, cval2, save_p));
3342
3343 case tcc_constant:
3344 return 1;
3345
3346 case tcc_expression:
3347 if (code == COND_EXPR)
3348 return (twoval_comparison_p (TREE_OPERAND (arg, 0),
3349 cval1, cval2, save_p)
3350 && twoval_comparison_p (TREE_OPERAND (arg, 1),
3351 cval1, cval2, save_p)
3352 && twoval_comparison_p (TREE_OPERAND (arg, 2),
3353 cval1, cval2, save_p));
3354 return 0;
3355
3356 case tcc_comparison:
3357 /* First see if we can handle the first operand, then the second. For
3358 the second operand, we know *CVAL1 can't be zero. It must be that
3359 one side of the comparison is each of the values; test for the
3360 case where this isn't true by failing if the two operands
3361 are the same. */
3362
3363 if (operand_equal_p (TREE_OPERAND (arg, 0),
3364 TREE_OPERAND (arg, 1), 0))
3365 return 0;
3366
3367 if (*cval1 == 0)
3368 *cval1 = TREE_OPERAND (arg, 0);
3369 else if (operand_equal_p (*cval1, TREE_OPERAND (arg, 0), 0))
3370 ;
3371 else if (*cval2 == 0)
3372 *cval2 = TREE_OPERAND (arg, 0);
3373 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 0), 0))
3374 ;
3375 else
3376 return 0;
3377
3378 if (operand_equal_p (*cval1, TREE_OPERAND (arg, 1), 0))
3379 ;
3380 else if (*cval2 == 0)
3381 *cval2 = TREE_OPERAND (arg, 1);
3382 else if (operand_equal_p (*cval2, TREE_OPERAND (arg, 1), 0))
3383 ;
3384 else
3385 return 0;
3386
3387 return 1;
3388
3389 default:
3390 return 0;
3391 }
3392 }
3393 \f
3394 /* ARG is a tree that is known to contain just arithmetic operations and
3395 comparisons. Evaluate the operations in the tree substituting NEW0 for
3396 any occurrence of OLD0 as an operand of a comparison and likewise for
3397 NEW1 and OLD1. */
3398
3399 static tree
3400 eval_subst (location_t loc, tree arg, tree old0, tree new0,
3401 tree old1, tree new1)
3402 {
3403 tree type = TREE_TYPE (arg);
3404 enum tree_code code = TREE_CODE (arg);
3405 enum tree_code_class tclass = TREE_CODE_CLASS (code);
3406
3407 /* We can handle some of the tcc_expression cases here. */
3408 if (tclass == tcc_expression && code == TRUTH_NOT_EXPR)
3409 tclass = tcc_unary;
3410 else if (tclass == tcc_expression
3411 && (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR))
3412 tclass = tcc_binary;
3413
3414 switch (tclass)
3415 {
3416 case tcc_unary:
3417 return fold_build1_loc (loc, code, type,
3418 eval_subst (loc, TREE_OPERAND (arg, 0),
3419 old0, new0, old1, new1));
3420
3421 case tcc_binary:
3422 return fold_build2_loc (loc, code, type,
3423 eval_subst (loc, TREE_OPERAND (arg, 0),
3424 old0, new0, old1, new1),
3425 eval_subst (loc, TREE_OPERAND (arg, 1),
3426 old0, new0, old1, new1));
3427
3428 case tcc_expression:
3429 switch (code)
3430 {
3431 case SAVE_EXPR:
3432 return eval_subst (loc, TREE_OPERAND (arg, 0), old0, new0,
3433 old1, new1);
3434
3435 case COMPOUND_EXPR:
3436 return eval_subst (loc, TREE_OPERAND (arg, 1), old0, new0,
3437 old1, new1);
3438
3439 case COND_EXPR:
3440 return fold_build3_loc (loc, code, type,
3441 eval_subst (loc, TREE_OPERAND (arg, 0),
3442 old0, new0, old1, new1),
3443 eval_subst (loc, TREE_OPERAND (arg, 1),
3444 old0, new0, old1, new1),
3445 eval_subst (loc, TREE_OPERAND (arg, 2),
3446 old0, new0, old1, new1));
3447 default:
3448 break;
3449 }
3450 /* Fall through - ??? */
3451
3452 case tcc_comparison:
3453 {
3454 tree arg0 = TREE_OPERAND (arg, 0);
3455 tree arg1 = TREE_OPERAND (arg, 1);
3456
3457 /* We need to check both for exact equality and tree equality. The
3458 former will be true if the operand has a side-effect. In that
3459 case, we know the operand occurred exactly once. */
3460
3461 if (arg0 == old0 || operand_equal_p (arg0, old0, 0))
3462 arg0 = new0;
3463 else if (arg0 == old1 || operand_equal_p (arg0, old1, 0))
3464 arg0 = new1;
3465
3466 if (arg1 == old0 || operand_equal_p (arg1, old0, 0))
3467 arg1 = new0;
3468 else if (arg1 == old1 || operand_equal_p (arg1, old1, 0))
3469 arg1 = new1;
3470
3471 return fold_build2_loc (loc, code, type, arg0, arg1);
3472 }
3473
3474 default:
3475 return arg;
3476 }
3477 }
3478 \f
3479 /* Return a tree for the case when the result of an expression is RESULT
3480 converted to TYPE and OMITTED was previously an operand of the expression
3481 but is now not needed (e.g., we folded OMITTED * 0).
3482
3483 If OMITTED has side effects, we must evaluate it. Otherwise, just do
3484 the conversion of RESULT to TYPE. */
3485
3486 tree
3487 omit_one_operand_loc (location_t loc, tree type, tree result, tree omitted)
3488 {
3489 tree t = fold_convert_loc (loc, type, result);
3490
3491 /* If the resulting operand is an empty statement, just return the omitted
3492 statement casted to void. */
3493 if (IS_EMPTY_STMT (t) && TREE_SIDE_EFFECTS (omitted))
3494 return build1_loc (loc, NOP_EXPR, void_type_node,
3495 fold_ignored_result (omitted));
3496
3497 if (TREE_SIDE_EFFECTS (omitted))
3498 return build2_loc (loc, COMPOUND_EXPR, type,
3499 fold_ignored_result (omitted), t);
3500
3501 return non_lvalue_loc (loc, t);
3502 }
3503
3504 /* Return a tree for the case when the result of an expression is RESULT
3505 converted to TYPE and OMITTED1 and OMITTED2 were previously operands
3506 of the expression but are now not needed.
3507
3508 If OMITTED1 or OMITTED2 has side effects, they must be evaluated.
3509 If both OMITTED1 and OMITTED2 have side effects, OMITTED1 is
3510 evaluated before OMITTED2. Otherwise, if neither has side effects,
3511 just do the conversion of RESULT to TYPE. */
3512
3513 tree
3514 omit_two_operands_loc (location_t loc, tree type, tree result,
3515 tree omitted1, tree omitted2)
3516 {
3517 tree t = fold_convert_loc (loc, type, result);
3518
3519 if (TREE_SIDE_EFFECTS (omitted2))
3520 t = build2_loc (loc, COMPOUND_EXPR, type, omitted2, t);
3521 if (TREE_SIDE_EFFECTS (omitted1))
3522 t = build2_loc (loc, COMPOUND_EXPR, type, omitted1, t);
3523
3524 return TREE_CODE (t) != COMPOUND_EXPR ? non_lvalue_loc (loc, t) : t;
3525 }
3526
3527 \f
3528 /* Return a simplified tree node for the truth-negation of ARG. This
3529 never alters ARG itself. We assume that ARG is an operation that
3530 returns a truth value (0 or 1).
3531
3532 FIXME: one would think we would fold the result, but it causes
3533 problems with the dominator optimizer. */
3534
3535 static tree
3536 fold_truth_not_expr (location_t loc, tree arg)
3537 {
3538 tree type = TREE_TYPE (arg);
3539 enum tree_code code = TREE_CODE (arg);
3540 location_t loc1, loc2;
3541
3542 /* If this is a comparison, we can simply invert it, except for
3543 floating-point non-equality comparisons, in which case we just
3544 enclose a TRUTH_NOT_EXPR around what we have. */
3545
3546 if (TREE_CODE_CLASS (code) == tcc_comparison)
3547 {
3548 tree op_type = TREE_TYPE (TREE_OPERAND (arg, 0));
3549 if (FLOAT_TYPE_P (op_type)
3550 && flag_trapping_math
3551 && code != ORDERED_EXPR && code != UNORDERED_EXPR
3552 && code != NE_EXPR && code != EQ_EXPR)
3553 return NULL_TREE;
3554
3555 code = invert_tree_comparison (code, HONOR_NANS (op_type));
3556 if (code == ERROR_MARK)
3557 return NULL_TREE;
3558
3559 return build2_loc (loc, code, type, TREE_OPERAND (arg, 0),
3560 TREE_OPERAND (arg, 1));
3561 }
3562
3563 switch (code)
3564 {
3565 case INTEGER_CST:
3566 return constant_boolean_node (integer_zerop (arg), type);
3567
3568 case TRUTH_AND_EXPR:
3569 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3570 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3571 return build2_loc (loc, TRUTH_OR_EXPR, type,
3572 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3573 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3574
3575 case TRUTH_OR_EXPR:
3576 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3577 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3578 return build2_loc (loc, TRUTH_AND_EXPR, type,
3579 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3580 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3581
3582 case TRUTH_XOR_EXPR:
3583 /* Here we can invert either operand. We invert the first operand
3584 unless the second operand is a TRUTH_NOT_EXPR in which case our
3585 result is the XOR of the first operand with the inside of the
3586 negation of the second operand. */
3587
3588 if (TREE_CODE (TREE_OPERAND (arg, 1)) == TRUTH_NOT_EXPR)
3589 return build2_loc (loc, TRUTH_XOR_EXPR, type, TREE_OPERAND (arg, 0),
3590 TREE_OPERAND (TREE_OPERAND (arg, 1), 0));
3591 else
3592 return build2_loc (loc, TRUTH_XOR_EXPR, type,
3593 invert_truthvalue_loc (loc, TREE_OPERAND (arg, 0)),
3594 TREE_OPERAND (arg, 1));
3595
3596 case TRUTH_ANDIF_EXPR:
3597 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3598 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3599 return build2_loc (loc, TRUTH_ORIF_EXPR, type,
3600 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3601 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3602
3603 case TRUTH_ORIF_EXPR:
3604 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3605 loc2 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3606 return build2_loc (loc, TRUTH_ANDIF_EXPR, type,
3607 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)),
3608 invert_truthvalue_loc (loc2, TREE_OPERAND (arg, 1)));
3609
3610 case TRUTH_NOT_EXPR:
3611 return TREE_OPERAND (arg, 0);
3612
3613 case COND_EXPR:
3614 {
3615 tree arg1 = TREE_OPERAND (arg, 1);
3616 tree arg2 = TREE_OPERAND (arg, 2);
3617
3618 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3619 loc2 = expr_location_or (TREE_OPERAND (arg, 2), loc);
3620
3621 /* A COND_EXPR may have a throw as one operand, which
3622 then has void type. Just leave void operands
3623 as they are. */
3624 return build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg, 0),
3625 VOID_TYPE_P (TREE_TYPE (arg1))
3626 ? arg1 : invert_truthvalue_loc (loc1, arg1),
3627 VOID_TYPE_P (TREE_TYPE (arg2))
3628 ? arg2 : invert_truthvalue_loc (loc2, arg2));
3629 }
3630
3631 case COMPOUND_EXPR:
3632 loc1 = expr_location_or (TREE_OPERAND (arg, 1), loc);
3633 return build2_loc (loc, COMPOUND_EXPR, type,
3634 TREE_OPERAND (arg, 0),
3635 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 1)));
3636
3637 case NON_LVALUE_EXPR:
3638 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3639 return invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0));
3640
3641 CASE_CONVERT:
3642 if (TREE_CODE (TREE_TYPE (arg)) == BOOLEAN_TYPE)
3643 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3644
3645 /* ... fall through ... */
3646
3647 case FLOAT_EXPR:
3648 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3649 return build1_loc (loc, TREE_CODE (arg), type,
3650 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3651
3652 case BIT_AND_EXPR:
3653 if (!integer_onep (TREE_OPERAND (arg, 1)))
3654 return NULL_TREE;
3655 return build2_loc (loc, EQ_EXPR, type, arg, build_int_cst (type, 0));
3656
3657 case SAVE_EXPR:
3658 return build1_loc (loc, TRUTH_NOT_EXPR, type, arg);
3659
3660 case CLEANUP_POINT_EXPR:
3661 loc1 = expr_location_or (TREE_OPERAND (arg, 0), loc);
3662 return build1_loc (loc, CLEANUP_POINT_EXPR, type,
3663 invert_truthvalue_loc (loc1, TREE_OPERAND (arg, 0)));
3664
3665 default:
3666 return NULL_TREE;
3667 }
3668 }
3669
3670 /* Fold the truth-negation of ARG. This never alters ARG itself. We
3671 assume that ARG is an operation that returns a truth value (0 or 1
3672 for scalars, 0 or -1 for vectors). Return the folded expression if
3673 folding is successful. Otherwise, return NULL_TREE. */
3674
3675 static tree
3676 fold_invert_truthvalue (location_t loc, tree arg)
3677 {
3678 tree type = TREE_TYPE (arg);
3679 return fold_unary_loc (loc, VECTOR_TYPE_P (type)
3680 ? BIT_NOT_EXPR
3681 : TRUTH_NOT_EXPR,
3682 type, arg);
3683 }
3684
3685 /* Return a simplified tree node for the truth-negation of ARG. This
3686 never alters ARG itself. We assume that ARG is an operation that
3687 returns a truth value (0 or 1 for scalars, 0 or -1 for vectors). */
3688
3689 tree
3690 invert_truthvalue_loc (location_t loc, tree arg)
3691 {
3692 if (TREE_CODE (arg) == ERROR_MARK)
3693 return arg;
3694
3695 tree type = TREE_TYPE (arg);
3696 return fold_build1_loc (loc, VECTOR_TYPE_P (type)
3697 ? BIT_NOT_EXPR
3698 : TRUTH_NOT_EXPR,
3699 type, arg);
3700 }
3701
3702 /* Knowing that ARG0 and ARG1 are both RDIV_EXPRs, simplify a binary operation
3703 with code CODE. This optimization is unsafe. */
3704 static tree
3705 distribute_real_division (location_t loc, enum tree_code code, tree type,
3706 tree arg0, tree arg1)
3707 {
3708 bool mul0 = TREE_CODE (arg0) == MULT_EXPR;
3709 bool mul1 = TREE_CODE (arg1) == MULT_EXPR;
3710
3711 /* (A / C) +- (B / C) -> (A +- B) / C. */
3712 if (mul0 == mul1
3713 && operand_equal_p (TREE_OPERAND (arg0, 1),
3714 TREE_OPERAND (arg1, 1), 0))
3715 return fold_build2_loc (loc, mul0 ? MULT_EXPR : RDIV_EXPR, type,
3716 fold_build2_loc (loc, code, type,
3717 TREE_OPERAND (arg0, 0),
3718 TREE_OPERAND (arg1, 0)),
3719 TREE_OPERAND (arg0, 1));
3720
3721 /* (A / C1) +- (A / C2) -> A * (1 / C1 +- 1 / C2). */
3722 if (operand_equal_p (TREE_OPERAND (arg0, 0),
3723 TREE_OPERAND (arg1, 0), 0)
3724 && TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
3725 && TREE_CODE (TREE_OPERAND (arg1, 1)) == REAL_CST)
3726 {
3727 REAL_VALUE_TYPE r0, r1;
3728 r0 = TREE_REAL_CST (TREE_OPERAND (arg0, 1));
3729 r1 = TREE_REAL_CST (TREE_OPERAND (arg1, 1));
3730 if (!mul0)
3731 real_arithmetic (&r0, RDIV_EXPR, &dconst1, &r0);
3732 if (!mul1)
3733 real_arithmetic (&r1, RDIV_EXPR, &dconst1, &r1);
3734 real_arithmetic (&r0, code, &r0, &r1);
3735 return fold_build2_loc (loc, MULT_EXPR, type,
3736 TREE_OPERAND (arg0, 0),
3737 build_real (type, r0));
3738 }
3739
3740 return NULL_TREE;
3741 }
3742 \f
3743 /* Return a BIT_FIELD_REF of type TYPE to refer to BITSIZE bits of INNER
3744 starting at BITPOS. The field is unsigned if UNSIGNEDP is nonzero
3745 and uses reverse storage order if REVERSEP is nonzero. */
3746
3747 static tree
3748 make_bit_field_ref (location_t loc, tree inner, tree type,
3749 HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
3750 int unsignedp, int reversep)
3751 {
3752 tree result, bftype;
3753
3754 if (bitpos == 0 && !reversep)
3755 {
3756 tree size = TYPE_SIZE (TREE_TYPE (inner));
3757 if ((INTEGRAL_TYPE_P (TREE_TYPE (inner))
3758 || POINTER_TYPE_P (TREE_TYPE (inner)))
3759 && tree_fits_shwi_p (size)
3760 && tree_to_shwi (size) == bitsize)
3761 return fold_convert_loc (loc, type, inner);
3762 }
3763
3764 bftype = type;
3765 if (TYPE_PRECISION (bftype) != bitsize
3766 || TYPE_UNSIGNED (bftype) == !unsignedp)
3767 bftype = build_nonstandard_integer_type (bitsize, 0);
3768
3769 result = build3_loc (loc, BIT_FIELD_REF, bftype, inner,
3770 size_int (bitsize), bitsize_int (bitpos));
3771 REF_REVERSE_STORAGE_ORDER (result) = reversep;
3772
3773 if (bftype != type)
3774 result = fold_convert_loc (loc, type, result);
3775
3776 return result;
3777 }
3778
3779 /* Optimize a bit-field compare.
3780
3781 There are two cases: First is a compare against a constant and the
3782 second is a comparison of two items where the fields are at the same
3783 bit position relative to the start of a chunk (byte, halfword, word)
3784 large enough to contain it. In these cases we can avoid the shift
3785 implicit in bitfield extractions.
3786
3787 For constants, we emit a compare of the shifted constant with the
3788 BIT_AND_EXPR of a mask and a byte, halfword, or word of the operand being
3789 compared. For two fields at the same position, we do the ANDs with the
3790 similar mask and compare the result of the ANDs.
3791
3792 CODE is the comparison code, known to be either NE_EXPR or EQ_EXPR.
3793 COMPARE_TYPE is the type of the comparison, and LHS and RHS
3794 are the left and right operands of the comparison, respectively.
3795
3796 If the optimization described above can be done, we return the resulting
3797 tree. Otherwise we return zero. */
3798
3799 static tree
3800 optimize_bit_field_compare (location_t loc, enum tree_code code,
3801 tree compare_type, tree lhs, tree rhs)
3802 {
3803 HOST_WIDE_INT lbitpos, lbitsize, rbitpos, rbitsize, nbitpos, nbitsize;
3804 tree type = TREE_TYPE (lhs);
3805 tree unsigned_type;
3806 int const_p = TREE_CODE (rhs) == INTEGER_CST;
3807 machine_mode lmode, rmode, nmode;
3808 int lunsignedp, runsignedp;
3809 int lreversep, rreversep;
3810 int lvolatilep = 0, rvolatilep = 0;
3811 tree linner, rinner = NULL_TREE;
3812 tree mask;
3813 tree offset;
3814
3815 /* Get all the information about the extractions being done. If the bit size
3816 if the same as the size of the underlying object, we aren't doing an
3817 extraction at all and so can do nothing. We also don't want to
3818 do anything if the inner expression is a PLACEHOLDER_EXPR since we
3819 then will no longer be able to replace it. */
3820 linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
3821 &lunsignedp, &lreversep, &lvolatilep, false);
3822 if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
3823 || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
3824 return 0;
3825
3826 if (const_p)
3827 rreversep = lreversep;
3828 else
3829 {
3830 /* If this is not a constant, we can only do something if bit positions,
3831 sizes, signedness and storage order are the same. */
3832 rinner
3833 = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
3834 &runsignedp, &rreversep, &rvolatilep, false);
3835
3836 if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
3837 || lunsignedp != runsignedp || lreversep != rreversep || offset != 0
3838 || TREE_CODE (rinner) == PLACEHOLDER_EXPR || rvolatilep)
3839 return 0;
3840 }
3841
3842 /* See if we can find a mode to refer to this field. We should be able to,
3843 but fail if we can't. */
3844 nmode = get_best_mode (lbitsize, lbitpos, 0, 0,
3845 const_p ? TYPE_ALIGN (TREE_TYPE (linner))
3846 : MIN (TYPE_ALIGN (TREE_TYPE (linner)),
3847 TYPE_ALIGN (TREE_TYPE (rinner))),
3848 word_mode, false);
3849 if (nmode == VOIDmode)
3850 return 0;
3851
3852 /* Set signed and unsigned types of the precision of this mode for the
3853 shifts below. */
3854 unsigned_type = lang_hooks.types.type_for_mode (nmode, 1);
3855
3856 /* Compute the bit position and size for the new reference and our offset
3857 within it. If the new reference is the same size as the original, we
3858 won't optimize anything, so return zero. */
3859 nbitsize = GET_MODE_BITSIZE (nmode);
3860 nbitpos = lbitpos & ~ (nbitsize - 1);
3861 lbitpos -= nbitpos;
3862 if (nbitsize == lbitsize)
3863 return 0;
3864
3865 if (lreversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
3866 lbitpos = nbitsize - lbitsize - lbitpos;
3867
3868 /* Make the mask to be used against the extracted field. */
3869 mask = build_int_cst_type (unsigned_type, -1);
3870 mask = const_binop (LSHIFT_EXPR, mask, size_int (nbitsize - lbitsize));
3871 mask = const_binop (RSHIFT_EXPR, mask,
3872 size_int (nbitsize - lbitsize - lbitpos));
3873
3874 if (! const_p)
3875 /* If not comparing with constant, just rework the comparison
3876 and return. */
3877 return fold_build2_loc (loc, code, compare_type,
3878 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3879 make_bit_field_ref (loc, linner,
3880 unsigned_type,
3881 nbitsize, nbitpos,
3882 1, lreversep),
3883 mask),
3884 fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
3885 make_bit_field_ref (loc, rinner,
3886 unsigned_type,
3887 nbitsize, nbitpos,
3888 1, rreversep),
3889 mask));
3890
3891 /* Otherwise, we are handling the constant case. See if the constant is too
3892 big for the field. Warn and return a tree for 0 (false) if so. We do
3893 this not only for its own sake, but to avoid having to test for this
3894 error case below. If we didn't, we might generate wrong code.
3895
3896 For unsigned fields, the constant shifted right by the field length should
3897 be all zero. For signed fields, the high-order bits should agree with
3898 the sign bit. */
3899
3900 if (lunsignedp)
3901 {
3902 if (wi::lrshift (rhs, lbitsize) != 0)
3903 {
3904 warning (0, "comparison is always %d due to width of bit-field",
3905 code == NE_EXPR);
3906 return constant_boolean_node (code == NE_EXPR, compare_type);
3907 }
3908 }
3909 else
3910 {
3911 wide_int tem = wi::arshift (rhs, lbitsize - 1);
3912 if (tem != 0 && tem != -1)
3913 {
3914 warning (0, "comparison is always %d due to width of bit-field",
3915 code == NE_EXPR);
3916 return constant_boolean_node (code == NE_EXPR, compare_type);
3917 }
3918 }
3919
3920 /* Single-bit compares should always be against zero. */
3921 if (lbitsize == 1 && ! integer_zerop (rhs))
3922 {
3923 code = code == EQ_EXPR ? NE_EXPR : EQ_EXPR;
3924 rhs = build_int_cst (type, 0);
3925 }
3926
3927 /* Make a new bitfield reference, shift the constant over the
3928 appropriate number of bits and mask it with the computed mask
3929 (in case this was a signed field). If we changed it, make a new one. */
3930 lhs = make_bit_field_ref (loc, linner, unsigned_type, nbitsize, nbitpos, 1,
3931 lreversep);
3932
3933 rhs = const_binop (BIT_AND_EXPR,
3934 const_binop (LSHIFT_EXPR,
3935 fold_convert_loc (loc, unsigned_type, rhs),
3936 size_int (lbitpos)),
3937 mask);
3938
3939 lhs = build2_loc (loc, code, compare_type,
3940 build2 (BIT_AND_EXPR, unsigned_type, lhs, mask), rhs);
3941 return lhs;
3942 }
3943 \f
3944 /* Subroutine for fold_truth_andor_1: decode a field reference.
3945
3946 If EXP is a comparison reference, we return the innermost reference.
3947
3948 *PBITSIZE is set to the number of bits in the reference, *PBITPOS is
3949 set to the starting bit number.
3950
3951 If the innermost field can be completely contained in a mode-sized
3952 unit, *PMODE is set to that mode. Otherwise, it is set to VOIDmode.
3953
3954 *PVOLATILEP is set to 1 if the any expression encountered is volatile;
3955 otherwise it is not changed.
3956
3957 *PUNSIGNEDP is set to the signedness of the field.
3958
3959 *PREVERSEP is set to the storage order of the field.
3960
3961 *PMASK is set to the mask used. This is either contained in a
3962 BIT_AND_EXPR or derived from the width of the field.
3963
3964 *PAND_MASK is set to the mask found in a BIT_AND_EXPR, if any.
3965
3966 Return 0 if this is not a component reference or is one that we can't
3967 do anything with. */
3968
3969 static tree
3970 decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
3971 HOST_WIDE_INT *pbitpos, machine_mode *pmode,
3972 int *punsignedp, int *preversep, int *pvolatilep,
3973 tree *pmask, tree *pand_mask)
3974 {
3975 tree outer_type = 0;
3976 tree and_mask = 0;
3977 tree mask, inner, offset;
3978 tree unsigned_type;
3979 unsigned int precision;
3980
3981 /* All the optimizations using this function assume integer fields.
3982 There are problems with FP fields since the type_for_size call
3983 below can fail for, e.g., XFmode. */
3984 if (! INTEGRAL_TYPE_P (TREE_TYPE (exp)))
3985 return 0;
3986
3987 /* We are interested in the bare arrangement of bits, so strip everything
3988 that doesn't affect the machine mode. However, record the type of the
3989 outermost expression if it may matter below. */
3990 if (CONVERT_EXPR_P (exp)
3991 || TREE_CODE (exp) == NON_LVALUE_EXPR)
3992 outer_type = TREE_TYPE (exp);
3993 STRIP_NOPS (exp);
3994
3995 if (TREE_CODE (exp) == BIT_AND_EXPR)
3996 {
3997 and_mask = TREE_OPERAND (exp, 1);
3998 exp = TREE_OPERAND (exp, 0);
3999 STRIP_NOPS (exp); STRIP_NOPS (and_mask);
4000 if (TREE_CODE (and_mask) != INTEGER_CST)
4001 return 0;
4002 }
4003
4004 inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
4005 punsignedp, preversep, pvolatilep, false);
4006 if ((inner == exp && and_mask == 0)
4007 || *pbitsize < 0 || offset != 0
4008 || TREE_CODE (inner) == PLACEHOLDER_EXPR)
4009 return 0;
4010
4011 /* If the number of bits in the reference is the same as the bitsize of
4012 the outer type, then the outer type gives the signedness. Otherwise
4013 (in case of a small bitfield) the signedness is unchanged. */
4014 if (outer_type && *pbitsize == TYPE_PRECISION (outer_type))
4015 *punsignedp = TYPE_UNSIGNED (outer_type);
4016
4017 /* Compute the mask to access the bitfield. */
4018 unsigned_type = lang_hooks.types.type_for_size (*pbitsize, 1);
4019 precision = TYPE_PRECISION (unsigned_type);
4020
4021 mask = build_int_cst_type (unsigned_type, -1);
4022
4023 mask = const_binop (LSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4024 mask = const_binop (RSHIFT_EXPR, mask, size_int (precision - *pbitsize));
4025
4026 /* Merge it with the mask we found in the BIT_AND_EXPR, if any. */
4027 if (and_mask != 0)
4028 mask = fold_build2_loc (loc, BIT_AND_EXPR, unsigned_type,
4029 fold_convert_loc (loc, unsigned_type, and_mask), mask);
4030
4031 *pmask = mask;
4032 *pand_mask = and_mask;
4033 return inner;
4034 }
4035
4036 /* Return nonzero if MASK represents a mask of SIZE ones in the low-order
4037 bit positions and MASK is SIGNED. */
4038
4039 static int
4040 all_ones_mask_p (const_tree mask, unsigned int size)
4041 {
4042 tree type = TREE_TYPE (mask);
4043 unsigned int precision = TYPE_PRECISION (type);
4044
4045 /* If this function returns true when the type of the mask is
4046 UNSIGNED, then there will be errors. In particular see
4047 gcc.c-torture/execute/990326-1.c. There does not appear to be
4048 any documentation paper trail as to why this is so. But the pre
4049 wide-int worked with that restriction and it has been preserved
4050 here. */
4051 if (size > precision || TYPE_SIGN (type) == UNSIGNED)
4052 return false;
4053
4054 return wi::mask (size, false, precision) == mask;
4055 }
4056
4057 /* Subroutine for fold: determine if VAL is the INTEGER_CONST that
4058 represents the sign bit of EXP's type. If EXP represents a sign
4059 or zero extension, also test VAL against the unextended type.
4060 The return value is the (sub)expression whose sign bit is VAL,
4061 or NULL_TREE otherwise. */
4062
4063 tree
4064 sign_bit_p (tree exp, const_tree val)
4065 {
4066 int width;
4067 tree t;
4068
4069 /* Tree EXP must have an integral type. */
4070 t = TREE_TYPE (exp);
4071 if (! INTEGRAL_TYPE_P (t))
4072 return NULL_TREE;
4073
4074 /* Tree VAL must be an integer constant. */
4075 if (TREE_CODE (val) != INTEGER_CST
4076 || TREE_OVERFLOW (val))
4077 return NULL_TREE;
4078
4079 width = TYPE_PRECISION (t);
4080 if (wi::only_sign_bit_p (val, width))
4081 return exp;
4082
4083 /* Handle extension from a narrower type. */
4084 if (TREE_CODE (exp) == NOP_EXPR
4085 && TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (exp, 0))) < width)
4086 return sign_bit_p (TREE_OPERAND (exp, 0), val);
4087
4088 return NULL_TREE;
4089 }
4090
4091 /* Subroutine for fold_truth_andor_1: determine if an operand is simple enough
4092 to be evaluated unconditionally. */
4093
4094 static int
4095 simple_operand_p (const_tree exp)
4096 {
4097 /* Strip any conversions that don't change the machine mode. */
4098 STRIP_NOPS (exp);
4099
4100 return (CONSTANT_CLASS_P (exp)
4101 || TREE_CODE (exp) == SSA_NAME
4102 || (DECL_P (exp)
4103 && ! TREE_ADDRESSABLE (exp)
4104 && ! TREE_THIS_VOLATILE (exp)
4105 && ! DECL_NONLOCAL (exp)
4106 /* Don't regard global variables as simple. They may be
4107 allocated in ways unknown to the compiler (shared memory,
4108 #pragma weak, etc). */
4109 && ! TREE_PUBLIC (exp)
4110 && ! DECL_EXTERNAL (exp)
4111 /* Weakrefs are not safe to be read, since they can be NULL.
4112 They are !TREE_PUBLIC && !DECL_EXTERNAL but still
4113 have DECL_WEAK flag set. */
4114 && (! VAR_OR_FUNCTION_DECL_P (exp) || ! DECL_WEAK (exp))
4115 /* Loading a static variable is unduly expensive, but global
4116 registers aren't expensive. */
4117 && (! TREE_STATIC (exp) || DECL_REGISTER (exp))));
4118 }
4119
4120 /* Subroutine for fold_truth_andor: determine if an operand is simple enough
4121 to be evaluated unconditionally.
4122 I addition to simple_operand_p, we assume that comparisons, conversions,
4123 and logic-not operations are simple, if their operands are simple, too. */
4124
4125 static bool
4126 simple_operand_p_2 (tree exp)
4127 {
4128 enum tree_code code;
4129
4130 if (TREE_SIDE_EFFECTS (exp)
4131 || tree_could_trap_p (exp))
4132 return false;
4133
4134 while (CONVERT_EXPR_P (exp))
4135 exp = TREE_OPERAND (exp, 0);
4136
4137 code = TREE_CODE (exp);
4138
4139 if (TREE_CODE_CLASS (code) == tcc_comparison)
4140 return (simple_operand_p (TREE_OPERAND (exp, 0))
4141 && simple_operand_p (TREE_OPERAND (exp, 1)));
4142
4143 if (code == TRUTH_NOT_EXPR)
4144 return simple_operand_p_2 (TREE_OPERAND (exp, 0));
4145
4146 return simple_operand_p (exp);
4147 }
4148
4149 \f
4150 /* The following functions are subroutines to fold_range_test and allow it to
4151 try to change a logical combination of comparisons into a range test.
4152
4153 For example, both
4154 X == 2 || X == 3 || X == 4 || X == 5
4155 and
4156 X >= 2 && X <= 5
4157 are converted to
4158 (unsigned) (X - 2) <= 3
4159
4160 We describe each set of comparisons as being either inside or outside
4161 a range, using a variable named like IN_P, and then describe the
4162 range with a lower and upper bound. If one of the bounds is omitted,
4163 it represents either the highest or lowest value of the type.
4164
4165 In the comments below, we represent a range by two numbers in brackets
4166 preceded by a "+" to designate being inside that range, or a "-" to
4167 designate being outside that range, so the condition can be inverted by
4168 flipping the prefix. An omitted bound is represented by a "-". For
4169 example, "- [-, 10]" means being outside the range starting at the lowest
4170 possible value and ending at 10, in other words, being greater than 10.
4171 The range "+ [-, -]" is always true and hence the range "- [-, -]" is
4172 always false.
4173
4174 We set up things so that the missing bounds are handled in a consistent
4175 manner so neither a missing bound nor "true" and "false" need to be
4176 handled using a special case. */
4177
4178 /* Return the result of applying CODE to ARG0 and ARG1, but handle the case
4179 of ARG0 and/or ARG1 being omitted, meaning an unlimited range. UPPER0_P
4180 and UPPER1_P are nonzero if the respective argument is an upper bound
4181 and zero for a lower. TYPE, if nonzero, is the type of the result; it
4182 must be specified for a comparison. ARG1 will be converted to ARG0's
4183 type if both are specified. */
4184
4185 static tree
4186 range_binop (enum tree_code code, tree type, tree arg0, int upper0_p,
4187 tree arg1, int upper1_p)
4188 {
4189 tree tem;
4190 int result;
4191 int sgn0, sgn1;
4192
4193 /* If neither arg represents infinity, do the normal operation.
4194 Else, if not a comparison, return infinity. Else handle the special
4195 comparison rules. Note that most of the cases below won't occur, but
4196 are handled for consistency. */
4197
4198 if (arg0 != 0 && arg1 != 0)
4199 {
4200 tem = fold_build2 (code, type != 0 ? type : TREE_TYPE (arg0),
4201 arg0, fold_convert (TREE_TYPE (arg0), arg1));
4202 STRIP_NOPS (tem);
4203 return TREE_CODE (tem) == INTEGER_CST ? tem : 0;
4204 }
4205
4206 if (TREE_CODE_CLASS (code) != tcc_comparison)
4207 return 0;
4208
4209 /* Set SGN[01] to -1 if ARG[01] is a lower bound, 1 for upper, and 0
4210 for neither. In real maths, we cannot assume open ended ranges are
4211 the same. But, this is computer arithmetic, where numbers are finite.
4212 We can therefore make the transformation of any unbounded range with
4213 the value Z, Z being greater than any representable number. This permits
4214 us to treat unbounded ranges as equal. */
4215 sgn0 = arg0 != 0 ? 0 : (upper0_p ? 1 : -1);
4216 sgn1 = arg1 != 0 ? 0 : (upper1_p ? 1 : -1);
4217 switch (code)
4218 {
4219 case EQ_EXPR:
4220 result = sgn0 == sgn1;
4221 break;
4222 case NE_EXPR:
4223 result = sgn0 != sgn1;
4224 break;
4225 case LT_EXPR:
4226 result = sgn0 < sgn1;
4227 break;
4228 case LE_EXPR:
4229 result = sgn0 <= sgn1;
4230 break;
4231 case GT_EXPR:
4232 result = sgn0 > sgn1;
4233 break;
4234 case GE_EXPR:
4235 result = sgn0 >= sgn1;
4236 break;
4237 default:
4238 gcc_unreachable ();
4239 }
4240
4241 return constant_boolean_node (result, type);
4242 }
4243 \f
4244 /* Helper routine for make_range. Perform one step for it, return
4245 new expression if the loop should continue or NULL_TREE if it should
4246 stop. */
4247
4248 tree
4249 make_range_step (location_t loc, enum tree_code code, tree arg0, tree arg1,
4250 tree exp_type, tree *p_low, tree *p_high, int *p_in_p,
4251 bool *strict_overflow_p)
4252 {
4253 tree arg0_type = TREE_TYPE (arg0);
4254 tree n_low, n_high, low = *p_low, high = *p_high;
4255 int in_p = *p_in_p, n_in_p;
4256
4257 switch (code)
4258 {
4259 case TRUTH_NOT_EXPR:
4260 /* We can only do something if the range is testing for zero. */
4261 if (low == NULL_TREE || high == NULL_TREE
4262 || ! integer_zerop (low) || ! integer_zerop (high))
4263 return NULL_TREE;
4264 *p_in_p = ! in_p;
4265 return arg0;
4266
4267 case EQ_EXPR: case NE_EXPR:
4268 case LT_EXPR: case LE_EXPR: case GE_EXPR: case GT_EXPR:
4269 /* We can only do something if the range is testing for zero
4270 and if the second operand is an integer constant. Note that
4271 saying something is "in" the range we make is done by
4272 complementing IN_P since it will set in the initial case of
4273 being not equal to zero; "out" is leaving it alone. */
4274 if (low == NULL_TREE || high == NULL_TREE
4275 || ! integer_zerop (low) || ! integer_zerop (high)
4276 || TREE_CODE (arg1) != INTEGER_CST)
4277 return NULL_TREE;
4278
4279 switch (code)
4280 {
4281 case NE_EXPR: /* - [c, c] */
4282 low = high = arg1;
4283 break;
4284 case EQ_EXPR: /* + [c, c] */
4285 in_p = ! in_p, low = high = arg1;
4286 break;
4287 case GT_EXPR: /* - [-, c] */
4288 low = 0, high = arg1;
4289 break;
4290 case GE_EXPR: /* + [c, -] */
4291 in_p = ! in_p, low = arg1, high = 0;
4292 break;
4293 case LT_EXPR: /* - [c, -] */
4294 low = arg1, high = 0;
4295 break;
4296 case LE_EXPR: /* + [-, c] */
4297 in_p = ! in_p, low = 0, high = arg1;
4298 break;
4299 default:
4300 gcc_unreachable ();
4301 }
4302
4303 /* If this is an unsigned comparison, we also know that EXP is
4304 greater than or equal to zero. We base the range tests we make
4305 on that fact, so we record it here so we can parse existing
4306 range tests. We test arg0_type since often the return type
4307 of, e.g. EQ_EXPR, is boolean. */
4308 if (TYPE_UNSIGNED (arg0_type) && (low == 0 || high == 0))
4309 {
4310 if (! merge_ranges (&n_in_p, &n_low, &n_high,
4311 in_p, low, high, 1,
4312 build_int_cst (arg0_type, 0),
4313 NULL_TREE))
4314 return NULL_TREE;
4315
4316 in_p = n_in_p, low = n_low, high = n_high;
4317
4318 /* If the high bound is missing, but we have a nonzero low
4319 bound, reverse the range so it goes from zero to the low bound
4320 minus 1. */
4321 if (high == 0 && low && ! integer_zerop (low))
4322 {
4323 in_p = ! in_p;
4324 high = range_binop (MINUS_EXPR, NULL_TREE, low, 0,
4325 build_int_cst (TREE_TYPE (low), 1), 0);
4326 low = build_int_cst (arg0_type, 0);
4327 }
4328 }
4329
4330 *p_low = low;
4331 *p_high = high;
4332 *p_in_p = in_p;
4333 return arg0;
4334
4335 case NEGATE_EXPR:
4336 /* If flag_wrapv and ARG0_TYPE is signed, make sure
4337 low and high are non-NULL, then normalize will DTRT. */
4338 if (!TYPE_UNSIGNED (arg0_type)
4339 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4340 {
4341 if (low == NULL_TREE)
4342 low = TYPE_MIN_VALUE (arg0_type);
4343 if (high == NULL_TREE)
4344 high = TYPE_MAX_VALUE (arg0_type);
4345 }
4346
4347 /* (-x) IN [a,b] -> x in [-b, -a] */
4348 n_low = range_binop (MINUS_EXPR, exp_type,
4349 build_int_cst (exp_type, 0),
4350 0, high, 1);
4351 n_high = range_binop (MINUS_EXPR, exp_type,
4352 build_int_cst (exp_type, 0),
4353 0, low, 0);
4354 if (n_high != 0 && TREE_OVERFLOW (n_high))
4355 return NULL_TREE;
4356 goto normalize;
4357
4358 case BIT_NOT_EXPR:
4359 /* ~ X -> -X - 1 */
4360 return build2_loc (loc, MINUS_EXPR, exp_type, negate_expr (arg0),
4361 build_int_cst (exp_type, 1));
4362
4363 case PLUS_EXPR:
4364 case MINUS_EXPR:
4365 if (TREE_CODE (arg1) != INTEGER_CST)
4366 return NULL_TREE;
4367
4368 /* If flag_wrapv and ARG0_TYPE is signed, then we cannot
4369 move a constant to the other side. */
4370 if (!TYPE_UNSIGNED (arg0_type)
4371 && !TYPE_OVERFLOW_UNDEFINED (arg0_type))
4372 return NULL_TREE;
4373
4374 /* If EXP is signed, any overflow in the computation is undefined,
4375 so we don't worry about it so long as our computations on
4376 the bounds don't overflow. For unsigned, overflow is defined
4377 and this is exactly the right thing. */
4378 n_low = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4379 arg0_type, low, 0, arg1, 0);
4380 n_high = range_binop (code == MINUS_EXPR ? PLUS_EXPR : MINUS_EXPR,
4381 arg0_type, high, 1, arg1, 0);
4382 if ((n_low != 0 && TREE_OVERFLOW (n_low))
4383 || (n_high != 0 && TREE_OVERFLOW (n_high)))
4384 return NULL_TREE;
4385
4386 if (TYPE_OVERFLOW_UNDEFINED (arg0_type))
4387 *strict_overflow_p = true;
4388
4389 normalize:
4390 /* Check for an unsigned range which has wrapped around the maximum
4391 value thus making n_high < n_low, and normalize it. */
4392 if (n_low && n_high && tree_int_cst_lt (n_high, n_low))
4393 {
4394 low = range_binop (PLUS_EXPR, arg0_type, n_high, 0,
4395 build_int_cst (TREE_TYPE (n_high), 1), 0);
4396 high = range_binop (MINUS_EXPR, arg0_type, n_low, 0,
4397 build_int_cst (TREE_TYPE (n_low), 1), 0);
4398
4399 /* If the range is of the form +/- [ x+1, x ], we won't
4400 be able to normalize it. But then, it represents the
4401 whole range or the empty set, so make it
4402 +/- [ -, - ]. */
4403 if (tree_int_cst_equal (n_low, low)
4404 && tree_int_cst_equal (n_high, high))
4405 low = high = 0;
4406 else
4407 in_p = ! in_p;
4408 }
4409 else
4410 low = n_low, high = n_high;
4411
4412 *p_low = low;
4413 *p_high = high;
4414 *p_in_p = in_p;
4415 return arg0;
4416
4417 CASE_CONVERT:
4418 case NON_LVALUE_EXPR:
4419 if (TYPE_PRECISION (arg0_type) > TYPE_PRECISION (exp_type))
4420 return NULL_TREE;
4421
4422 if (! INTEGRAL_TYPE_P (arg0_type)
4423 || (low != 0 && ! int_fits_type_p (low, arg0_type))
4424 || (high != 0 && ! int_fits_type_p (high, arg0_type)))
4425 return NULL_TREE;
4426
4427 n_low = low, n_high = high;
4428
4429 if (n_low != 0)
4430 n_low = fold_convert_loc (loc, arg0_type, n_low);
4431
4432 if (n_high != 0)
4433 n_high = fold_convert_loc (loc, arg0_type, n_high);
4434
4435 /* If we're converting arg0 from an unsigned type, to exp,
4436 a signed type, we will be doing the comparison as unsigned.
4437 The tests above have already verified that LOW and HIGH
4438 are both positive.
4439
4440 So we have to ensure that we will handle large unsigned
4441 values the same way that the current signed bounds treat
4442 negative values. */
4443
4444 if (!TYPE_UNSIGNED (exp_type) && TYPE_UNSIGNED (arg0_type))
4445 {
4446 tree high_positive;
4447 tree equiv_type;
4448 /* For fixed-point modes, we need to pass the saturating flag
4449 as the 2nd parameter. */
4450 if (ALL_FIXED_POINT_MODE_P (TYPE_MODE (arg0_type)))
4451 equiv_type
4452 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type),
4453 TYPE_SATURATING (arg0_type));
4454 else
4455 equiv_type
4456 = lang_hooks.types.type_for_mode (TYPE_MODE (arg0_type), 1);
4457
4458 /* A range without an upper bound is, naturally, unbounded.
4459 Since convert would have cropped a very large value, use
4460 the max value for the destination type. */
4461 high_positive
4462 = TYPE_MAX_VALUE (equiv_type) ? TYPE_MAX_VALUE (equiv_type)
4463 : TYPE_MAX_VALUE (arg0_type);
4464
4465 if (TYPE_PRECISION (exp_type) == TYPE_PRECISION (arg0_type))
4466 high_positive = fold_build2_loc (loc, RSHIFT_EXPR, arg0_type,
4467 fold_convert_loc (loc, arg0_type,
4468 high_positive),
4469 build_int_cst (arg0_type, 1));
4470
4471 /* If the low bound is specified, "and" the range with the
4472 range for which the original unsigned value will be
4473 positive. */
4474 if (low != 0)
4475 {
4476 if (! merge_ranges (&n_in_p, &n_low, &n_high, 1, n_low, n_high,
4477 1, fold_convert_loc (loc, arg0_type,
4478 integer_zero_node),
4479 high_positive))
4480 return NULL_TREE;
4481
4482 in_p = (n_in_p == in_p);
4483 }
4484 else
4485 {
4486 /* Otherwise, "or" the range with the range of the input
4487 that will be interpreted as negative. */
4488 if (! merge_ranges (&n_in_p, &n_low, &n_high, 0, n_low, n_high,
4489 1, fold_convert_loc (loc, arg0_type,
4490 integer_zero_node),
4491 high_positive))
4492 return NULL_TREE;
4493
4494 in_p = (in_p != n_in_p);
4495 }
4496 }
4497
4498 *p_low = n_low;
4499 *p_high = n_high;
4500 *p_in_p = in_p;
4501 return arg0;
4502
4503 default:
4504 return NULL_TREE;
4505 }
4506 }
4507
4508 /* Given EXP, a logical expression, set the range it is testing into
4509 variables denoted by PIN_P, PLOW, and PHIGH. Return the expression
4510 actually being tested. *PLOW and *PHIGH will be made of the same
4511 type as the returned expression. If EXP is not a comparison, we
4512 will most likely not be returning a useful value and range. Set
4513 *STRICT_OVERFLOW_P to true if the return value is only valid
4514 because signed overflow is undefined; otherwise, do not change
4515 *STRICT_OVERFLOW_P. */
4516
4517 tree
4518 make_range (tree exp, int *pin_p, tree *plow, tree *phigh,
4519 bool *strict_overflow_p)
4520 {
4521 enum tree_code code;
4522 tree arg0, arg1 = NULL_TREE;
4523 tree exp_type, nexp;
4524 int in_p;
4525 tree low, high;
4526 location_t loc = EXPR_LOCATION (exp);
4527
4528 /* Start with simply saying "EXP != 0" and then look at the code of EXP
4529 and see if we can refine the range. Some of the cases below may not
4530 happen, but it doesn't seem worth worrying about this. We "continue"
4531 the outer loop when we've changed something; otherwise we "break"
4532 the switch, which will "break" the while. */
4533
4534 in_p = 0;
4535 low = high = build_int_cst (TREE_TYPE (exp), 0);
4536
4537 while (1)
4538 {
4539 code = TREE_CODE (exp);
4540 exp_type = TREE_TYPE (exp);
4541 arg0 = NULL_TREE;
4542
4543 if (IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code)))
4544 {
4545 if (TREE_OPERAND_LENGTH (exp) > 0)
4546 arg0 = TREE_OPERAND (exp, 0);
4547 if (TREE_CODE_CLASS (code) == tcc_binary
4548 || TREE_CODE_CLASS (code) == tcc_comparison
4549 || (TREE_CODE_CLASS (code) == tcc_expression
4550 && TREE_OPERAND_LENGTH (exp) > 1))
4551 arg1 = TREE_OPERAND (exp, 1);
4552 }
4553 if (arg0 == NULL_TREE)
4554 break;
4555
4556 nexp = make_range_step (loc, code, arg0, arg1, exp_type, &low,
4557 &high, &in_p, strict_overflow_p);
4558 if (nexp == NULL_TREE)
4559 break;
4560 exp = nexp;
4561 }
4562
4563 /* If EXP is a constant, we can evaluate whether this is true or false. */
4564 if (TREE_CODE (exp) == INTEGER_CST)
4565 {
4566 in_p = in_p == (integer_onep (range_binop (GE_EXPR, integer_type_node,
4567 exp, 0, low, 0))
4568 && integer_onep (range_binop (LE_EXPR, integer_type_node,
4569 exp, 1, high, 1)));
4570 low = high = 0;
4571 exp = 0;
4572 }
4573
4574 *pin_p = in_p, *plow = low, *phigh = high;
4575 return exp;
4576 }
4577 \f
4578 /* Given a range, LOW, HIGH, and IN_P, an expression, EXP, and a result
4579 type, TYPE, return an expression to test if EXP is in (or out of, depending
4580 on IN_P) the range. Return 0 if the test couldn't be created. */
4581
4582 tree
4583 build_range_check (location_t loc, tree type, tree exp, int in_p,
4584 tree low, tree high)
4585 {
4586 tree etype = TREE_TYPE (exp), value;
4587
4588 /* Disable this optimization for function pointer expressions
4589 on targets that require function pointer canonicalization. */
4590 if (targetm.have_canonicalize_funcptr_for_compare ()
4591 && TREE_CODE (etype) == POINTER_TYPE
4592 && TREE_CODE (TREE_TYPE (etype)) == FUNCTION_TYPE)
4593 return NULL_TREE;
4594
4595 if (! in_p)
4596 {
4597 value = build_range_check (loc, type, exp, 1, low, high);
4598 if (value != 0)
4599 return invert_truthvalue_loc (loc, value);
4600
4601 return 0;
4602 }
4603
4604 if (low == 0 && high == 0)
4605 return omit_one_operand_loc (loc, type, build_int_cst (type, 1), exp);
4606
4607 if (low == 0)
4608 return fold_build2_loc (loc, LE_EXPR, type, exp,
4609 fold_convert_loc (loc, etype, high));
4610
4611 if (high == 0)
4612 return fold_build2_loc (loc, GE_EXPR, type, exp,
4613 fold_convert_loc (loc, etype, low));
4614
4615 if (operand_equal_p (low, high, 0))
4616 return fold_build2_loc (loc, EQ_EXPR, type, exp,
4617 fold_convert_loc (loc, etype, low));
4618
4619 if (integer_zerop (low))
4620 {
4621 if (! TYPE_UNSIGNED (etype))
4622 {
4623 etype = unsigned_type_for (etype);
4624 high = fold_convert_loc (loc, etype, high);
4625 exp = fold_convert_loc (loc, etype, exp);
4626 }
4627 return build_range_check (loc, type, exp, 1, 0, high);
4628 }
4629
4630 /* Optimize (c>=1) && (c<=127) into (signed char)c > 0. */
4631 if (integer_onep (low) && TREE_CODE (high) == INTEGER_CST)
4632 {
4633 int prec = TYPE_PRECISION (etype);
4634
4635 if (wi::mask (prec - 1, false, prec) == high)
4636 {
4637 if (TYPE_UNSIGNED (etype))
4638 {
4639 tree signed_etype = signed_type_for (etype);
4640 if (TYPE_PRECISION (signed_etype) != TYPE_PRECISION (etype))
4641 etype
4642 = build_nonstandard_integer_type (TYPE_PRECISION (etype), 0);
4643 else
4644 etype = signed_etype;
4645 exp = fold_convert_loc (loc, etype, exp);
4646 }
4647 return fold_build2_loc (loc, GT_EXPR, type, exp,
4648 build_int_cst (etype, 0));
4649 }
4650 }
4651
4652 /* Optimize (c>=low) && (c<=high) into (c-low>=0) && (c-low<=high-low).
4653 This requires wrap-around arithmetics for the type of the expression.
4654 First make sure that arithmetics in this type is valid, then make sure
4655 that it wraps around. */
4656 if (TREE_CODE (etype) == ENUMERAL_TYPE || TREE_CODE (etype) == BOOLEAN_TYPE)
4657 etype = lang_hooks.types.type_for_size (TYPE_PRECISION (etype),
4658 TYPE_UNSIGNED (etype));
4659
4660 if (TREE_CODE (etype) == INTEGER_TYPE && !TYPE_OVERFLOW_WRAPS (etype))
4661 {
4662 tree utype, minv, maxv;
4663
4664 /* Check if (unsigned) INT_MAX + 1 == (unsigned) INT_MIN
4665 for the type in question, as we rely on this here. */
4666 utype = unsigned_type_for (etype);
4667 maxv = fold_convert_loc (loc, utype, TYPE_MAX_VALUE (etype));
4668 maxv = range_binop (PLUS_EXPR, NULL_TREE, maxv, 1,
4669 build_int_cst (TREE_TYPE (maxv), 1), 1);
4670 minv = fold_convert_loc (loc, utype, TYPE_MIN_VALUE (etype));
4671
4672 if (integer_zerop (range_binop (NE_EXPR, integer_type_node,
4673 minv, 1, maxv, 1)))
4674 etype = utype;
4675 else
4676 return 0;
4677 }
4678
4679 high = fold_convert_loc (loc, etype, high);
4680 low = fold_convert_loc (loc, etype, low);
4681 exp = fold_convert_loc (loc, etype, exp);
4682
4683 value = const_binop (MINUS_EXPR, high, low);
4684
4685
4686 if (POINTER_TYPE_P (etype))
4687 {
4688 if (value != 0 && !TREE_OVERFLOW (value))
4689 {
4690 low = fold_build1_loc (loc, NEGATE_EXPR, TREE_TYPE (low), low);
4691 return build_range_check (loc, type,
4692 fold_build_pointer_plus_loc (loc, exp, low),
4693 1, build_int_cst (etype, 0), value);
4694 }
4695 return 0;
4696 }
4697
4698 if (value != 0 && !TREE_OVERFLOW (value))
4699 return build_range_check (loc, type,
4700 fold_build2_loc (loc, MINUS_EXPR, etype, exp, low),
4701 1, build_int_cst (etype, 0), value);
4702
4703 return 0;
4704 }
4705 \f
4706 /* Return the predecessor of VAL in its type, handling the infinite case. */
4707
4708 static tree
4709 range_predecessor (tree val)
4710 {
4711 tree type = TREE_TYPE (val);
4712
4713 if (INTEGRAL_TYPE_P (type)
4714 && operand_equal_p (val, TYPE_MIN_VALUE (type), 0))
4715 return 0;
4716 else
4717 return range_binop (MINUS_EXPR, NULL_TREE, val, 0,
4718 build_int_cst (TREE_TYPE (val), 1), 0);
4719 }
4720
4721 /* Return the successor of VAL in its type, handling the infinite case. */
4722
4723 static tree
4724 range_successor (tree val)
4725 {
4726 tree type = TREE_TYPE (val);
4727
4728 if (INTEGRAL_TYPE_P (type)
4729 && operand_equal_p (val, TYPE_MAX_VALUE (type), 0))
4730 return 0;
4731 else
4732 return range_binop (PLUS_EXPR, NULL_TREE, val, 0,
4733 build_int_cst (TREE_TYPE (val), 1), 0);
4734 }
4735
4736 /* Given two ranges, see if we can merge them into one. Return 1 if we
4737 can, 0 if we can't. Set the output range into the specified parameters. */
4738
4739 bool
4740 merge_ranges (int *pin_p, tree *plow, tree *phigh, int in0_p, tree low0,
4741 tree high0, int in1_p, tree low1, tree high1)
4742 {
4743 int no_overlap;
4744 int subset;
4745 int temp;
4746 tree tem;
4747 int in_p;
4748 tree low, high;
4749 int lowequal = ((low0 == 0 && low1 == 0)
4750 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4751 low0, 0, low1, 0)));
4752 int highequal = ((high0 == 0 && high1 == 0)
4753 || integer_onep (range_binop (EQ_EXPR, integer_type_node,
4754 high0, 1, high1, 1)));
4755
4756 /* Make range 0 be the range that starts first, or ends last if they
4757 start at the same value. Swap them if it isn't. */
4758 if (integer_onep (range_binop (GT_EXPR, integer_type_node,
4759 low0, 0, low1, 0))
4760 || (lowequal
4761 && integer_onep (range_binop (GT_EXPR, integer_type_node,
4762 high1, 1, high0, 1))))
4763 {
4764 temp = in0_p, in0_p = in1_p, in1_p = temp;
4765 tem = low0, low0 = low1, low1 = tem;
4766 tem = high0, high0 = high1, high1 = tem;
4767 }
4768
4769 /* Now flag two cases, whether the ranges are disjoint or whether the
4770 second range is totally subsumed in the first. Note that the tests
4771 below are simplified by the ones above. */
4772 no_overlap = integer_onep (range_binop (LT_EXPR, integer_type_node,
4773 high0, 1, low1, 0));
4774 subset = integer_onep (range_binop (LE_EXPR, integer_type_node,
4775 high1, 1, high0, 1));
4776
4777 /* We now have four cases, depending on whether we are including or
4778 excluding the two ranges. */
4779 if (in0_p && in1_p)
4780 {
4781 /* If they don't overlap, the result is false. If the second range
4782 is a subset it is the result. Otherwise, the range is from the start
4783 of the second to the end of the first. */
4784 if (no_overlap)
4785 in_p = 0, low = high = 0;
4786 else if (subset)
4787 in_p = 1, low = low1, high = high1;
4788 else
4789 in_p = 1, low = low1, high = high0;
4790 }
4791
4792 else if (in0_p && ! in1_p)
4793 {
4794 /* If they don't overlap, the result is the first range. If they are
4795 equal, the result is false. If the second range is a subset of the
4796 first, and the ranges begin at the same place, we go from just after
4797 the end of the second range to the end of the first. If the second
4798 range is not a subset of the first, or if it is a subset and both
4799 ranges end at the same place, the range starts at the start of the
4800 first range and ends just before the second range.
4801 Otherwise, we can't describe this as a single range. */
4802 if (no_overlap)
4803 in_p = 1, low = low0, high = high0;
4804 else if (lowequal && highequal)
4805 in_p = 0, low = high = 0;
4806 else if (subset && lowequal)
4807 {
4808 low = range_successor (high1);
4809 high = high0;
4810 in_p = 1;
4811 if (low == 0)
4812 {
4813 /* We are in the weird situation where high0 > high1 but
4814 high1 has no successor. Punt. */
4815 return 0;
4816 }
4817 }
4818 else if (! subset || highequal)
4819 {
4820 low = low0;
4821 high = range_predecessor (low1);
4822 in_p = 1;
4823 if (high == 0)
4824 {
4825 /* low0 < low1 but low1 has no predecessor. Punt. */
4826 return 0;
4827 }
4828 }
4829 else
4830 return 0;
4831 }
4832
4833 else if (! in0_p && in1_p)
4834 {
4835 /* If they don't overlap, the result is the second range. If the second
4836 is a subset of the first, the result is false. Otherwise,
4837 the range starts just after the first range and ends at the
4838 end of the second. */
4839 if (no_overlap)
4840 in_p = 1, low = low1, high = high1;
4841 else if (subset || highequal)
4842 in_p = 0, low = high = 0;
4843 else
4844 {
4845 low = range_successor (high0);
4846 high = high1;
4847 in_p = 1;
4848 if (low == 0)
4849 {
4850 /* high1 > high0 but high0 has no successor. Punt. */
4851 return 0;
4852 }
4853 }
4854 }
4855
4856 else
4857 {
4858 /* The case where we are excluding both ranges. Here the complex case
4859 is if they don't overlap. In that case, the only time we have a
4860 range is if they are adjacent. If the second is a subset of the
4861 first, the result is the first. Otherwise, the range to exclude
4862 starts at the beginning of the first range and ends at the end of the
4863 second. */
4864 if (no_overlap)
4865 {
4866 if (integer_onep (range_binop (EQ_EXPR, integer_type_node,
4867 range_successor (high0),
4868 1, low1, 0)))
4869 in_p = 0, low = low0, high = high1;
4870 else
4871 {
4872 /* Canonicalize - [min, x] into - [-, x]. */
4873 if (low0 && TREE_CODE (low0) == INTEGER_CST)
4874 switch (TREE_CODE (TREE_TYPE (low0)))
4875 {
4876 case ENUMERAL_TYPE:
4877 if (TYPE_PRECISION (TREE_TYPE (low0))
4878 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (low0))))
4879 break;
4880 /* FALLTHROUGH */
4881 case INTEGER_TYPE:
4882 if (tree_int_cst_equal (low0,
4883 TYPE_MIN_VALUE (TREE_TYPE (low0))))
4884 low0 = 0;
4885 break;
4886 case POINTER_TYPE:
4887 if (TYPE_UNSIGNED (TREE_TYPE (low0))
4888 && integer_zerop (low0))
4889 low0 = 0;
4890 break;
4891 default:
4892 break;
4893 }
4894
4895 /* Canonicalize - [x, max] into - [x, -]. */
4896 if (high1 && TREE_CODE (high1) == INTEGER_CST)
4897 switch (TREE_CODE (TREE_TYPE (high1)))
4898 {
4899 case ENUMERAL_TYPE:
4900 if (TYPE_PRECISION (TREE_TYPE (high1))
4901 != GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (high1))))
4902 break;
4903 /* FALLTHROUGH */
4904 case INTEGER_TYPE:
4905 if (tree_int_cst_equal (high1,
4906 TYPE_MAX_VALUE (TREE_TYPE (high1))))
4907 high1 = 0;
4908 break;
4909 case POINTER_TYPE:
4910 if (TYPE_UNSIGNED (TREE_TYPE (high1))
4911 && integer_zerop (range_binop (PLUS_EXPR, NULL_TREE,
4912 high1, 1,
4913 build_int_cst (TREE_TYPE (high1), 1),
4914 1)))
4915 high1 = 0;
4916 break;
4917 default:
4918 break;
4919 }
4920
4921 /* The ranges might be also adjacent between the maximum and
4922 minimum values of the given type. For
4923 - [{min,-}, x] and - [y, {max,-}] ranges where x + 1 < y
4924 return + [x + 1, y - 1]. */
4925 if (low0 == 0 && high1 == 0)
4926 {
4927 low = range_successor (high0);
4928 high = range_predecessor (low1);
4929 if (low == 0 || high == 0)
4930 return 0;
4931
4932 in_p = 1;
4933 }
4934 else
4935 return 0;
4936 }
4937 }
4938 else if (subset)
4939 in_p = 0, low = low0, high = high0;
4940 else
4941 in_p = 0, low = low0, high = high1;
4942 }
4943
4944 *pin_p = in_p, *plow = low, *phigh = high;
4945 return 1;
4946 }
4947 \f
4948
4949 /* Subroutine of fold, looking inside expressions of the form
4950 A op B ? A : C, where ARG0, ARG1 and ARG2 are the three operands
4951 of the COND_EXPR. This function is being used also to optimize
4952 A op B ? C : A, by reversing the comparison first.
4953
4954 Return a folded expression whose code is not a COND_EXPR
4955 anymore, or NULL_TREE if no folding opportunity is found. */
4956
4957 static tree
4958 fold_cond_expr_with_comparison (location_t loc, tree type,
4959 tree arg0, tree arg1, tree arg2)
4960 {
4961 enum tree_code comp_code = TREE_CODE (arg0);
4962 tree arg00 = TREE_OPERAND (arg0, 0);
4963 tree arg01 = TREE_OPERAND (arg0, 1);
4964 tree arg1_type = TREE_TYPE (arg1);
4965 tree tem;
4966
4967 STRIP_NOPS (arg1);
4968 STRIP_NOPS (arg2);
4969
4970 /* If we have A op 0 ? A : -A, consider applying the following
4971 transformations:
4972
4973 A == 0? A : -A same as -A
4974 A != 0? A : -A same as A
4975 A >= 0? A : -A same as abs (A)
4976 A > 0? A : -A same as abs (A)
4977 A <= 0? A : -A same as -abs (A)
4978 A < 0? A : -A same as -abs (A)
4979
4980 None of these transformations work for modes with signed
4981 zeros. If A is +/-0, the first two transformations will
4982 change the sign of the result (from +0 to -0, or vice
4983 versa). The last four will fix the sign of the result,
4984 even though the original expressions could be positive or
4985 negative, depending on the sign of A.
4986
4987 Note that all these transformations are correct if A is
4988 NaN, since the two alternatives (A and -A) are also NaNs. */
4989 if (!HONOR_SIGNED_ZEROS (element_mode (type))
4990 && (FLOAT_TYPE_P (TREE_TYPE (arg01))
4991 ? real_zerop (arg01)
4992 : integer_zerop (arg01))
4993 && ((TREE_CODE (arg2) == NEGATE_EXPR
4994 && operand_equal_p (TREE_OPERAND (arg2, 0), arg1, 0))
4995 /* In the case that A is of the form X-Y, '-A' (arg2) may
4996 have already been folded to Y-X, check for that. */
4997 || (TREE_CODE (arg1) == MINUS_EXPR
4998 && TREE_CODE (arg2) == MINUS_EXPR
4999 && operand_equal_p (TREE_OPERAND (arg1, 0),
5000 TREE_OPERAND (arg2, 1), 0)
5001 && operand_equal_p (TREE_OPERAND (arg1, 1),
5002 TREE_OPERAND (arg2, 0), 0))))
5003 switch (comp_code)
5004 {
5005 case EQ_EXPR:
5006 case UNEQ_EXPR:
5007 tem = fold_convert_loc (loc, arg1_type, arg1);
5008 return pedantic_non_lvalue_loc (loc,
5009 fold_convert_loc (loc, type,
5010 negate_expr (tem)));
5011 case NE_EXPR:
5012 case LTGT_EXPR:
5013 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5014 case UNGE_EXPR:
5015 case UNGT_EXPR:
5016 if (flag_trapping_math)
5017 break;
5018 /* Fall through. */
5019 case GE_EXPR:
5020 case GT_EXPR:
5021 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5022 break;
5023 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5024 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5025 case UNLE_EXPR:
5026 case UNLT_EXPR:
5027 if (flag_trapping_math)
5028 break;
5029 case LE_EXPR:
5030 case LT_EXPR:
5031 if (TYPE_UNSIGNED (TREE_TYPE (arg1)))
5032 break;
5033 tem = fold_build1_loc (loc, ABS_EXPR, TREE_TYPE (arg1), arg1);
5034 return negate_expr (fold_convert_loc (loc, type, tem));
5035 default:
5036 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5037 break;
5038 }
5039
5040 /* A != 0 ? A : 0 is simply A, unless A is -0. Likewise
5041 A == 0 ? A : 0 is always 0 unless A is -0. Note that
5042 both transformations are correct when A is NaN: A != 0
5043 is then true, and A == 0 is false. */
5044
5045 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5046 && integer_zerop (arg01) && integer_zerop (arg2))
5047 {
5048 if (comp_code == NE_EXPR)
5049 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5050 else if (comp_code == EQ_EXPR)
5051 return build_zero_cst (type);
5052 }
5053
5054 /* Try some transformations of A op B ? A : B.
5055
5056 A == B? A : B same as B
5057 A != B? A : B same as A
5058 A >= B? A : B same as max (A, B)
5059 A > B? A : B same as max (B, A)
5060 A <= B? A : B same as min (A, B)
5061 A < B? A : B same as min (B, A)
5062
5063 As above, these transformations don't work in the presence
5064 of signed zeros. For example, if A and B are zeros of
5065 opposite sign, the first two transformations will change
5066 the sign of the result. In the last four, the original
5067 expressions give different results for (A=+0, B=-0) and
5068 (A=-0, B=+0), but the transformed expressions do not.
5069
5070 The first two transformations are correct if either A or B
5071 is a NaN. In the first transformation, the condition will
5072 be false, and B will indeed be chosen. In the case of the
5073 second transformation, the condition A != B will be true,
5074 and A will be chosen.
5075
5076 The conversions to max() and min() are not correct if B is
5077 a number and A is not. The conditions in the original
5078 expressions will be false, so all four give B. The min()
5079 and max() versions would give a NaN instead. */
5080 if (!HONOR_SIGNED_ZEROS (element_mode (type))
5081 && operand_equal_for_comparison_p (arg01, arg2, arg00)
5082 /* Avoid these transformations if the COND_EXPR may be used
5083 as an lvalue in the C++ front-end. PR c++/19199. */
5084 && (in_gimple_form
5085 || VECTOR_TYPE_P (type)
5086 || (! lang_GNU_CXX ()
5087 && strcmp (lang_hooks.name, "GNU Objective-C++") != 0)
5088 || ! maybe_lvalue_p (arg1)
5089 || ! maybe_lvalue_p (arg2)))
5090 {
5091 tree comp_op0 = arg00;
5092 tree comp_op1 = arg01;
5093 tree comp_type = TREE_TYPE (comp_op0);
5094
5095 /* Avoid adding NOP_EXPRs in case this is an lvalue. */
5096 if (TYPE_MAIN_VARIANT (comp_type) == TYPE_MAIN_VARIANT (type))
5097 {
5098 comp_type = type;
5099 comp_op0 = arg1;
5100 comp_op1 = arg2;
5101 }
5102
5103 switch (comp_code)
5104 {
5105 case EQ_EXPR:
5106 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg2));
5107 case NE_EXPR:
5108 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
5109 case LE_EXPR:
5110 case LT_EXPR:
5111 case UNLE_EXPR:
5112 case UNLT_EXPR:
5113 /* In C++ a ?: expression can be an lvalue, so put the
5114 operand which will be used if they are equal first
5115 so that we can convert this back to the
5116 corresponding COND_EXPR. */
5117 if (!HONOR_NANS (arg1))
5118 {
5119 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5120 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5121 tem = (comp_code == LE_EXPR || comp_code == UNLE_EXPR)
5122 ? fold_build2_loc (loc, MIN_EXPR, comp_type, comp_op0, comp_op1)
5123 : fold_build2_loc (loc, MIN_EXPR, comp_type,
5124 comp_op1, comp_op0);
5125 return pedantic_non_lvalue_loc (loc,
5126 fold_convert_loc (loc, type, tem));
5127 }
5128 break;
5129 case GE_EXPR:
5130 case GT_EXPR:
5131 case UNGE_EXPR:
5132 case UNGT_EXPR:
5133 if (!HONOR_NANS (arg1))
5134 {
5135 comp_op0 = fold_convert_loc (loc, comp_type, comp_op0);
5136 comp_op1 = fold_convert_loc (loc, comp_type, comp_op1);
5137 tem = (comp_code == GE_EXPR || comp_code == UNGE_EXPR)
5138 ? fold_build2_loc (loc, MAX_EXPR, comp_type, comp_op0, comp_op1)
5139 : fold_build2_loc (loc, MAX_EXPR, comp_type,
5140 comp_op1, comp_op0);
5141 return pedantic_non_lvalue_loc (loc,
5142 fold_convert_loc (loc, type, tem));
5143 }
5144 break;
5145 case UNEQ_EXPR:
5146 if (!HONOR_NANS (arg1))
5147 return pedantic_non_lvalue_loc (loc,
5148 fold_convert_loc (loc, type, arg2));
5149 break;
5150 case LTGT_EXPR:
5151 if (!HONOR_NANS (arg1))
5152 return pedantic_non_lvalue_loc (loc,
5153 fold_convert_loc (loc, type, arg1));
5154 break;
5155 default:
5156 gcc_assert (TREE_CODE_CLASS (comp_code) == tcc_comparison);
5157 break;
5158 }
5159 }
5160
5161 /* If this is A op C1 ? A : C2 with C1 and C2 constant integers,
5162 we might still be able to simplify this. For example,
5163 if C1 is one less or one more than C2, this might have started
5164 out as a MIN or MAX and been transformed by this function.
5165 Only good for INTEGER_TYPEs, because we need TYPE_MAX_VALUE. */
5166
5167 if (INTEGRAL_TYPE_P (type)
5168 && TREE_CODE (arg01) == INTEGER_CST
5169 && TREE_CODE (arg2) == INTEGER_CST)
5170 switch (comp_code)
5171 {
5172 case EQ_EXPR:
5173 if (TREE_CODE (arg1) == INTEGER_CST)
5174 break;
5175 /* We can replace A with C1 in this case. */
5176 arg1 = fold_convert_loc (loc, type, arg01);
5177 return fold_build3_loc (loc, COND_EXPR, type, arg0, arg1, arg2);
5178
5179 case LT_EXPR:
5180 /* If C1 is C2 + 1, this is min(A, C2), but use ARG00's type for
5181 MIN_EXPR, to preserve the signedness of the comparison. */
5182 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5183 OEP_ONLY_CONST)
5184 && operand_equal_p (arg01,
5185 const_binop (PLUS_EXPR, arg2,
5186 build_int_cst (type, 1)),
5187 OEP_ONLY_CONST))
5188 {
5189 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5190 fold_convert_loc (loc, TREE_TYPE (arg00),
5191 arg2));
5192 return pedantic_non_lvalue_loc (loc,
5193 fold_convert_loc (loc, type, tem));
5194 }
5195 break;
5196
5197 case LE_EXPR:
5198 /* If C1 is C2 - 1, this is min(A, C2), with the same care
5199 as above. */
5200 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5201 OEP_ONLY_CONST)
5202 && operand_equal_p (arg01,
5203 const_binop (MINUS_EXPR, arg2,
5204 build_int_cst (type, 1)),
5205 OEP_ONLY_CONST))
5206 {
5207 tem = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (arg00), arg00,
5208 fold_convert_loc (loc, TREE_TYPE (arg00),
5209 arg2));
5210 return pedantic_non_lvalue_loc (loc,
5211 fold_convert_loc (loc, type, tem));
5212 }
5213 break;
5214
5215 case GT_EXPR:
5216 /* If C1 is C2 - 1, this is max(A, C2), but use ARG00's type for
5217 MAX_EXPR, to preserve the signedness of the comparison. */
5218 if (! operand_equal_p (arg2, TYPE_MIN_VALUE (type),
5219 OEP_ONLY_CONST)
5220 && operand_equal_p (arg01,
5221 const_binop (MINUS_EXPR, arg2,
5222 build_int_cst (type, 1)),
5223 OEP_ONLY_CONST))
5224 {
5225 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5226 fold_convert_loc (loc, TREE_TYPE (arg00),
5227 arg2));
5228 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5229 }
5230 break;
5231
5232 case GE_EXPR:
5233 /* If C1 is C2 + 1, this is max(A, C2), with the same care as above. */
5234 if (! operand_equal_p (arg2, TYPE_MAX_VALUE (type),
5235 OEP_ONLY_CONST)
5236 && operand_equal_p (arg01,
5237 const_binop (PLUS_EXPR, arg2,
5238 build_int_cst (type, 1)),
5239 OEP_ONLY_CONST))
5240 {
5241 tem = fold_build2_loc (loc, MAX_EXPR, TREE_TYPE (arg00), arg00,
5242 fold_convert_loc (loc, TREE_TYPE (arg00),
5243 arg2));
5244 return pedantic_non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
5245 }
5246 break;
5247 case NE_EXPR:
5248 break;
5249 default:
5250 gcc_unreachable ();
5251 }
5252
5253 return NULL_TREE;
5254 }
5255
5256
5257 \f
5258 #ifndef LOGICAL_OP_NON_SHORT_CIRCUIT
5259 #define LOGICAL_OP_NON_SHORT_CIRCUIT \
5260 (BRANCH_COST (optimize_function_for_speed_p (cfun), \
5261 false) >= 2)
5262 #endif
5263
5264 /* EXP is some logical combination of boolean tests. See if we can
5265 merge it into some range test. Return the new tree if so. */
5266
5267 static tree
5268 fold_range_test (location_t loc, enum tree_code code, tree type,
5269 tree op0, tree op1)
5270 {
5271 int or_op = (code == TRUTH_ORIF_EXPR
5272 || code == TRUTH_OR_EXPR);
5273 int in0_p, in1_p, in_p;
5274 tree low0, low1, low, high0, high1, high;
5275 bool strict_overflow_p = false;
5276 tree tem, lhs, rhs;
5277 const char * const warnmsg = G_("assuming signed overflow does not occur "
5278 "when simplifying range test");
5279
5280 if (!INTEGRAL_TYPE_P (type))
5281 return 0;
5282
5283 lhs = make_range (op0, &in0_p, &low0, &high0, &strict_overflow_p);
5284 rhs = make_range (op1, &in1_p, &low1, &high1, &strict_overflow_p);
5285
5286 /* If this is an OR operation, invert both sides; we will invert
5287 again at the end. */
5288 if (or_op)
5289 in0_p = ! in0_p, in1_p = ! in1_p;
5290
5291 /* If both expressions are the same, if we can merge the ranges, and we
5292 can build the range test, return it or it inverted. If one of the
5293 ranges is always true or always false, consider it to be the same
5294 expression as the other. */
5295 if ((lhs == 0 || rhs == 0 || operand_equal_p (lhs, rhs, 0))
5296 && merge_ranges (&in_p, &low, &high, in0_p, low0, high0,
5297 in1_p, low1, high1)
5298 && 0 != (tem = (build_range_check (loc, type,
5299 lhs != 0 ? lhs
5300 : rhs != 0 ? rhs : integer_zero_node,
5301 in_p, low, high))))
5302 {
5303 if (strict_overflow_p)
5304 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
5305 return or_op ? invert_truthvalue_loc (loc, tem) : tem;
5306 }
5307
5308 /* On machines where the branch cost is expensive, if this is a
5309 short-circuited branch and the underlying object on both sides
5310 is the same, make a non-short-circuit operation. */
5311 else if (LOGICAL_OP_NON_SHORT_CIRCUIT
5312 && lhs != 0 && rhs != 0
5313 && (code == TRUTH_ANDIF_EXPR
5314 || code == TRUTH_ORIF_EXPR)
5315 && operand_equal_p (lhs, rhs, 0))
5316 {
5317 /* If simple enough, just rewrite. Otherwise, make a SAVE_EXPR
5318 unless we are at top level or LHS contains a PLACEHOLDER_EXPR, in
5319 which cases we can't do this. */
5320 if (simple_operand_p (lhs))
5321 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5322 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5323 type, op0, op1);
5324
5325 else if (!lang_hooks.decls.global_bindings_p ()
5326 && !CONTAINS_PLACEHOLDER_P (lhs))
5327 {
5328 tree common = save_expr (lhs);
5329
5330 if (0 != (lhs = build_range_check (loc, type, common,
5331 or_op ? ! in0_p : in0_p,
5332 low0, high0))
5333 && (0 != (rhs = build_range_check (loc, type, common,
5334 or_op ? ! in1_p : in1_p,
5335 low1, high1))))
5336 {
5337 if (strict_overflow_p)
5338 fold_overflow_warning (warnmsg,
5339 WARN_STRICT_OVERFLOW_COMPARISON);
5340 return build2_loc (loc, code == TRUTH_ANDIF_EXPR
5341 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR,
5342 type, lhs, rhs);
5343 }
5344 }
5345 }
5346
5347 return 0;
5348 }
5349 \f
5350 /* Subroutine for fold_truth_andor_1: C is an INTEGER_CST interpreted as a P
5351 bit value. Arrange things so the extra bits will be set to zero if and
5352 only if C is signed-extended to its full width. If MASK is nonzero,
5353 it is an INTEGER_CST that should be AND'ed with the extra bits. */
5354
5355 static tree
5356 unextend (tree c, int p, int unsignedp, tree mask)
5357 {
5358 tree type = TREE_TYPE (c);
5359 int modesize = GET_MODE_BITSIZE (TYPE_MODE (type));
5360 tree temp;
5361
5362 if (p == modesize || unsignedp)
5363 return c;
5364
5365 /* We work by getting just the sign bit into the low-order bit, then
5366 into the high-order bit, then sign-extend. We then XOR that value
5367 with C. */
5368 temp = build_int_cst (TREE_TYPE (c), wi::extract_uhwi (c, p - 1, 1));
5369
5370 /* We must use a signed type in order to get an arithmetic right shift.
5371 However, we must also avoid introducing accidental overflows, so that
5372 a subsequent call to integer_zerop will work. Hence we must
5373 do the type conversion here. At this point, the constant is either
5374 zero or one, and the conversion to a signed type can never overflow.
5375 We could get an overflow if this conversion is done anywhere else. */
5376 if (TYPE_UNSIGNED (type))
5377 temp = fold_convert (signed_type_for (type), temp);
5378
5379 temp = const_binop (LSHIFT_EXPR, temp, size_int (modesize - 1));
5380 temp = const_binop (RSHIFT_EXPR, temp, size_int (modesize - p - 1));
5381 if (mask != 0)
5382 temp = const_binop (BIT_AND_EXPR, temp,
5383 fold_convert (TREE_TYPE (c), mask));
5384 /* If necessary, convert the type back to match the type of C. */
5385 if (TYPE_UNSIGNED (type))
5386 temp = fold_convert (type, temp);
5387
5388 return fold_convert (type, const_binop (BIT_XOR_EXPR, c, temp));
5389 }
5390 \f
5391 /* For an expression that has the form
5392 (A && B) || ~B
5393 or
5394 (A || B) && ~B,
5395 we can drop one of the inner expressions and simplify to
5396 A || ~B
5397 or
5398 A && ~B
5399 LOC is the location of the resulting expression. OP is the inner
5400 logical operation; the left-hand side in the examples above, while CMPOP
5401 is the right-hand side. RHS_ONLY is used to prevent us from accidentally
5402 removing a condition that guards another, as in
5403 (A != NULL && A->...) || A == NULL
5404 which we must not transform. If RHS_ONLY is true, only eliminate the
5405 right-most operand of the inner logical operation. */
5406
5407 static tree
5408 merge_truthop_with_opposite_arm (location_t loc, tree op, tree cmpop,
5409 bool rhs_only)
5410 {
5411 tree type = TREE_TYPE (cmpop);
5412 enum tree_code code = TREE_CODE (cmpop);
5413 enum tree_code truthop_code = TREE_CODE (op);
5414 tree lhs = TREE_OPERAND (op, 0);
5415 tree rhs = TREE_OPERAND (op, 1);
5416 tree orig_lhs = lhs, orig_rhs = rhs;
5417 enum tree_code rhs_code = TREE_CODE (rhs);
5418 enum tree_code lhs_code = TREE_CODE (lhs);
5419 enum tree_code inv_code;
5420
5421 if (TREE_SIDE_EFFECTS (op) || TREE_SIDE_EFFECTS (cmpop))
5422 return NULL_TREE;
5423
5424 if (TREE_CODE_CLASS (code) != tcc_comparison)
5425 return NULL_TREE;
5426
5427 if (rhs_code == truthop_code)
5428 {
5429 tree newrhs = merge_truthop_with_opposite_arm (loc, rhs, cmpop, rhs_only);
5430 if (newrhs != NULL_TREE)
5431 {
5432 rhs = newrhs;
5433 rhs_code = TREE_CODE (rhs);
5434 }
5435 }
5436 if (lhs_code == truthop_code && !rhs_only)
5437 {
5438 tree newlhs = merge_truthop_with_opposite_arm (loc, lhs, cmpop, false);
5439 if (newlhs != NULL_TREE)
5440 {
5441 lhs = newlhs;
5442 lhs_code = TREE_CODE (lhs);
5443 }
5444 }
5445
5446 inv_code = invert_tree_comparison (code, HONOR_NANS (type));
5447 if (inv_code == rhs_code
5448 && operand_equal_p (TREE_OPERAND (rhs, 0), TREE_OPERAND (cmpop, 0), 0)
5449 && operand_equal_p (TREE_OPERAND (rhs, 1), TREE_OPERAND (cmpop, 1), 0))
5450 return lhs;
5451 if (!rhs_only && inv_code == lhs_code
5452 && operand_equal_p (TREE_OPERAND (lhs, 0), TREE_OPERAND (cmpop, 0), 0)
5453 && operand_equal_p (TREE_OPERAND (lhs, 1), TREE_OPERAND (cmpop, 1), 0))
5454 return rhs;
5455 if (rhs != orig_rhs || lhs != orig_lhs)
5456 return fold_build2_loc (loc, truthop_code, TREE_TYPE (cmpop),
5457 lhs, rhs);
5458 return NULL_TREE;
5459 }
5460
5461 /* Find ways of folding logical expressions of LHS and RHS:
5462 Try to merge two comparisons to the same innermost item.
5463 Look for range tests like "ch >= '0' && ch <= '9'".
5464 Look for combinations of simple terms on machines with expensive branches
5465 and evaluate the RHS unconditionally.
5466
5467 For example, if we have p->a == 2 && p->b == 4 and we can make an
5468 object large enough to span both A and B, we can do this with a comparison
5469 against the object ANDed with the a mask.
5470
5471 If we have p->a == q->a && p->b == q->b, we may be able to use bit masking
5472 operations to do this with one comparison.
5473
5474 We check for both normal comparisons and the BIT_AND_EXPRs made this by
5475 function and the one above.
5476
5477 CODE is the logical operation being done. It can be TRUTH_ANDIF_EXPR,
5478 TRUTH_AND_EXPR, TRUTH_ORIF_EXPR, or TRUTH_OR_EXPR.
5479
5480 TRUTH_TYPE is the type of the logical operand and LHS and RHS are its
5481 two operands.
5482
5483 We return the simplified tree or 0 if no optimization is possible. */
5484
5485 static tree
5486 fold_truth_andor_1 (location_t loc, enum tree_code code, tree truth_type,
5487 tree lhs, tree rhs)
5488 {
5489 /* If this is the "or" of two comparisons, we can do something if
5490 the comparisons are NE_EXPR. If this is the "and", we can do something
5491 if the comparisons are EQ_EXPR. I.e.,
5492 (a->b == 2 && a->c == 4) can become (a->new == NEW).
5493
5494 WANTED_CODE is this operation code. For single bit fields, we can
5495 convert EQ_EXPR to NE_EXPR so we need not reject the "wrong"
5496 comparison for one-bit fields. */
5497
5498 enum tree_code wanted_code;
5499 enum tree_code lcode, rcode;
5500 tree ll_arg, lr_arg, rl_arg, rr_arg;
5501 tree ll_inner, lr_inner, rl_inner, rr_inner;
5502 HOST_WIDE_INT ll_bitsize, ll_bitpos, lr_bitsize, lr_bitpos;
5503 HOST_WIDE_INT rl_bitsize, rl_bitpos, rr_bitsize, rr_bitpos;
5504 HOST_WIDE_INT xll_bitpos, xlr_bitpos, xrl_bitpos, xrr_bitpos;
5505 HOST_WIDE_INT lnbitsize, lnbitpos, rnbitsize, rnbitpos;
5506 int ll_unsignedp, lr_unsignedp, rl_unsignedp, rr_unsignedp;
5507 int ll_reversep, lr_reversep, rl_reversep, rr_reversep;
5508 machine_mode ll_mode, lr_mode, rl_mode, rr_mode;
5509 machine_mode lnmode, rnmode;
5510 tree ll_mask, lr_mask, rl_mask, rr_mask;
5511 tree ll_and_mask, lr_and_mask, rl_and_mask, rr_and_mask;
5512 tree l_const, r_const;
5513 tree lntype, rntype, result;
5514 HOST_WIDE_INT first_bit, end_bit;
5515 int volatilep;
5516
5517 /* Start by getting the comparison codes. Fail if anything is volatile.
5518 If one operand is a BIT_AND_EXPR with the constant one, treat it as if
5519 it were surrounded with a NE_EXPR. */
5520
5521 if (TREE_SIDE_EFFECTS (lhs) || TREE_SIDE_EFFECTS (rhs))
5522 return 0;
5523
5524 lcode = TREE_CODE (lhs);
5525 rcode = TREE_CODE (rhs);
5526
5527 if (lcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (lhs, 1)))
5528 {
5529 lhs = build2 (NE_EXPR, truth_type, lhs,
5530 build_int_cst (TREE_TYPE (lhs), 0));
5531 lcode = NE_EXPR;
5532 }
5533
5534 if (rcode == BIT_AND_EXPR && integer_onep (TREE_OPERAND (rhs, 1)))
5535 {
5536 rhs = build2 (NE_EXPR, truth_type, rhs,
5537 build_int_cst (TREE_TYPE (rhs), 0));
5538 rcode = NE_EXPR;
5539 }
5540
5541 if (TREE_CODE_CLASS (lcode) != tcc_comparison
5542 || TREE_CODE_CLASS (rcode) != tcc_comparison)
5543 return 0;
5544
5545 ll_arg = TREE_OPERAND (lhs, 0);
5546 lr_arg = TREE_OPERAND (lhs, 1);
5547 rl_arg = TREE_OPERAND (rhs, 0);
5548 rr_arg = TREE_OPERAND (rhs, 1);
5549
5550 /* Simplify (x<y) && (x==y) into (x<=y) and related optimizations. */
5551 if (simple_operand_p (ll_arg)
5552 && simple_operand_p (lr_arg))
5553 {
5554 if (operand_equal_p (ll_arg, rl_arg, 0)
5555 && operand_equal_p (lr_arg, rr_arg, 0))
5556 {
5557 result = combine_comparisons (loc, code, lcode, rcode,
5558 truth_type, ll_arg, lr_arg);
5559 if (result)
5560 return result;
5561 }
5562 else if (operand_equal_p (ll_arg, rr_arg, 0)
5563 && operand_equal_p (lr_arg, rl_arg, 0))
5564 {
5565 result = combine_comparisons (loc, code, lcode,
5566 swap_tree_comparison (rcode),
5567 truth_type, ll_arg, lr_arg);
5568 if (result)
5569 return result;
5570 }
5571 }
5572
5573 code = ((code == TRUTH_AND_EXPR || code == TRUTH_ANDIF_EXPR)
5574 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR);
5575
5576 /* If the RHS can be evaluated unconditionally and its operands are
5577 simple, it wins to evaluate the RHS unconditionally on machines
5578 with expensive branches. In this case, this isn't a comparison
5579 that can be merged. */
5580
5581 if (BRANCH_COST (optimize_function_for_speed_p (cfun),
5582 false) >= 2
5583 && ! FLOAT_TYPE_P (TREE_TYPE (rl_arg))
5584 && simple_operand_p (rl_arg)
5585 && simple_operand_p (rr_arg))
5586 {
5587 /* Convert (a != 0) || (b != 0) into (a | b) != 0. */
5588 if (code == TRUTH_OR_EXPR
5589 && lcode == NE_EXPR && integer_zerop (lr_arg)
5590 && rcode == NE_EXPR && integer_zerop (rr_arg)
5591 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5592 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5593 return build2_loc (loc, NE_EXPR, truth_type,
5594 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5595 ll_arg, rl_arg),
5596 build_int_cst (TREE_TYPE (ll_arg), 0));
5597
5598 /* Convert (a == 0) && (b == 0) into (a | b) == 0. */
5599 if (code == TRUTH_AND_EXPR
5600 && lcode == EQ_EXPR && integer_zerop (lr_arg)
5601 && rcode == EQ_EXPR && integer_zerop (rr_arg)
5602 && TREE_TYPE (ll_arg) == TREE_TYPE (rl_arg)
5603 && INTEGRAL_TYPE_P (TREE_TYPE (ll_arg)))
5604 return build2_loc (loc, EQ_EXPR, truth_type,
5605 build2 (BIT_IOR_EXPR, TREE_TYPE (ll_arg),
5606 ll_arg, rl_arg),
5607 build_int_cst (TREE_TYPE (ll_arg), 0));
5608 }
5609
5610 /* See if the comparisons can be merged. Then get all the parameters for
5611 each side. */
5612
5613 if ((lcode != EQ_EXPR && lcode != NE_EXPR)
5614 || (rcode != EQ_EXPR && rcode != NE_EXPR))
5615 return 0;
5616
5617 ll_reversep = lr_reversep = rl_reversep = rr_reversep = 0;
5618 volatilep = 0;
5619 ll_inner = decode_field_reference (loc, ll_arg,
5620 &ll_bitsize, &ll_bitpos, &ll_mode,
5621 &ll_unsignedp, &ll_reversep, &volatilep,
5622 &ll_mask, &ll_and_mask);
5623 lr_inner = decode_field_reference (loc, lr_arg,
5624 &lr_bitsize, &lr_bitpos, &lr_mode,
5625 &lr_unsignedp, &lr_reversep, &volatilep,
5626 &lr_mask, &lr_and_mask);
5627 rl_inner = decode_field_reference (loc, rl_arg,
5628 &rl_bitsize, &rl_bitpos, &rl_mode,
5629 &rl_unsignedp, &rl_reversep, &volatilep,
5630 &rl_mask, &rl_and_mask);
5631 rr_inner = decode_field_reference (loc, rr_arg,
5632 &rr_bitsize, &rr_bitpos, &rr_mode,
5633 &rr_unsignedp, &rr_reversep, &volatilep,
5634 &rr_mask, &rr_and_mask);
5635
5636 /* It must be true that the inner operation on the lhs of each
5637 comparison must be the same if we are to be able to do anything.
5638 Then see if we have constants. If not, the same must be true for
5639 the rhs's. */
5640 if (volatilep
5641 || ll_reversep != rl_reversep
5642 || ll_inner == 0 || rl_inner == 0
5643 || ! operand_equal_p (ll_inner, rl_inner, 0))
5644 return 0;
5645
5646 if (TREE_CODE (lr_arg) == INTEGER_CST
5647 && TREE_CODE (rr_arg) == INTEGER_CST)
5648 {
5649 l_const = lr_arg, r_const = rr_arg;
5650 lr_reversep = ll_reversep;
5651 }
5652 else if (lr_reversep != rr_reversep
5653 || lr_inner == 0 || rr_inner == 0
5654 || ! operand_equal_p (lr_inner, rr_inner, 0))
5655 return 0;
5656 else
5657 l_const = r_const = 0;
5658
5659 /* If either comparison code is not correct for our logical operation,
5660 fail. However, we can convert a one-bit comparison against zero into
5661 the opposite comparison against that bit being set in the field. */
5662
5663 wanted_code = (code == TRUTH_AND_EXPR ? EQ_EXPR : NE_EXPR);
5664 if (lcode != wanted_code)
5665 {
5666 if (l_const && integer_zerop (l_const) && integer_pow2p (ll_mask))
5667 {
5668 /* Make the left operand unsigned, since we are only interested
5669 in the value of one bit. Otherwise we are doing the wrong
5670 thing below. */
5671 ll_unsignedp = 1;
5672 l_const = ll_mask;
5673 }
5674 else
5675 return 0;
5676 }
5677
5678 /* This is analogous to the code for l_const above. */
5679 if (rcode != wanted_code)
5680 {
5681 if (r_const && integer_zerop (r_const) && integer_pow2p (rl_mask))
5682 {
5683 rl_unsignedp = 1;
5684 r_const = rl_mask;
5685 }
5686 else
5687 return 0;
5688 }
5689
5690 /* See if we can find a mode that contains both fields being compared on
5691 the left. If we can't, fail. Otherwise, update all constants and masks
5692 to be relative to a field of that size. */
5693 first_bit = MIN (ll_bitpos, rl_bitpos);
5694 end_bit = MAX (ll_bitpos + ll_bitsize, rl_bitpos + rl_bitsize);
5695 lnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5696 TYPE_ALIGN (TREE_TYPE (ll_inner)), word_mode,
5697 volatilep);
5698 if (lnmode == VOIDmode)
5699 return 0;
5700
5701 lnbitsize = GET_MODE_BITSIZE (lnmode);
5702 lnbitpos = first_bit & ~ (lnbitsize - 1);
5703 lntype = lang_hooks.types.type_for_size (lnbitsize, 1);
5704 xll_bitpos = ll_bitpos - lnbitpos, xrl_bitpos = rl_bitpos - lnbitpos;
5705
5706 if (ll_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5707 {
5708 xll_bitpos = lnbitsize - xll_bitpos - ll_bitsize;
5709 xrl_bitpos = lnbitsize - xrl_bitpos - rl_bitsize;
5710 }
5711
5712 ll_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, ll_mask),
5713 size_int (xll_bitpos));
5714 rl_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc, lntype, rl_mask),
5715 size_int (xrl_bitpos));
5716
5717 if (l_const)
5718 {
5719 l_const = fold_convert_loc (loc, lntype, l_const);
5720 l_const = unextend (l_const, ll_bitsize, ll_unsignedp, ll_and_mask);
5721 l_const = const_binop (LSHIFT_EXPR, l_const, size_int (xll_bitpos));
5722 if (! integer_zerop (const_binop (BIT_AND_EXPR, l_const,
5723 fold_build1_loc (loc, BIT_NOT_EXPR,
5724 lntype, ll_mask))))
5725 {
5726 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5727
5728 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5729 }
5730 }
5731 if (r_const)
5732 {
5733 r_const = fold_convert_loc (loc, lntype, r_const);
5734 r_const = unextend (r_const, rl_bitsize, rl_unsignedp, rl_and_mask);
5735 r_const = const_binop (LSHIFT_EXPR, r_const, size_int (xrl_bitpos));
5736 if (! integer_zerop (const_binop (BIT_AND_EXPR, r_const,
5737 fold_build1_loc (loc, BIT_NOT_EXPR,
5738 lntype, rl_mask))))
5739 {
5740 warning (0, "comparison is always %d", wanted_code == NE_EXPR);
5741
5742 return constant_boolean_node (wanted_code == NE_EXPR, truth_type);
5743 }
5744 }
5745
5746 /* If the right sides are not constant, do the same for it. Also,
5747 disallow this optimization if a size or signedness mismatch occurs
5748 between the left and right sides. */
5749 if (l_const == 0)
5750 {
5751 if (ll_bitsize != lr_bitsize || rl_bitsize != rr_bitsize
5752 || ll_unsignedp != lr_unsignedp || rl_unsignedp != rr_unsignedp
5753 /* Make sure the two fields on the right
5754 correspond to the left without being swapped. */
5755 || ll_bitpos - rl_bitpos != lr_bitpos - rr_bitpos)
5756 return 0;
5757
5758 first_bit = MIN (lr_bitpos, rr_bitpos);
5759 end_bit = MAX (lr_bitpos + lr_bitsize, rr_bitpos + rr_bitsize);
5760 rnmode = get_best_mode (end_bit - first_bit, first_bit, 0, 0,
5761 TYPE_ALIGN (TREE_TYPE (lr_inner)), word_mode,
5762 volatilep);
5763 if (rnmode == VOIDmode)
5764 return 0;
5765
5766 rnbitsize = GET_MODE_BITSIZE (rnmode);
5767 rnbitpos = first_bit & ~ (rnbitsize - 1);
5768 rntype = lang_hooks.types.type_for_size (rnbitsize, 1);
5769 xlr_bitpos = lr_bitpos - rnbitpos, xrr_bitpos = rr_bitpos - rnbitpos;
5770
5771 if (lr_reversep ? !BYTES_BIG_ENDIAN : BYTES_BIG_ENDIAN)
5772 {
5773 xlr_bitpos = rnbitsize - xlr_bitpos - lr_bitsize;
5774 xrr_bitpos = rnbitsize - xrr_bitpos - rr_bitsize;
5775 }
5776
5777 lr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5778 rntype, lr_mask),
5779 size_int (xlr_bitpos));
5780 rr_mask = const_binop (LSHIFT_EXPR, fold_convert_loc (loc,
5781 rntype, rr_mask),
5782 size_int (xrr_bitpos));
5783
5784 /* Make a mask that corresponds to both fields being compared.
5785 Do this for both items being compared. If the operands are the
5786 same size and the bits being compared are in the same position
5787 then we can do this by masking both and comparing the masked
5788 results. */
5789 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5790 lr_mask = const_binop (BIT_IOR_EXPR, lr_mask, rr_mask);
5791 if (lnbitsize == rnbitsize && xll_bitpos == xlr_bitpos)
5792 {
5793 lhs = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5794 ll_unsignedp || rl_unsignedp, ll_reversep);
5795 if (! all_ones_mask_p (ll_mask, lnbitsize))
5796 lhs = build2 (BIT_AND_EXPR, lntype, lhs, ll_mask);
5797
5798 rhs = make_bit_field_ref (loc, lr_inner, rntype, rnbitsize, rnbitpos,
5799 lr_unsignedp || rr_unsignedp, lr_reversep);
5800 if (! all_ones_mask_p (lr_mask, rnbitsize))
5801 rhs = build2 (BIT_AND_EXPR, rntype, rhs, lr_mask);
5802
5803 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5804 }
5805
5806 /* There is still another way we can do something: If both pairs of
5807 fields being compared are adjacent, we may be able to make a wider
5808 field containing them both.
5809
5810 Note that we still must mask the lhs/rhs expressions. Furthermore,
5811 the mask must be shifted to account for the shift done by
5812 make_bit_field_ref. */
5813 if ((ll_bitsize + ll_bitpos == rl_bitpos
5814 && lr_bitsize + lr_bitpos == rr_bitpos)
5815 || (ll_bitpos == rl_bitpos + rl_bitsize
5816 && lr_bitpos == rr_bitpos + rr_bitsize))
5817 {
5818 tree type;
5819
5820 lhs = make_bit_field_ref (loc, ll_inner, lntype,
5821 ll_bitsize + rl_bitsize,
5822 MIN (ll_bitpos, rl_bitpos),
5823 ll_unsignedp, ll_reversep);
5824 rhs = make_bit_field_ref (loc, lr_inner, rntype,
5825 lr_bitsize + rr_bitsize,
5826 MIN (lr_bitpos, rr_bitpos),
5827 lr_unsignedp, lr_reversep);
5828
5829 ll_mask = const_binop (RSHIFT_EXPR, ll_mask,
5830 size_int (MIN (xll_bitpos, xrl_bitpos)));
5831 lr_mask = const_binop (RSHIFT_EXPR, lr_mask,
5832 size_int (MIN (xlr_bitpos, xrr_bitpos)));
5833
5834 /* Convert to the smaller type before masking out unwanted bits. */
5835 type = lntype;
5836 if (lntype != rntype)
5837 {
5838 if (lnbitsize > rnbitsize)
5839 {
5840 lhs = fold_convert_loc (loc, rntype, lhs);
5841 ll_mask = fold_convert_loc (loc, rntype, ll_mask);
5842 type = rntype;
5843 }
5844 else if (lnbitsize < rnbitsize)
5845 {
5846 rhs = fold_convert_loc (loc, lntype, rhs);
5847 lr_mask = fold_convert_loc (loc, lntype, lr_mask);
5848 type = lntype;
5849 }
5850 }
5851
5852 if (! all_ones_mask_p (ll_mask, ll_bitsize + rl_bitsize))
5853 lhs = build2 (BIT_AND_EXPR, type, lhs, ll_mask);
5854
5855 if (! all_ones_mask_p (lr_mask, lr_bitsize + rr_bitsize))
5856 rhs = build2 (BIT_AND_EXPR, type, rhs, lr_mask);
5857
5858 return build2_loc (loc, wanted_code, truth_type, lhs, rhs);
5859 }
5860
5861 return 0;
5862 }
5863
5864 /* Handle the case of comparisons with constants. If there is something in
5865 common between the masks, those bits of the constants must be the same.
5866 If not, the condition is always false. Test for this to avoid generating
5867 incorrect code below. */
5868 result = const_binop (BIT_AND_EXPR, ll_mask, rl_mask);
5869 if (! integer_zerop (result)
5870 && simple_cst_equal (const_binop (BIT_AND_EXPR, result, l_const),
5871 const_binop (BIT_AND_EXPR, result, r_const)) != 1)
5872 {
5873 if (wanted_code == NE_EXPR)
5874 {
5875 warning (0, "%<or%> of unmatched not-equal tests is always 1");
5876 return constant_boolean_node (true, truth_type);
5877 }
5878 else
5879 {
5880 warning (0, "%<and%> of mutually exclusive equal-tests is always 0");
5881 return constant_boolean_node (false, truth_type);
5882 }
5883 }
5884
5885 /* Construct the expression we will return. First get the component
5886 reference we will make. Unless the mask is all ones the width of
5887 that field, perform the mask operation. Then compare with the
5888 merged constant. */
5889 result = make_bit_field_ref (loc, ll_inner, lntype, lnbitsize, lnbitpos,
5890 ll_unsignedp || rl_unsignedp, ll_reversep);
5891
5892 ll_mask = const_binop (BIT_IOR_EXPR, ll_mask, rl_mask);
5893 if (! all_ones_mask_p (ll_mask, lnbitsize))
5894 result = build2_loc (loc, BIT_AND_EXPR, lntype, result, ll_mask);
5895
5896 return build2_loc (loc, wanted_code, truth_type, result,
5897 const_binop (BIT_IOR_EXPR, l_const, r_const));
5898 }
5899 \f
5900 /* Optimize T, which is a comparison of a MIN_EXPR or MAX_EXPR with a
5901 constant. */
5902
5903 static tree
5904 optimize_minmax_comparison (location_t loc, enum tree_code code, tree type,
5905 tree op0, tree op1)
5906 {
5907 tree arg0 = op0;
5908 enum tree_code op_code;
5909 tree comp_const;
5910 tree minmax_const;
5911 int consts_equal, consts_lt;
5912 tree inner;
5913
5914 STRIP_SIGN_NOPS (arg0);
5915
5916 op_code = TREE_CODE (arg0);
5917 minmax_const = TREE_OPERAND (arg0, 1);
5918 comp_const = fold_convert_loc (loc, TREE_TYPE (arg0), op1);
5919 consts_equal = tree_int_cst_equal (minmax_const, comp_const);
5920 consts_lt = tree_int_cst_lt (minmax_const, comp_const);
5921 inner = TREE_OPERAND (arg0, 0);
5922
5923 /* If something does not permit us to optimize, return the original tree. */
5924 if ((op_code != MIN_EXPR && op_code != MAX_EXPR)
5925 || TREE_CODE (comp_const) != INTEGER_CST
5926 || TREE_OVERFLOW (comp_const)
5927 || TREE_CODE (minmax_const) != INTEGER_CST
5928 || TREE_OVERFLOW (minmax_const))
5929 return NULL_TREE;
5930
5931 /* Now handle all the various comparison codes. We only handle EQ_EXPR
5932 and GT_EXPR, doing the rest with recursive calls using logical
5933 simplifications. */
5934 switch (code)
5935 {
5936 case NE_EXPR: case LT_EXPR: case LE_EXPR:
5937 {
5938 tree tem
5939 = optimize_minmax_comparison (loc,
5940 invert_tree_comparison (code, false),
5941 type, op0, op1);
5942 if (tem)
5943 return invert_truthvalue_loc (loc, tem);
5944 return NULL_TREE;
5945 }
5946
5947 case GE_EXPR:
5948 return
5949 fold_build2_loc (loc, TRUTH_ORIF_EXPR, type,
5950 optimize_minmax_comparison
5951 (loc, EQ_EXPR, type, arg0, comp_const),
5952 optimize_minmax_comparison
5953 (loc, GT_EXPR, type, arg0, comp_const));
5954
5955 case EQ_EXPR:
5956 if (op_code == MAX_EXPR && consts_equal)
5957 /* MAX (X, 0) == 0 -> X <= 0 */
5958 return fold_build2_loc (loc, LE_EXPR, type, inner, comp_const);
5959
5960 else if (op_code == MAX_EXPR && consts_lt)
5961 /* MAX (X, 0) == 5 -> X == 5 */
5962 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5963
5964 else if (op_code == MAX_EXPR)
5965 /* MAX (X, 0) == -1 -> false */
5966 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5967
5968 else if (consts_equal)
5969 /* MIN (X, 0) == 0 -> X >= 0 */
5970 return fold_build2_loc (loc, GE_EXPR, type, inner, comp_const);
5971
5972 else if (consts_lt)
5973 /* MIN (X, 0) == 5 -> false */
5974 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5975
5976 else
5977 /* MIN (X, 0) == -1 -> X == -1 */
5978 return fold_build2_loc (loc, EQ_EXPR, type, inner, comp_const);
5979
5980 case GT_EXPR:
5981 if (op_code == MAX_EXPR && (consts_equal || consts_lt))
5982 /* MAX (X, 0) > 0 -> X > 0
5983 MAX (X, 0) > 5 -> X > 5 */
5984 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5985
5986 else if (op_code == MAX_EXPR)
5987 /* MAX (X, 0) > -1 -> true */
5988 return omit_one_operand_loc (loc, type, integer_one_node, inner);
5989
5990 else if (op_code == MIN_EXPR && (consts_equal || consts_lt))
5991 /* MIN (X, 0) > 0 -> false
5992 MIN (X, 0) > 5 -> false */
5993 return omit_one_operand_loc (loc, type, integer_zero_node, inner);
5994
5995 else
5996 /* MIN (X, 0) > -1 -> X > -1 */
5997 return fold_build2_loc (loc, GT_EXPR, type, inner, comp_const);
5998
5999 default:
6000 return NULL_TREE;
6001 }
6002 }
6003 \f
6004 /* T is an integer expression that is being multiplied, divided, or taken a
6005 modulus (CODE says which and what kind of divide or modulus) by a
6006 constant C. See if we can eliminate that operation by folding it with
6007 other operations already in T. WIDE_TYPE, if non-null, is a type that
6008 should be used for the computation if wider than our type.
6009
6010 For example, if we are dividing (X * 8) + (Y * 16) by 4, we can return
6011 (X * 2) + (Y * 4). We must, however, be assured that either the original
6012 expression would not overflow or that overflow is undefined for the type
6013 in the language in question.
6014
6015 If we return a non-null expression, it is an equivalent form of the
6016 original computation, but need not be in the original type.
6017
6018 We set *STRICT_OVERFLOW_P to true if the return values depends on
6019 signed overflow being undefined. Otherwise we do not change
6020 *STRICT_OVERFLOW_P. */
6021
6022 static tree
6023 extract_muldiv (tree t, tree c, enum tree_code code, tree wide_type,
6024 bool *strict_overflow_p)
6025 {
6026 /* To avoid exponential search depth, refuse to allow recursion past
6027 three levels. Beyond that (1) it's highly unlikely that we'll find
6028 something interesting and (2) we've probably processed it before
6029 when we built the inner expression. */
6030
6031 static int depth;
6032 tree ret;
6033
6034 if (depth > 3)
6035 return NULL;
6036
6037 depth++;
6038 ret = extract_muldiv_1 (t, c, code, wide_type, strict_overflow_p);
6039 depth--;
6040
6041 return ret;
6042 }
6043
6044 static tree
6045 extract_muldiv_1 (tree t, tree c, enum tree_code code, tree wide_type,
6046 bool *strict_overflow_p)
6047 {
6048 tree type = TREE_TYPE (t);
6049 enum tree_code tcode = TREE_CODE (t);
6050 tree ctype = (wide_type != 0 && (GET_MODE_SIZE (TYPE_MODE (wide_type))
6051 > GET_MODE_SIZE (TYPE_MODE (type)))
6052 ? wide_type : type);
6053 tree t1, t2;
6054 int same_p = tcode == code;
6055 tree op0 = NULL_TREE, op1 = NULL_TREE;
6056 bool sub_strict_overflow_p;
6057
6058 /* Don't deal with constants of zero here; they confuse the code below. */
6059 if (integer_zerop (c))
6060 return NULL_TREE;
6061
6062 if (TREE_CODE_CLASS (tcode) == tcc_unary)
6063 op0 = TREE_OPERAND (t, 0);
6064
6065 if (TREE_CODE_CLASS (tcode) == tcc_binary)
6066 op0 = TREE_OPERAND (t, 0), op1 = TREE_OPERAND (t, 1);
6067
6068 /* Note that we need not handle conditional operations here since fold
6069 already handles those cases. So just do arithmetic here. */
6070 switch (tcode)
6071 {
6072 case INTEGER_CST:
6073 /* For a constant, we can always simplify if we are a multiply
6074 or (for divide and modulus) if it is a multiple of our constant. */
6075 if (code == MULT_EXPR
6076 || wi::multiple_of_p (t, c, TYPE_SIGN (type)))
6077 {
6078 tree tem = const_binop (code, fold_convert (ctype, t),
6079 fold_convert (ctype, c));
6080 /* If the multiplication overflowed to INT_MIN then we lost sign
6081 information on it and a subsequent multiplication might
6082 spuriously overflow. See PR68142. */
6083 if (TREE_OVERFLOW (tem)
6084 && wi::eq_p (tem, wi::min_value (TYPE_PRECISION (ctype), SIGNED)))
6085 return NULL_TREE;
6086 return tem;
6087 }
6088 break;
6089
6090 CASE_CONVERT: case NON_LVALUE_EXPR:
6091 /* If op0 is an expression ... */
6092 if ((COMPARISON_CLASS_P (op0)
6093 || UNARY_CLASS_P (op0)
6094 || BINARY_CLASS_P (op0)
6095 || VL_EXP_CLASS_P (op0)
6096 || EXPRESSION_CLASS_P (op0))
6097 /* ... and has wrapping overflow, and its type is smaller
6098 than ctype, then we cannot pass through as widening. */
6099 && (((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6100 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (op0)))
6101 && (TYPE_PRECISION (ctype)
6102 > TYPE_PRECISION (TREE_TYPE (op0))))
6103 /* ... or this is a truncation (t is narrower than op0),
6104 then we cannot pass through this narrowing. */
6105 || (TYPE_PRECISION (type)
6106 < TYPE_PRECISION (TREE_TYPE (op0)))
6107 /* ... or signedness changes for division or modulus,
6108 then we cannot pass through this conversion. */
6109 || (code != MULT_EXPR
6110 && (TYPE_UNSIGNED (ctype)
6111 != TYPE_UNSIGNED (TREE_TYPE (op0))))
6112 /* ... or has undefined overflow while the converted to
6113 type has not, we cannot do the operation in the inner type
6114 as that would introduce undefined overflow. */
6115 || ((ANY_INTEGRAL_TYPE_P (TREE_TYPE (op0))
6116 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (op0)))
6117 && !TYPE_OVERFLOW_UNDEFINED (type))))
6118 break;
6119
6120 /* Pass the constant down and see if we can make a simplification. If
6121 we can, replace this expression with the inner simplification for
6122 possible later conversion to our or some other type. */
6123 if ((t2 = fold_convert (TREE_TYPE (op0), c)) != 0
6124 && TREE_CODE (t2) == INTEGER_CST
6125 && !TREE_OVERFLOW (t2)
6126 && (0 != (t1 = extract_muldiv (op0, t2, code,
6127 code == MULT_EXPR
6128 ? ctype : NULL_TREE,
6129 strict_overflow_p))))
6130 return t1;
6131 break;
6132
6133 case ABS_EXPR:
6134 /* If widening the type changes it from signed to unsigned, then we
6135 must avoid building ABS_EXPR itself as unsigned. */
6136 if (TYPE_UNSIGNED (ctype) && !TYPE_UNSIGNED (type))
6137 {
6138 tree cstype = (*signed_type_for) (ctype);
6139 if ((t1 = extract_muldiv (op0, c, code, cstype, strict_overflow_p))
6140 != 0)
6141 {
6142 t1 = fold_build1 (tcode, cstype, fold_convert (cstype, t1));
6143 return fold_convert (ctype, t1);
6144 }
6145 break;
6146 }
6147 /* If the constant is negative, we cannot simplify this. */
6148 if (tree_int_cst_sgn (c) == -1)
6149 break;
6150 /* FALLTHROUGH */
6151 case NEGATE_EXPR:
6152 /* For division and modulus, type can't be unsigned, as e.g.
6153 (-(x / 2U)) / 2U isn't equal to -((x / 2U) / 2U) for x >= 2.
6154 For signed types, even with wrapping overflow, this is fine. */
6155 if (code != MULT_EXPR && TYPE_UNSIGNED (type))
6156 break;
6157 if ((t1 = extract_muldiv (op0, c, code, wide_type, strict_overflow_p))
6158 != 0)
6159 return fold_build1 (tcode, ctype, fold_convert (ctype, t1));
6160 break;
6161
6162 case MIN_EXPR: case MAX_EXPR:
6163 /* If widening the type changes the signedness, then we can't perform
6164 this optimization as that changes the result. */
6165 if (TYPE_UNSIGNED (ctype) != TYPE_UNSIGNED (type))
6166 break;
6167
6168 /* MIN (a, b) / 5 -> MIN (a / 5, b / 5) */
6169 sub_strict_overflow_p = false;
6170 if ((t1 = extract_muldiv (op0, c, code, wide_type,
6171 &sub_strict_overflow_p)) != 0
6172 && (t2 = extract_muldiv (op1, c, code, wide_type,
6173 &sub_strict_overflow_p)) != 0)
6174 {
6175 if (tree_int_cst_sgn (c) < 0)
6176 tcode = (tcode == MIN_EXPR ? MAX_EXPR : MIN_EXPR);
6177 if (sub_strict_overflow_p)
6178 *strict_overflow_p = true;
6179 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6180 fold_convert (ctype, t2));
6181 }
6182 break;
6183
6184 case LSHIFT_EXPR: case RSHIFT_EXPR:
6185 /* If the second operand is constant, this is a multiplication
6186 or floor division, by a power of two, so we can treat it that
6187 way unless the multiplier or divisor overflows. Signed
6188 left-shift overflow is implementation-defined rather than
6189 undefined in C90, so do not convert signed left shift into
6190 multiplication. */
6191 if (TREE_CODE (op1) == INTEGER_CST
6192 && (tcode == RSHIFT_EXPR || TYPE_UNSIGNED (TREE_TYPE (op0)))
6193 /* const_binop may not detect overflow correctly,
6194 so check for it explicitly here. */
6195 && wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
6196 && 0 != (t1 = fold_convert (ctype,
6197 const_binop (LSHIFT_EXPR,
6198 size_one_node,
6199 op1)))
6200 && !TREE_OVERFLOW (t1))
6201 return extract_muldiv (build2 (tcode == LSHIFT_EXPR
6202 ? MULT_EXPR : FLOOR_DIV_EXPR,
6203 ctype,
6204 fold_convert (ctype, op0),
6205 t1),
6206 c, code, wide_type, strict_overflow_p);
6207 break;
6208
6209 case PLUS_EXPR: case MINUS_EXPR:
6210 /* See if we can eliminate the operation on both sides. If we can, we
6211 can return a new PLUS or MINUS. If we can't, the only remaining
6212 cases where we can do anything are if the second operand is a
6213 constant. */
6214 sub_strict_overflow_p = false;
6215 t1 = extract_muldiv (op0, c, code, wide_type, &sub_strict_overflow_p);
6216 t2 = extract_muldiv (op1, c, code, wide_type, &sub_strict_overflow_p);
6217 if (t1 != 0 && t2 != 0
6218 && (code == MULT_EXPR
6219 /* If not multiplication, we can only do this if both operands
6220 are divisible by c. */
6221 || (multiple_of_p (ctype, op0, c)
6222 && multiple_of_p (ctype, op1, c))))
6223 {
6224 if (sub_strict_overflow_p)
6225 *strict_overflow_p = true;
6226 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6227 fold_convert (ctype, t2));
6228 }
6229
6230 /* If this was a subtraction, negate OP1 and set it to be an addition.
6231 This simplifies the logic below. */
6232 if (tcode == MINUS_EXPR)
6233 {
6234 tcode = PLUS_EXPR, op1 = negate_expr (op1);
6235 /* If OP1 was not easily negatable, the constant may be OP0. */
6236 if (TREE_CODE (op0) == INTEGER_CST)
6237 {
6238 std::swap (op0, op1);
6239 std::swap (t1, t2);
6240 }
6241 }
6242
6243 if (TREE_CODE (op1) != INTEGER_CST)
6244 break;
6245
6246 /* If either OP1 or C are negative, this optimization is not safe for
6247 some of the division and remainder types while for others we need
6248 to change the code. */
6249 if (tree_int_cst_sgn (op1) < 0 || tree_int_cst_sgn (c) < 0)
6250 {
6251 if (code == CEIL_DIV_EXPR)
6252 code = FLOOR_DIV_EXPR;
6253 else if (code == FLOOR_DIV_EXPR)
6254 code = CEIL_DIV_EXPR;
6255 else if (code != MULT_EXPR
6256 && code != CEIL_MOD_EXPR && code != FLOOR_MOD_EXPR)
6257 break;
6258 }
6259
6260 /* If it's a multiply or a division/modulus operation of a multiple
6261 of our constant, do the operation and verify it doesn't overflow. */
6262 if (code == MULT_EXPR
6263 || wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6264 {
6265 op1 = const_binop (code, fold_convert (ctype, op1),
6266 fold_convert (ctype, c));
6267 /* We allow the constant to overflow with wrapping semantics. */
6268 if (op1 == 0
6269 || (TREE_OVERFLOW (op1) && !TYPE_OVERFLOW_WRAPS (ctype)))
6270 break;
6271 }
6272 else
6273 break;
6274
6275 /* If we have an unsigned type, we cannot widen the operation since it
6276 will change the result if the original computation overflowed. */
6277 if (TYPE_UNSIGNED (ctype) && ctype != type)
6278 break;
6279
6280 /* If we were able to eliminate our operation from the first side,
6281 apply our operation to the second side and reform the PLUS. */
6282 if (t1 != 0 && (TREE_CODE (t1) != code || code == MULT_EXPR))
6283 return fold_build2 (tcode, ctype, fold_convert (ctype, t1), op1);
6284
6285 /* The last case is if we are a multiply. In that case, we can
6286 apply the distributive law to commute the multiply and addition
6287 if the multiplication of the constants doesn't overflow
6288 and overflow is defined. With undefined overflow
6289 op0 * c might overflow, while (op0 + orig_op1) * c doesn't. */
6290 if (code == MULT_EXPR && TYPE_OVERFLOW_WRAPS (ctype))
6291 return fold_build2 (tcode, ctype,
6292 fold_build2 (code, ctype,
6293 fold_convert (ctype, op0),
6294 fold_convert (ctype, c)),
6295 op1);
6296
6297 break;
6298
6299 case MULT_EXPR:
6300 /* We have a special case here if we are doing something like
6301 (C * 8) % 4 since we know that's zero. */
6302 if ((code == TRUNC_MOD_EXPR || code == CEIL_MOD_EXPR
6303 || code == FLOOR_MOD_EXPR || code == ROUND_MOD_EXPR)
6304 /* If the multiplication can overflow we cannot optimize this. */
6305 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (t))
6306 && TREE_CODE (TREE_OPERAND (t, 1)) == INTEGER_CST
6307 && wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6308 {
6309 *strict_overflow_p = true;
6310 return omit_one_operand (type, integer_zero_node, op0);
6311 }
6312
6313 /* ... fall through ... */
6314
6315 case TRUNC_DIV_EXPR: case CEIL_DIV_EXPR: case FLOOR_DIV_EXPR:
6316 case ROUND_DIV_EXPR: case EXACT_DIV_EXPR:
6317 /* If we can extract our operation from the LHS, do so and return a
6318 new operation. Likewise for the RHS from a MULT_EXPR. Otherwise,
6319 do something only if the second operand is a constant. */
6320 if (same_p
6321 && (t1 = extract_muldiv (op0, c, code, wide_type,
6322 strict_overflow_p)) != 0)
6323 return fold_build2 (tcode, ctype, fold_convert (ctype, t1),
6324 fold_convert (ctype, op1));
6325 else if (tcode == MULT_EXPR && code == MULT_EXPR
6326 && (t1 = extract_muldiv (op1, c, code, wide_type,
6327 strict_overflow_p)) != 0)
6328 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6329 fold_convert (ctype, t1));
6330 else if (TREE_CODE (op1) != INTEGER_CST)
6331 return 0;
6332
6333 /* If these are the same operation types, we can associate them
6334 assuming no overflow. */
6335 if (tcode == code)
6336 {
6337 bool overflow_p = false;
6338 bool overflow_mul_p;
6339 signop sign = TYPE_SIGN (ctype);
6340 wide_int mul = wi::mul (op1, c, sign, &overflow_mul_p);
6341 overflow_p = TREE_OVERFLOW (c) | TREE_OVERFLOW (op1);
6342 if (overflow_mul_p
6343 && ((sign == UNSIGNED && tcode != MULT_EXPR) || sign == SIGNED))
6344 overflow_p = true;
6345 if (!overflow_p)
6346 {
6347 mul = wide_int::from (mul, TYPE_PRECISION (ctype),
6348 TYPE_SIGN (TREE_TYPE (op1)));
6349 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6350 wide_int_to_tree (ctype, mul));
6351 }
6352 }
6353
6354 /* If these operations "cancel" each other, we have the main
6355 optimizations of this pass, which occur when either constant is a
6356 multiple of the other, in which case we replace this with either an
6357 operation or CODE or TCODE.
6358
6359 If we have an unsigned type, we cannot do this since it will change
6360 the result if the original computation overflowed. */
6361 if (TYPE_OVERFLOW_UNDEFINED (ctype)
6362 && ((code == MULT_EXPR && tcode == EXACT_DIV_EXPR)
6363 || (tcode == MULT_EXPR
6364 && code != TRUNC_MOD_EXPR && code != CEIL_MOD_EXPR
6365 && code != FLOOR_MOD_EXPR && code != ROUND_MOD_EXPR
6366 && code != MULT_EXPR)))
6367 {
6368 if (wi::multiple_of_p (op1, c, TYPE_SIGN (type)))
6369 {
6370 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6371 *strict_overflow_p = true;
6372 return fold_build2 (tcode, ctype, fold_convert (ctype, op0),
6373 fold_convert (ctype,
6374 const_binop (TRUNC_DIV_EXPR,
6375 op1, c)));
6376 }
6377 else if (wi::multiple_of_p (c, op1, TYPE_SIGN (type)))
6378 {
6379 if (TYPE_OVERFLOW_UNDEFINED (ctype))
6380 *strict_overflow_p = true;
6381 return fold_build2 (code, ctype, fold_convert (ctype, op0),
6382 fold_convert (ctype,
6383 const_binop (TRUNC_DIV_EXPR,
6384 c, op1)));
6385 }
6386 }
6387 break;
6388
6389 default:
6390 break;
6391 }
6392
6393 return 0;
6394 }
6395 \f
6396 /* Return a node which has the indicated constant VALUE (either 0 or
6397 1 for scalars or {-1,-1,..} or {0,0,...} for vectors),
6398 and is of the indicated TYPE. */
6399
6400 tree
6401 constant_boolean_node (bool value, tree type)
6402 {
6403 if (type == integer_type_node)
6404 return value ? integer_one_node : integer_zero_node;
6405 else if (type == boolean_type_node)
6406 return value ? boolean_true_node : boolean_false_node;
6407 else if (TREE_CODE (type) == VECTOR_TYPE)
6408 return build_vector_from_val (type,
6409 build_int_cst (TREE_TYPE (type),
6410 value ? -1 : 0));
6411 else
6412 return fold_convert (type, value ? integer_one_node : integer_zero_node);
6413 }
6414
6415
6416 /* Transform `a + (b ? x : y)' into `b ? (a + x) : (a + y)'.
6417 Transform, `a + (x < y)' into `(x < y) ? (a + 1) : (a + 0)'. Here
6418 CODE corresponds to the `+', COND to the `(b ? x : y)' or `(x < y)'
6419 expression, and ARG to `a'. If COND_FIRST_P is nonzero, then the
6420 COND is the first argument to CODE; otherwise (as in the example
6421 given here), it is the second argument. TYPE is the type of the
6422 original expression. Return NULL_TREE if no simplification is
6423 possible. */
6424
6425 static tree
6426 fold_binary_op_with_conditional_arg (location_t loc,
6427 enum tree_code code,
6428 tree type, tree op0, tree op1,
6429 tree cond, tree arg, int cond_first_p)
6430 {
6431 tree cond_type = cond_first_p ? TREE_TYPE (op0) : TREE_TYPE (op1);
6432 tree arg_type = cond_first_p ? TREE_TYPE (op1) : TREE_TYPE (op0);
6433 tree test, true_value, false_value;
6434 tree lhs = NULL_TREE;
6435 tree rhs = NULL_TREE;
6436 enum tree_code cond_code = COND_EXPR;
6437
6438 if (TREE_CODE (cond) == COND_EXPR
6439 || TREE_CODE (cond) == VEC_COND_EXPR)
6440 {
6441 test = TREE_OPERAND (cond, 0);
6442 true_value = TREE_OPERAND (cond, 1);
6443 false_value = TREE_OPERAND (cond, 2);
6444 /* If this operand throws an expression, then it does not make
6445 sense to try to perform a logical or arithmetic operation
6446 involving it. */
6447 if (VOID_TYPE_P (TREE_TYPE (true_value)))
6448 lhs = true_value;
6449 if (VOID_TYPE_P (TREE_TYPE (false_value)))
6450 rhs = false_value;
6451 }
6452 else
6453 {
6454 tree testtype = TREE_TYPE (cond);
6455 test = cond;
6456 true_value = constant_boolean_node (true, testtype);
6457 false_value = constant_boolean_node (false, testtype);
6458 }
6459
6460 if (TREE_CODE (TREE_TYPE (test)) == VECTOR_TYPE)
6461 cond_code = VEC_COND_EXPR;
6462
6463 /* This transformation is only worthwhile if we don't have to wrap ARG
6464 in a SAVE_EXPR and the operation can be simplified without recursing
6465 on at least one of the branches once its pushed inside the COND_EXPR. */
6466 if (!TREE_CONSTANT (arg)
6467 && (TREE_SIDE_EFFECTS (arg)
6468 || TREE_CODE (arg) == COND_EXPR || TREE_CODE (arg) == VEC_COND_EXPR
6469 || TREE_CONSTANT (true_value) || TREE_CONSTANT (false_value)))
6470 return NULL_TREE;
6471
6472 arg = fold_convert_loc (loc, arg_type, arg);
6473 if (lhs == 0)
6474 {
6475 true_value = fold_convert_loc (loc, cond_type, true_value);
6476 if (cond_first_p)
6477 lhs = fold_build2_loc (loc, code, type, true_value, arg);
6478 else
6479 lhs = fold_build2_loc (loc, code, type, arg, true_value);
6480 }
6481 if (rhs == 0)
6482 {
6483 false_value = fold_convert_loc (loc, cond_type, false_value);
6484 if (cond_first_p)
6485 rhs = fold_build2_loc (loc, code, type, false_value, arg);
6486 else
6487 rhs = fold_build2_loc (loc, code, type, arg, false_value);
6488 }
6489
6490 /* Check that we have simplified at least one of the branches. */
6491 if (!TREE_CONSTANT (arg) && !TREE_CONSTANT (lhs) && !TREE_CONSTANT (rhs))
6492 return NULL_TREE;
6493
6494 return fold_build3_loc (loc, cond_code, type, test, lhs, rhs);
6495 }
6496
6497 \f
6498 /* Subroutine of fold() that checks for the addition of +/- 0.0.
6499
6500 If !NEGATE, return true if ADDEND is +/-0.0 and, for all X of type
6501 TYPE, X + ADDEND is the same as X. If NEGATE, return true if X -
6502 ADDEND is the same as X.
6503
6504 X + 0 and X - 0 both give X when X is NaN, infinite, or nonzero
6505 and finite. The problematic cases are when X is zero, and its mode
6506 has signed zeros. In the case of rounding towards -infinity,
6507 X - 0 is not the same as X because 0 - 0 is -0. In other rounding
6508 modes, X + 0 is not the same as X because -0 + 0 is 0. */
6509
6510 bool
6511 fold_real_zero_addition_p (const_tree type, const_tree addend, int negate)
6512 {
6513 if (!real_zerop (addend))
6514 return false;
6515
6516 /* Don't allow the fold with -fsignaling-nans. */
6517 if (HONOR_SNANS (element_mode (type)))
6518 return false;
6519
6520 /* Allow the fold if zeros aren't signed, or their sign isn't important. */
6521 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
6522 return true;
6523
6524 /* In a vector or complex, we would need to check the sign of all zeros. */
6525 if (TREE_CODE (addend) != REAL_CST)
6526 return false;
6527
6528 /* Treat x + -0 as x - 0 and x - -0 as x + 0. */
6529 if (REAL_VALUE_MINUS_ZERO (TREE_REAL_CST (addend)))
6530 negate = !negate;
6531
6532 /* The mode has signed zeros, and we have to honor their sign.
6533 In this situation, there is only one case we can return true for.
6534 X - 0 is the same as X unless rounding towards -infinity is
6535 supported. */
6536 return negate && !HONOR_SIGN_DEPENDENT_ROUNDING (element_mode (type));
6537 }
6538
6539 /* Subroutine of fold() that optimizes comparisons of a division by
6540 a nonzero integer constant against an integer constant, i.e.
6541 X/C1 op C2.
6542
6543 CODE is the comparison operator: EQ_EXPR, NE_EXPR, GT_EXPR, LT_EXPR,
6544 GE_EXPR or LE_EXPR. TYPE is the type of the result and ARG0 and ARG1
6545 are the operands of the comparison. ARG1 must be a TREE_REAL_CST.
6546
6547 The function returns the constant folded tree if a simplification
6548 can be made, and NULL_TREE otherwise. */
6549
6550 static tree
6551 fold_div_compare (location_t loc,
6552 enum tree_code code, tree type, tree arg0, tree arg1)
6553 {
6554 tree prod, tmp, hi, lo;
6555 tree arg00 = TREE_OPERAND (arg0, 0);
6556 tree arg01 = TREE_OPERAND (arg0, 1);
6557 signop sign = TYPE_SIGN (TREE_TYPE (arg0));
6558 bool neg_overflow = false;
6559 bool overflow;
6560
6561 /* We have to do this the hard way to detect unsigned overflow.
6562 prod = int_const_binop (MULT_EXPR, arg01, arg1); */
6563 wide_int val = wi::mul (arg01, arg1, sign, &overflow);
6564 prod = force_fit_type (TREE_TYPE (arg00), val, -1, overflow);
6565 neg_overflow = false;
6566
6567 if (sign == UNSIGNED)
6568 {
6569 tmp = int_const_binop (MINUS_EXPR, arg01,
6570 build_int_cst (TREE_TYPE (arg01), 1));
6571 lo = prod;
6572
6573 /* Likewise hi = int_const_binop (PLUS_EXPR, prod, tmp). */
6574 val = wi::add (prod, tmp, sign, &overflow);
6575 hi = force_fit_type (TREE_TYPE (arg00), val,
6576 -1, overflow | TREE_OVERFLOW (prod));
6577 }
6578 else if (tree_int_cst_sgn (arg01) >= 0)
6579 {
6580 tmp = int_const_binop (MINUS_EXPR, arg01,
6581 build_int_cst (TREE_TYPE (arg01), 1));
6582 switch (tree_int_cst_sgn (arg1))
6583 {
6584 case -1:
6585 neg_overflow = true;
6586 lo = int_const_binop (MINUS_EXPR, prod, tmp);
6587 hi = prod;
6588 break;
6589
6590 case 0:
6591 lo = fold_negate_const (tmp, TREE_TYPE (arg0));
6592 hi = tmp;
6593 break;
6594
6595 case 1:
6596 hi = int_const_binop (PLUS_EXPR, prod, tmp);
6597 lo = prod;
6598 break;
6599
6600 default:
6601 gcc_unreachable ();
6602 }
6603 }
6604 else
6605 {
6606 /* A negative divisor reverses the relational operators. */
6607 code = swap_tree_comparison (code);
6608
6609 tmp = int_const_binop (PLUS_EXPR, arg01,
6610 build_int_cst (TREE_TYPE (arg01), 1));
6611 switch (tree_int_cst_sgn (arg1))
6612 {
6613 case -1:
6614 hi = int_const_binop (MINUS_EXPR, prod, tmp);
6615 lo = prod;
6616 break;
6617
6618 case 0:
6619 hi = fold_negate_const (tmp, TREE_TYPE (arg0));
6620 lo = tmp;
6621 break;
6622
6623 case 1:
6624 neg_overflow = true;
6625 lo = int_const_binop (PLUS_EXPR, prod, tmp);
6626 hi = prod;
6627 break;
6628
6629 default:
6630 gcc_unreachable ();
6631 }
6632 }
6633
6634 switch (code)
6635 {
6636 case EQ_EXPR:
6637 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6638 return omit_one_operand_loc (loc, type, integer_zero_node, arg00);
6639 if (TREE_OVERFLOW (hi))
6640 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6641 if (TREE_OVERFLOW (lo))
6642 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6643 return build_range_check (loc, type, arg00, 1, lo, hi);
6644
6645 case NE_EXPR:
6646 if (TREE_OVERFLOW (lo) && TREE_OVERFLOW (hi))
6647 return omit_one_operand_loc (loc, type, integer_one_node, arg00);
6648 if (TREE_OVERFLOW (hi))
6649 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6650 if (TREE_OVERFLOW (lo))
6651 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6652 return build_range_check (loc, type, arg00, 0, lo, hi);
6653
6654 case LT_EXPR:
6655 if (TREE_OVERFLOW (lo))
6656 {
6657 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6658 return omit_one_operand_loc (loc, type, tmp, arg00);
6659 }
6660 return fold_build2_loc (loc, LT_EXPR, type, arg00, lo);
6661
6662 case LE_EXPR:
6663 if (TREE_OVERFLOW (hi))
6664 {
6665 tmp = neg_overflow ? integer_zero_node : integer_one_node;
6666 return omit_one_operand_loc (loc, type, tmp, arg00);
6667 }
6668 return fold_build2_loc (loc, LE_EXPR, type, arg00, hi);
6669
6670 case GT_EXPR:
6671 if (TREE_OVERFLOW (hi))
6672 {
6673 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6674 return omit_one_operand_loc (loc, type, tmp, arg00);
6675 }
6676 return fold_build2_loc (loc, GT_EXPR, type, arg00, hi);
6677
6678 case GE_EXPR:
6679 if (TREE_OVERFLOW (lo))
6680 {
6681 tmp = neg_overflow ? integer_one_node : integer_zero_node;
6682 return omit_one_operand_loc (loc, type, tmp, arg00);
6683 }
6684 return fold_build2_loc (loc, GE_EXPR, type, arg00, lo);
6685
6686 default:
6687 break;
6688 }
6689
6690 return NULL_TREE;
6691 }
6692
6693
6694 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6695 equality/inequality test, then return a simplified form of the test
6696 using a sign testing. Otherwise return NULL. TYPE is the desired
6697 result type. */
6698
6699 static tree
6700 fold_single_bit_test_into_sign_test (location_t loc,
6701 enum tree_code code, tree arg0, tree arg1,
6702 tree result_type)
6703 {
6704 /* If this is testing a single bit, we can optimize the test. */
6705 if ((code == NE_EXPR || code == EQ_EXPR)
6706 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6707 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6708 {
6709 /* If we have (A & C) != 0 where C is the sign bit of A, convert
6710 this into A < 0. Similarly for (A & C) == 0 into A >= 0. */
6711 tree arg00 = sign_bit_p (TREE_OPERAND (arg0, 0), TREE_OPERAND (arg0, 1));
6712
6713 if (arg00 != NULL_TREE
6714 /* This is only a win if casting to a signed type is cheap,
6715 i.e. when arg00's type is not a partial mode. */
6716 && TYPE_PRECISION (TREE_TYPE (arg00))
6717 == GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (arg00))))
6718 {
6719 tree stype = signed_type_for (TREE_TYPE (arg00));
6720 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
6721 result_type,
6722 fold_convert_loc (loc, stype, arg00),
6723 build_int_cst (stype, 0));
6724 }
6725 }
6726
6727 return NULL_TREE;
6728 }
6729
6730 /* If CODE with arguments ARG0 and ARG1 represents a single bit
6731 equality/inequality test, then return a simplified form of
6732 the test using shifts and logical operations. Otherwise return
6733 NULL. TYPE is the desired result type. */
6734
6735 tree
6736 fold_single_bit_test (location_t loc, enum tree_code code,
6737 tree arg0, tree arg1, tree result_type)
6738 {
6739 /* If this is testing a single bit, we can optimize the test. */
6740 if ((code == NE_EXPR || code == EQ_EXPR)
6741 && TREE_CODE (arg0) == BIT_AND_EXPR && integer_zerop (arg1)
6742 && integer_pow2p (TREE_OPERAND (arg0, 1)))
6743 {
6744 tree inner = TREE_OPERAND (arg0, 0);
6745 tree type = TREE_TYPE (arg0);
6746 int bitnum = tree_log2 (TREE_OPERAND (arg0, 1));
6747 machine_mode operand_mode = TYPE_MODE (type);
6748 int ops_unsigned;
6749 tree signed_type, unsigned_type, intermediate_type;
6750 tree tem, one;
6751
6752 /* First, see if we can fold the single bit test into a sign-bit
6753 test. */
6754 tem = fold_single_bit_test_into_sign_test (loc, code, arg0, arg1,
6755 result_type);
6756 if (tem)
6757 return tem;
6758
6759 /* Otherwise we have (A & C) != 0 where C is a single bit,
6760 convert that into ((A >> C2) & 1). Where C2 = log2(C).
6761 Similarly for (A & C) == 0. */
6762
6763 /* If INNER is a right shift of a constant and it plus BITNUM does
6764 not overflow, adjust BITNUM and INNER. */
6765 if (TREE_CODE (inner) == RSHIFT_EXPR
6766 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
6767 && bitnum < TYPE_PRECISION (type)
6768 && wi::ltu_p (TREE_OPERAND (inner, 1),
6769 TYPE_PRECISION (type) - bitnum))
6770 {
6771 bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
6772 inner = TREE_OPERAND (inner, 0);
6773 }
6774
6775 /* If we are going to be able to omit the AND below, we must do our
6776 operations as unsigned. If we must use the AND, we have a choice.
6777 Normally unsigned is faster, but for some machines signed is. */
6778 ops_unsigned = (LOAD_EXTEND_OP (operand_mode) == SIGN_EXTEND
6779 && !flag_syntax_only) ? 0 : 1;
6780
6781 signed_type = lang_hooks.types.type_for_mode (operand_mode, 0);
6782 unsigned_type = lang_hooks.types.type_for_mode (operand_mode, 1);
6783 intermediate_type = ops_unsigned ? unsigned_type : signed_type;
6784 inner = fold_convert_loc (loc, intermediate_type, inner);
6785
6786 if (bitnum != 0)
6787 inner = build2 (RSHIFT_EXPR, intermediate_type,
6788 inner, size_int (bitnum));
6789
6790 one = build_int_cst (intermediate_type, 1);
6791
6792 if (code == EQ_EXPR)
6793 inner = fold_build2_loc (loc, BIT_XOR_EXPR, intermediate_type, inner, one);
6794
6795 /* Put the AND last so it can combine with more things. */
6796 inner = build2 (BIT_AND_EXPR, intermediate_type, inner, one);
6797
6798 /* Make sure to return the proper type. */
6799 inner = fold_convert_loc (loc, result_type, inner);
6800
6801 return inner;
6802 }
6803 return NULL_TREE;
6804 }
6805
6806 /* Check whether we are allowed to reorder operands arg0 and arg1,
6807 such that the evaluation of arg1 occurs before arg0. */
6808
6809 static bool
6810 reorder_operands_p (const_tree arg0, const_tree arg1)
6811 {
6812 if (! flag_evaluation_order)
6813 return true;
6814 if (TREE_CONSTANT (arg0) || TREE_CONSTANT (arg1))
6815 return true;
6816 return ! TREE_SIDE_EFFECTS (arg0)
6817 && ! TREE_SIDE_EFFECTS (arg1);
6818 }
6819
6820 /* Test whether it is preferable two swap two operands, ARG0 and
6821 ARG1, for example because ARG0 is an integer constant and ARG1
6822 isn't. If REORDER is true, only recommend swapping if we can
6823 evaluate the operands in reverse order. */
6824
6825 bool
6826 tree_swap_operands_p (const_tree arg0, const_tree arg1, bool reorder)
6827 {
6828 if (CONSTANT_CLASS_P (arg1))
6829 return 0;
6830 if (CONSTANT_CLASS_P (arg0))
6831 return 1;
6832
6833 STRIP_NOPS (arg0);
6834 STRIP_NOPS (arg1);
6835
6836 if (TREE_CONSTANT (arg1))
6837 return 0;
6838 if (TREE_CONSTANT (arg0))
6839 return 1;
6840
6841 if (reorder && flag_evaluation_order
6842 && (TREE_SIDE_EFFECTS (arg0) || TREE_SIDE_EFFECTS (arg1)))
6843 return 0;
6844
6845 /* It is preferable to swap two SSA_NAME to ensure a canonical form
6846 for commutative and comparison operators. Ensuring a canonical
6847 form allows the optimizers to find additional redundancies without
6848 having to explicitly check for both orderings. */
6849 if (TREE_CODE (arg0) == SSA_NAME
6850 && TREE_CODE (arg1) == SSA_NAME
6851 && SSA_NAME_VERSION (arg0) > SSA_NAME_VERSION (arg1))
6852 return 1;
6853
6854 /* Put SSA_NAMEs last. */
6855 if (TREE_CODE (arg1) == SSA_NAME)
6856 return 0;
6857 if (TREE_CODE (arg0) == SSA_NAME)
6858 return 1;
6859
6860 /* Put variables last. */
6861 if (DECL_P (arg1))
6862 return 0;
6863 if (DECL_P (arg0))
6864 return 1;
6865
6866 return 0;
6867 }
6868
6869
6870 /* Fold A < X && A + 1 > Y to A < X && A >= Y. Normally A + 1 > Y
6871 means A >= Y && A != MAX, but in this case we know that
6872 A < X <= MAX. INEQ is A + 1 > Y, BOUND is A < X. */
6873
6874 static tree
6875 fold_to_nonsharp_ineq_using_bound (location_t loc, tree ineq, tree bound)
6876 {
6877 tree a, typea, type = TREE_TYPE (ineq), a1, diff, y;
6878
6879 if (TREE_CODE (bound) == LT_EXPR)
6880 a = TREE_OPERAND (bound, 0);
6881 else if (TREE_CODE (bound) == GT_EXPR)
6882 a = TREE_OPERAND (bound, 1);
6883 else
6884 return NULL_TREE;
6885
6886 typea = TREE_TYPE (a);
6887 if (!INTEGRAL_TYPE_P (typea)
6888 && !POINTER_TYPE_P (typea))
6889 return NULL_TREE;
6890
6891 if (TREE_CODE (ineq) == LT_EXPR)
6892 {
6893 a1 = TREE_OPERAND (ineq, 1);
6894 y = TREE_OPERAND (ineq, 0);
6895 }
6896 else if (TREE_CODE (ineq) == GT_EXPR)
6897 {
6898 a1 = TREE_OPERAND (ineq, 0);
6899 y = TREE_OPERAND (ineq, 1);
6900 }
6901 else
6902 return NULL_TREE;
6903
6904 if (TREE_TYPE (a1) != typea)
6905 return NULL_TREE;
6906
6907 if (POINTER_TYPE_P (typea))
6908 {
6909 /* Convert the pointer types into integer before taking the difference. */
6910 tree ta = fold_convert_loc (loc, ssizetype, a);
6911 tree ta1 = fold_convert_loc (loc, ssizetype, a1);
6912 diff = fold_binary_loc (loc, MINUS_EXPR, ssizetype, ta1, ta);
6913 }
6914 else
6915 diff = fold_binary_loc (loc, MINUS_EXPR, typea, a1, a);
6916
6917 if (!diff || !integer_onep (diff))
6918 return NULL_TREE;
6919
6920 return fold_build2_loc (loc, GE_EXPR, type, a, y);
6921 }
6922
6923 /* Fold a sum or difference of at least one multiplication.
6924 Returns the folded tree or NULL if no simplification could be made. */
6925
6926 static tree
6927 fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
6928 tree arg0, tree arg1)
6929 {
6930 tree arg00, arg01, arg10, arg11;
6931 tree alt0 = NULL_TREE, alt1 = NULL_TREE, same;
6932
6933 /* (A * C) +- (B * C) -> (A+-B) * C.
6934 (A * C) +- A -> A * (C+-1).
6935 We are most concerned about the case where C is a constant,
6936 but other combinations show up during loop reduction. Since
6937 it is not difficult, try all four possibilities. */
6938
6939 if (TREE_CODE (arg0) == MULT_EXPR)
6940 {
6941 arg00 = TREE_OPERAND (arg0, 0);
6942 arg01 = TREE_OPERAND (arg0, 1);
6943 }
6944 else if (TREE_CODE (arg0) == INTEGER_CST)
6945 {
6946 arg00 = build_one_cst (type);
6947 arg01 = arg0;
6948 }
6949 else
6950 {
6951 /* We cannot generate constant 1 for fract. */
6952 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6953 return NULL_TREE;
6954 arg00 = arg0;
6955 arg01 = build_one_cst (type);
6956 }
6957 if (TREE_CODE (arg1) == MULT_EXPR)
6958 {
6959 arg10 = TREE_OPERAND (arg1, 0);
6960 arg11 = TREE_OPERAND (arg1, 1);
6961 }
6962 else if (TREE_CODE (arg1) == INTEGER_CST)
6963 {
6964 arg10 = build_one_cst (type);
6965 /* As we canonicalize A - 2 to A + -2 get rid of that sign for
6966 the purpose of this canonicalization. */
6967 if (wi::neg_p (arg1, TYPE_SIGN (TREE_TYPE (arg1)))
6968 && negate_expr_p (arg1)
6969 && code == PLUS_EXPR)
6970 {
6971 arg11 = negate_expr (arg1);
6972 code = MINUS_EXPR;
6973 }
6974 else
6975 arg11 = arg1;
6976 }
6977 else
6978 {
6979 /* We cannot generate constant 1 for fract. */
6980 if (ALL_FRACT_MODE_P (TYPE_MODE (type)))
6981 return NULL_TREE;
6982 arg10 = arg1;
6983 arg11 = build_one_cst (type);
6984 }
6985 same = NULL_TREE;
6986
6987 if (operand_equal_p (arg01, arg11, 0))
6988 same = arg01, alt0 = arg00, alt1 = arg10;
6989 else if (operand_equal_p (arg00, arg10, 0))
6990 same = arg00, alt0 = arg01, alt1 = arg11;
6991 else if (operand_equal_p (arg00, arg11, 0))
6992 same = arg00, alt0 = arg01, alt1 = arg10;
6993 else if (operand_equal_p (arg01, arg10, 0))
6994 same = arg01, alt0 = arg00, alt1 = arg11;
6995
6996 /* No identical multiplicands; see if we can find a common
6997 power-of-two factor in non-power-of-two multiplies. This
6998 can help in multi-dimensional array access. */
6999 else if (tree_fits_shwi_p (arg01)
7000 && tree_fits_shwi_p (arg11))
7001 {
7002 HOST_WIDE_INT int01, int11, tmp;
7003 bool swap = false;
7004 tree maybe_same;
7005 int01 = tree_to_shwi (arg01);
7006 int11 = tree_to_shwi (arg11);
7007
7008 /* Move min of absolute values to int11. */
7009 if (absu_hwi (int01) < absu_hwi (int11))
7010 {
7011 tmp = int01, int01 = int11, int11 = tmp;
7012 alt0 = arg00, arg00 = arg10, arg10 = alt0;
7013 maybe_same = arg01;
7014 swap = true;
7015 }
7016 else
7017 maybe_same = arg11;
7018
7019 if (exact_log2 (absu_hwi (int11)) > 0 && int01 % int11 == 0
7020 /* The remainder should not be a constant, otherwise we
7021 end up folding i * 4 + 2 to (i * 2 + 1) * 2 which has
7022 increased the number of multiplications necessary. */
7023 && TREE_CODE (arg10) != INTEGER_CST)
7024 {
7025 alt0 = fold_build2_loc (loc, MULT_EXPR, TREE_TYPE (arg00), arg00,
7026 build_int_cst (TREE_TYPE (arg00),
7027 int01 / int11));
7028 alt1 = arg10;
7029 same = maybe_same;
7030 if (swap)
7031 maybe_same = alt0, alt0 = alt1, alt1 = maybe_same;
7032 }
7033 }
7034
7035 if (same)
7036 return fold_build2_loc (loc, MULT_EXPR, type,
7037 fold_build2_loc (loc, code, type,
7038 fold_convert_loc (loc, type, alt0),
7039 fold_convert_loc (loc, type, alt1)),
7040 fold_convert_loc (loc, type, same));
7041
7042 return NULL_TREE;
7043 }
7044
7045 /* Subroutine of native_encode_expr. Encode the INTEGER_CST
7046 specified by EXPR into the buffer PTR of length LEN bytes.
7047 Return the number of bytes placed in the buffer, or zero
7048 upon failure. */
7049
7050 static int
7051 native_encode_int (const_tree expr, unsigned char *ptr, int len, int off)
7052 {
7053 tree type = TREE_TYPE (expr);
7054 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7055 int byte, offset, word, words;
7056 unsigned char value;
7057
7058 if ((off == -1 && total_bytes > len)
7059 || off >= total_bytes)
7060 return 0;
7061 if (off == -1)
7062 off = 0;
7063 words = total_bytes / UNITS_PER_WORD;
7064
7065 for (byte = 0; byte < total_bytes; byte++)
7066 {
7067 int bitpos = byte * BITS_PER_UNIT;
7068 /* Extend EXPR according to TYPE_SIGN if the precision isn't a whole
7069 number of bytes. */
7070 value = wi::extract_uhwi (wi::to_widest (expr), bitpos, BITS_PER_UNIT);
7071
7072 if (total_bytes > UNITS_PER_WORD)
7073 {
7074 word = byte / UNITS_PER_WORD;
7075 if (WORDS_BIG_ENDIAN)
7076 word = (words - 1) - word;
7077 offset = word * UNITS_PER_WORD;
7078 if (BYTES_BIG_ENDIAN)
7079 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7080 else
7081 offset += byte % UNITS_PER_WORD;
7082 }
7083 else
7084 offset = BYTES_BIG_ENDIAN ? (total_bytes - 1) - byte : byte;
7085 if (offset >= off
7086 && offset - off < len)
7087 ptr[offset - off] = value;
7088 }
7089 return MIN (len, total_bytes - off);
7090 }
7091
7092
7093 /* Subroutine of native_encode_expr. Encode the FIXED_CST
7094 specified by EXPR into the buffer PTR of length LEN bytes.
7095 Return the number of bytes placed in the buffer, or zero
7096 upon failure. */
7097
7098 static int
7099 native_encode_fixed (const_tree expr, unsigned char *ptr, int len, int off)
7100 {
7101 tree type = TREE_TYPE (expr);
7102 machine_mode mode = TYPE_MODE (type);
7103 int total_bytes = GET_MODE_SIZE (mode);
7104 FIXED_VALUE_TYPE value;
7105 tree i_value, i_type;
7106
7107 if (total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7108 return 0;
7109
7110 i_type = lang_hooks.types.type_for_size (GET_MODE_BITSIZE (mode), 1);
7111
7112 if (NULL_TREE == i_type
7113 || TYPE_PRECISION (i_type) != total_bytes)
7114 return 0;
7115
7116 value = TREE_FIXED_CST (expr);
7117 i_value = double_int_to_tree (i_type, value.data);
7118
7119 return native_encode_int (i_value, ptr, len, off);
7120 }
7121
7122
7123 /* Subroutine of native_encode_expr. Encode the REAL_CST
7124 specified by EXPR into the buffer PTR of length LEN bytes.
7125 Return the number of bytes placed in the buffer, or zero
7126 upon failure. */
7127
7128 static int
7129 native_encode_real (const_tree expr, unsigned char *ptr, int len, int off)
7130 {
7131 tree type = TREE_TYPE (expr);
7132 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7133 int byte, offset, word, words, bitpos;
7134 unsigned char value;
7135
7136 /* There are always 32 bits in each long, no matter the size of
7137 the hosts long. We handle floating point representations with
7138 up to 192 bits. */
7139 long tmp[6];
7140
7141 if ((off == -1 && total_bytes > len)
7142 || off >= total_bytes)
7143 return 0;
7144 if (off == -1)
7145 off = 0;
7146 words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7147
7148 real_to_target (tmp, TREE_REAL_CST_PTR (expr), TYPE_MODE (type));
7149
7150 for (bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7151 bitpos += BITS_PER_UNIT)
7152 {
7153 byte = (bitpos / BITS_PER_UNIT) & 3;
7154 value = (unsigned char) (tmp[bitpos / 32] >> (bitpos & 31));
7155
7156 if (UNITS_PER_WORD < 4)
7157 {
7158 word = byte / UNITS_PER_WORD;
7159 if (WORDS_BIG_ENDIAN)
7160 word = (words - 1) - word;
7161 offset = word * UNITS_PER_WORD;
7162 if (BYTES_BIG_ENDIAN)
7163 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7164 else
7165 offset += byte % UNITS_PER_WORD;
7166 }
7167 else
7168 offset = BYTES_BIG_ENDIAN ? 3 - byte : byte;
7169 offset = offset + ((bitpos / BITS_PER_UNIT) & ~3);
7170 if (offset >= off
7171 && offset - off < len)
7172 ptr[offset - off] = value;
7173 }
7174 return MIN (len, total_bytes - off);
7175 }
7176
7177 /* Subroutine of native_encode_expr. Encode the COMPLEX_CST
7178 specified by EXPR into the buffer PTR of length LEN bytes.
7179 Return the number of bytes placed in the buffer, or zero
7180 upon failure. */
7181
7182 static int
7183 native_encode_complex (const_tree expr, unsigned char *ptr, int len, int off)
7184 {
7185 int rsize, isize;
7186 tree part;
7187
7188 part = TREE_REALPART (expr);
7189 rsize = native_encode_expr (part, ptr, len, off);
7190 if (off == -1
7191 && rsize == 0)
7192 return 0;
7193 part = TREE_IMAGPART (expr);
7194 if (off != -1)
7195 off = MAX (0, off - GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (part))));
7196 isize = native_encode_expr (part, ptr+rsize, len-rsize, off);
7197 if (off == -1
7198 && isize != rsize)
7199 return 0;
7200 return rsize + isize;
7201 }
7202
7203
7204 /* Subroutine of native_encode_expr. Encode the VECTOR_CST
7205 specified by EXPR into the buffer PTR of length LEN bytes.
7206 Return the number of bytes placed in the buffer, or zero
7207 upon failure. */
7208
7209 static int
7210 native_encode_vector (const_tree expr, unsigned char *ptr, int len, int off)
7211 {
7212 unsigned i, count;
7213 int size, offset;
7214 tree itype, elem;
7215
7216 offset = 0;
7217 count = VECTOR_CST_NELTS (expr);
7218 itype = TREE_TYPE (TREE_TYPE (expr));
7219 size = GET_MODE_SIZE (TYPE_MODE (itype));
7220 for (i = 0; i < count; i++)
7221 {
7222 if (off >= size)
7223 {
7224 off -= size;
7225 continue;
7226 }
7227 elem = VECTOR_CST_ELT (expr, i);
7228 int res = native_encode_expr (elem, ptr+offset, len-offset, off);
7229 if ((off == -1 && res != size)
7230 || res == 0)
7231 return 0;
7232 offset += res;
7233 if (offset >= len)
7234 return offset;
7235 if (off != -1)
7236 off = 0;
7237 }
7238 return offset;
7239 }
7240
7241
7242 /* Subroutine of native_encode_expr. Encode the STRING_CST
7243 specified by EXPR into the buffer PTR of length LEN bytes.
7244 Return the number of bytes placed in the buffer, or zero
7245 upon failure. */
7246
7247 static int
7248 native_encode_string (const_tree expr, unsigned char *ptr, int len, int off)
7249 {
7250 tree type = TREE_TYPE (expr);
7251 HOST_WIDE_INT total_bytes;
7252
7253 if (TREE_CODE (type) != ARRAY_TYPE
7254 || TREE_CODE (TREE_TYPE (type)) != INTEGER_TYPE
7255 || GET_MODE_BITSIZE (TYPE_MODE (TREE_TYPE (type))) != BITS_PER_UNIT
7256 || !tree_fits_shwi_p (TYPE_SIZE_UNIT (type)))
7257 return 0;
7258 total_bytes = tree_to_shwi (TYPE_SIZE_UNIT (type));
7259 if ((off == -1 && total_bytes > len)
7260 || off >= total_bytes)
7261 return 0;
7262 if (off == -1)
7263 off = 0;
7264 if (TREE_STRING_LENGTH (expr) - off < MIN (total_bytes, len))
7265 {
7266 int written = 0;
7267 if (off < TREE_STRING_LENGTH (expr))
7268 {
7269 written = MIN (len, TREE_STRING_LENGTH (expr) - off);
7270 memcpy (ptr, TREE_STRING_POINTER (expr) + off, written);
7271 }
7272 memset (ptr + written, 0,
7273 MIN (total_bytes - written, len - written));
7274 }
7275 else
7276 memcpy (ptr, TREE_STRING_POINTER (expr) + off, MIN (total_bytes, len));
7277 return MIN (total_bytes - off, len);
7278 }
7279
7280
7281 /* Subroutine of fold_view_convert_expr. Encode the INTEGER_CST,
7282 REAL_CST, COMPLEX_CST or VECTOR_CST specified by EXPR into the
7283 buffer PTR of length LEN bytes. If OFF is not -1 then start
7284 the encoding at byte offset OFF and encode at most LEN bytes.
7285 Return the number of bytes placed in the buffer, or zero upon failure. */
7286
7287 int
7288 native_encode_expr (const_tree expr, unsigned char *ptr, int len, int off)
7289 {
7290 /* We don't support starting at negative offset and -1 is special. */
7291 if (off < -1)
7292 return 0;
7293
7294 switch (TREE_CODE (expr))
7295 {
7296 case INTEGER_CST:
7297 return native_encode_int (expr, ptr, len, off);
7298
7299 case REAL_CST:
7300 return native_encode_real (expr, ptr, len, off);
7301
7302 case FIXED_CST:
7303 return native_encode_fixed (expr, ptr, len, off);
7304
7305 case COMPLEX_CST:
7306 return native_encode_complex (expr, ptr, len, off);
7307
7308 case VECTOR_CST:
7309 return native_encode_vector (expr, ptr, len, off);
7310
7311 case STRING_CST:
7312 return native_encode_string (expr, ptr, len, off);
7313
7314 default:
7315 return 0;
7316 }
7317 }
7318
7319
7320 /* Subroutine of native_interpret_expr. Interpret the contents of
7321 the buffer PTR of length LEN as an INTEGER_CST of type TYPE.
7322 If the buffer cannot be interpreted, return NULL_TREE. */
7323
7324 static tree
7325 native_interpret_int (tree type, const unsigned char *ptr, int len)
7326 {
7327 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7328
7329 if (total_bytes > len
7330 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7331 return NULL_TREE;
7332
7333 wide_int result = wi::from_buffer (ptr, total_bytes);
7334
7335 return wide_int_to_tree (type, result);
7336 }
7337
7338
7339 /* Subroutine of native_interpret_expr. Interpret the contents of
7340 the buffer PTR of length LEN as a FIXED_CST of type TYPE.
7341 If the buffer cannot be interpreted, return NULL_TREE. */
7342
7343 static tree
7344 native_interpret_fixed (tree type, const unsigned char *ptr, int len)
7345 {
7346 int total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7347 double_int result;
7348 FIXED_VALUE_TYPE fixed_value;
7349
7350 if (total_bytes > len
7351 || total_bytes * BITS_PER_UNIT > HOST_BITS_PER_DOUBLE_INT)
7352 return NULL_TREE;
7353
7354 result = double_int::from_buffer (ptr, total_bytes);
7355 fixed_value = fixed_from_double_int (result, TYPE_MODE (type));
7356
7357 return build_fixed (type, fixed_value);
7358 }
7359
7360
7361 /* Subroutine of native_interpret_expr. Interpret the contents of
7362 the buffer PTR of length LEN as a REAL_CST of type TYPE.
7363 If the buffer cannot be interpreted, return NULL_TREE. */
7364
7365 static tree
7366 native_interpret_real (tree type, const unsigned char *ptr, int len)
7367 {
7368 machine_mode mode = TYPE_MODE (type);
7369 int total_bytes = GET_MODE_SIZE (mode);
7370 unsigned char value;
7371 /* There are always 32 bits in each long, no matter the size of
7372 the hosts long. We handle floating point representations with
7373 up to 192 bits. */
7374 REAL_VALUE_TYPE r;
7375 long tmp[6];
7376
7377 total_bytes = GET_MODE_SIZE (TYPE_MODE (type));
7378 if (total_bytes > len || total_bytes > 24)
7379 return NULL_TREE;
7380 int words = (32 / BITS_PER_UNIT) / UNITS_PER_WORD;
7381
7382 memset (tmp, 0, sizeof (tmp));
7383 for (int bitpos = 0; bitpos < total_bytes * BITS_PER_UNIT;
7384 bitpos += BITS_PER_UNIT)
7385 {
7386 /* Both OFFSET and BYTE index within a long;
7387 bitpos indexes the whole float. */
7388 int offset, byte = (bitpos / BITS_PER_UNIT) & 3;
7389 if (UNITS_PER_WORD < 4)
7390 {
7391 int word = byte / UNITS_PER_WORD;
7392 if (WORDS_BIG_ENDIAN)
7393 word = (words - 1) - word;
7394 offset = word * UNITS_PER_WORD;
7395 if (BYTES_BIG_ENDIAN)
7396 offset += (UNITS_PER_WORD - 1) - (byte % UNITS_PER_WORD);
7397 else
7398 offset += byte % UNITS_PER_WORD;
7399 }
7400 else
7401 {
7402 offset = byte;
7403 if (BYTES_BIG_ENDIAN)
7404 {
7405 /* Reverse bytes within each long, or within the entire float
7406 if it's smaller than a long (for HFmode). */
7407 offset = MIN (3, total_bytes - 1) - offset;
7408 gcc_assert (offset >= 0);
7409 }
7410 }
7411 value = ptr[offset + ((bitpos / BITS_PER_UNIT) & ~3)];
7412
7413 tmp[bitpos / 32] |= (unsigned long)value << (bitpos & 31);
7414 }
7415
7416 real_from_target (&r, tmp, mode);
7417 return build_real (type, r);
7418 }
7419
7420
7421 /* Subroutine of native_interpret_expr. Interpret the contents of
7422 the buffer PTR of length LEN as a COMPLEX_CST of type TYPE.
7423 If the buffer cannot be interpreted, return NULL_TREE. */
7424
7425 static tree
7426 native_interpret_complex (tree type, const unsigned char *ptr, int len)
7427 {
7428 tree etype, rpart, ipart;
7429 int size;
7430
7431 etype = TREE_TYPE (type);
7432 size = GET_MODE_SIZE (TYPE_MODE (etype));
7433 if (size * 2 > len)
7434 return NULL_TREE;
7435 rpart = native_interpret_expr (etype, ptr, size);
7436 if (!rpart)
7437 return NULL_TREE;
7438 ipart = native_interpret_expr (etype, ptr+size, size);
7439 if (!ipart)
7440 return NULL_TREE;
7441 return build_complex (type, rpart, ipart);
7442 }
7443
7444
7445 /* Subroutine of native_interpret_expr. Interpret the contents of
7446 the buffer PTR of length LEN as a VECTOR_CST of type TYPE.
7447 If the buffer cannot be interpreted, return NULL_TREE. */
7448
7449 static tree
7450 native_interpret_vector (tree type, const unsigned char *ptr, int len)
7451 {
7452 tree etype, elem;
7453 int i, size, count;
7454 tree *elements;
7455
7456 etype = TREE_TYPE (type);
7457 size = GET_MODE_SIZE (TYPE_MODE (etype));
7458 count = TYPE_VECTOR_SUBPARTS (type);
7459 if (size * count > len)
7460 return NULL_TREE;
7461
7462 elements = XALLOCAVEC (tree, count);
7463 for (i = count - 1; i >= 0; i--)
7464 {
7465 elem = native_interpret_expr (etype, ptr+(i*size), size);
7466 if (!elem)
7467 return NULL_TREE;
7468 elements[i] = elem;
7469 }
7470 return build_vector (type, elements);
7471 }
7472
7473
7474 /* Subroutine of fold_view_convert_expr. Interpret the contents of
7475 the buffer PTR of length LEN as a constant of type TYPE. For
7476 INTEGRAL_TYPE_P we return an INTEGER_CST, for SCALAR_FLOAT_TYPE_P
7477 we return a REAL_CST, etc... If the buffer cannot be interpreted,
7478 return NULL_TREE. */
7479
7480 tree
7481 native_interpret_expr (tree type, const unsigned char *ptr, int len)
7482 {
7483 switch (TREE_CODE (type))
7484 {
7485 case INTEGER_TYPE:
7486 case ENUMERAL_TYPE:
7487 case BOOLEAN_TYPE:
7488 case POINTER_TYPE:
7489 case REFERENCE_TYPE:
7490 return native_interpret_int (type, ptr, len);
7491
7492 case REAL_TYPE:
7493 return native_interpret_real (type, ptr, len);
7494
7495 case FIXED_POINT_TYPE:
7496 return native_interpret_fixed (type, ptr, len);
7497
7498 case COMPLEX_TYPE:
7499 return native_interpret_complex (type, ptr, len);
7500
7501 case VECTOR_TYPE:
7502 return native_interpret_vector (type, ptr, len);
7503
7504 default:
7505 return NULL_TREE;
7506 }
7507 }
7508
7509 /* Returns true if we can interpret the contents of a native encoding
7510 as TYPE. */
7511
7512 static bool
7513 can_native_interpret_type_p (tree type)
7514 {
7515 switch (TREE_CODE (type))
7516 {
7517 case INTEGER_TYPE:
7518 case ENUMERAL_TYPE:
7519 case BOOLEAN_TYPE:
7520 case POINTER_TYPE:
7521 case REFERENCE_TYPE:
7522 case FIXED_POINT_TYPE:
7523 case REAL_TYPE:
7524 case COMPLEX_TYPE:
7525 case VECTOR_TYPE:
7526 return true;
7527 default:
7528 return false;
7529 }
7530 }
7531
7532 /* Fold a VIEW_CONVERT_EXPR of a constant expression EXPR to type
7533 TYPE at compile-time. If we're unable to perform the conversion
7534 return NULL_TREE. */
7535
7536 static tree
7537 fold_view_convert_expr (tree type, tree expr)
7538 {
7539 /* We support up to 512-bit values (for V8DFmode). */
7540 unsigned char buffer[64];
7541 int len;
7542
7543 /* Check that the host and target are sane. */
7544 if (CHAR_BIT != 8 || BITS_PER_UNIT != 8)
7545 return NULL_TREE;
7546
7547 len = native_encode_expr (expr, buffer, sizeof (buffer));
7548 if (len == 0)
7549 return NULL_TREE;
7550
7551 return native_interpret_expr (type, buffer, len);
7552 }
7553
7554 /* Build an expression for the address of T. Folds away INDIRECT_REF
7555 to avoid confusing the gimplify process. */
7556
7557 tree
7558 build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
7559 {
7560 /* The size of the object is not relevant when talking about its address. */
7561 if (TREE_CODE (t) == WITH_SIZE_EXPR)
7562 t = TREE_OPERAND (t, 0);
7563
7564 if (TREE_CODE (t) == INDIRECT_REF)
7565 {
7566 t = TREE_OPERAND (t, 0);
7567
7568 if (TREE_TYPE (t) != ptrtype)
7569 t = build1_loc (loc, NOP_EXPR, ptrtype, t);
7570 }
7571 else if (TREE_CODE (t) == MEM_REF
7572 && integer_zerop (TREE_OPERAND (t, 1)))
7573 return TREE_OPERAND (t, 0);
7574 else if (TREE_CODE (t) == MEM_REF
7575 && TREE_CODE (TREE_OPERAND (t, 0)) == INTEGER_CST)
7576 return fold_binary (POINTER_PLUS_EXPR, ptrtype,
7577 TREE_OPERAND (t, 0),
7578 convert_to_ptrofftype (TREE_OPERAND (t, 1)));
7579 else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
7580 {
7581 t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
7582
7583 if (TREE_TYPE (t) != ptrtype)
7584 t = fold_convert_loc (loc, ptrtype, t);
7585 }
7586 else
7587 t = build1_loc (loc, ADDR_EXPR, ptrtype, t);
7588
7589 return t;
7590 }
7591
7592 /* Build an expression for the address of T. */
7593
7594 tree
7595 build_fold_addr_expr_loc (location_t loc, tree t)
7596 {
7597 tree ptrtype = build_pointer_type (TREE_TYPE (t));
7598
7599 return build_fold_addr_expr_with_type_loc (loc, t, ptrtype);
7600 }
7601
7602 /* Fold a unary expression of code CODE and type TYPE with operand
7603 OP0. Return the folded expression if folding is successful.
7604 Otherwise, return NULL_TREE. */
7605
7606 tree
7607 fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
7608 {
7609 tree tem;
7610 tree arg0;
7611 enum tree_code_class kind = TREE_CODE_CLASS (code);
7612
7613 gcc_assert (IS_EXPR_CODE_CLASS (kind)
7614 && TREE_CODE_LENGTH (code) == 1);
7615
7616 arg0 = op0;
7617 if (arg0)
7618 {
7619 if (CONVERT_EXPR_CODE_P (code)
7620 || code == FLOAT_EXPR || code == ABS_EXPR || code == NEGATE_EXPR)
7621 {
7622 /* Don't use STRIP_NOPS, because signedness of argument type
7623 matters. */
7624 STRIP_SIGN_NOPS (arg0);
7625 }
7626 else
7627 {
7628 /* Strip any conversions that don't change the mode. This
7629 is safe for every expression, except for a comparison
7630 expression because its signedness is derived from its
7631 operands.
7632
7633 Note that this is done as an internal manipulation within
7634 the constant folder, in order to find the simplest
7635 representation of the arguments so that their form can be
7636 studied. In any cases, the appropriate type conversions
7637 should be put back in the tree that will get out of the
7638 constant folder. */
7639 STRIP_NOPS (arg0);
7640 }
7641
7642 if (CONSTANT_CLASS_P (arg0))
7643 {
7644 tree tem = const_unop (code, type, arg0);
7645 if (tem)
7646 {
7647 if (TREE_TYPE (tem) != type)
7648 tem = fold_convert_loc (loc, type, tem);
7649 return tem;
7650 }
7651 }
7652 }
7653
7654 tem = generic_simplify (loc, code, type, op0);
7655 if (tem)
7656 return tem;
7657
7658 if (TREE_CODE_CLASS (code) == tcc_unary)
7659 {
7660 if (TREE_CODE (arg0) == COMPOUND_EXPR)
7661 return build2 (COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
7662 fold_build1_loc (loc, code, type,
7663 fold_convert_loc (loc, TREE_TYPE (op0),
7664 TREE_OPERAND (arg0, 1))));
7665 else if (TREE_CODE (arg0) == COND_EXPR)
7666 {
7667 tree arg01 = TREE_OPERAND (arg0, 1);
7668 tree arg02 = TREE_OPERAND (arg0, 2);
7669 if (! VOID_TYPE_P (TREE_TYPE (arg01)))
7670 arg01 = fold_build1_loc (loc, code, type,
7671 fold_convert_loc (loc,
7672 TREE_TYPE (op0), arg01));
7673 if (! VOID_TYPE_P (TREE_TYPE (arg02)))
7674 arg02 = fold_build1_loc (loc, code, type,
7675 fold_convert_loc (loc,
7676 TREE_TYPE (op0), arg02));
7677 tem = fold_build3_loc (loc, COND_EXPR, type, TREE_OPERAND (arg0, 0),
7678 arg01, arg02);
7679
7680 /* If this was a conversion, and all we did was to move into
7681 inside the COND_EXPR, bring it back out. But leave it if
7682 it is a conversion from integer to integer and the
7683 result precision is no wider than a word since such a
7684 conversion is cheap and may be optimized away by combine,
7685 while it couldn't if it were outside the COND_EXPR. Then return
7686 so we don't get into an infinite recursion loop taking the
7687 conversion out and then back in. */
7688
7689 if ((CONVERT_EXPR_CODE_P (code)
7690 || code == NON_LVALUE_EXPR)
7691 && TREE_CODE (tem) == COND_EXPR
7692 && TREE_CODE (TREE_OPERAND (tem, 1)) == code
7693 && TREE_CODE (TREE_OPERAND (tem, 2)) == code
7694 && ! VOID_TYPE_P (TREE_OPERAND (tem, 1))
7695 && ! VOID_TYPE_P (TREE_OPERAND (tem, 2))
7696 && (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))
7697 == TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 2), 0)))
7698 && (! (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7699 && (INTEGRAL_TYPE_P
7700 (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (tem, 1), 0))))
7701 && TYPE_PRECISION (TREE_TYPE (tem)) <= BITS_PER_WORD)
7702 || flag_syntax_only))
7703 tem = build1_loc (loc, code, type,
7704 build3 (COND_EXPR,
7705 TREE_TYPE (TREE_OPERAND
7706 (TREE_OPERAND (tem, 1), 0)),
7707 TREE_OPERAND (tem, 0),
7708 TREE_OPERAND (TREE_OPERAND (tem, 1), 0),
7709 TREE_OPERAND (TREE_OPERAND (tem, 2),
7710 0)));
7711 return tem;
7712 }
7713 }
7714
7715 switch (code)
7716 {
7717 case NON_LVALUE_EXPR:
7718 if (!maybe_lvalue_p (op0))
7719 return fold_convert_loc (loc, type, op0);
7720 return NULL_TREE;
7721
7722 CASE_CONVERT:
7723 case FLOAT_EXPR:
7724 case FIX_TRUNC_EXPR:
7725 if (COMPARISON_CLASS_P (op0))
7726 {
7727 /* If we have (type) (a CMP b) and type is an integral type, return
7728 new expression involving the new type. Canonicalize
7729 (type) (a CMP b) to (a CMP b) ? (type) true : (type) false for
7730 non-integral type.
7731 Do not fold the result as that would not simplify further, also
7732 folding again results in recursions. */
7733 if (TREE_CODE (type) == BOOLEAN_TYPE)
7734 return build2_loc (loc, TREE_CODE (op0), type,
7735 TREE_OPERAND (op0, 0),
7736 TREE_OPERAND (op0, 1));
7737 else if (!INTEGRAL_TYPE_P (type) && !VOID_TYPE_P (type)
7738 && TREE_CODE (type) != VECTOR_TYPE)
7739 return build3_loc (loc, COND_EXPR, type, op0,
7740 constant_boolean_node (true, type),
7741 constant_boolean_node (false, type));
7742 }
7743
7744 /* Handle (T *)&A.B.C for A being of type T and B and C
7745 living at offset zero. This occurs frequently in
7746 C++ upcasting and then accessing the base. */
7747 if (TREE_CODE (op0) == ADDR_EXPR
7748 && POINTER_TYPE_P (type)
7749 && handled_component_p (TREE_OPERAND (op0, 0)))
7750 {
7751 HOST_WIDE_INT bitsize, bitpos;
7752 tree offset;
7753 machine_mode mode;
7754 int unsignedp, reversep, volatilep;
7755 tree base
7756 = get_inner_reference (TREE_OPERAND (op0, 0), &bitsize, &bitpos,
7757 &offset, &mode, &unsignedp, &reversep,
7758 &volatilep, false);
7759 /* If the reference was to a (constant) zero offset, we can use
7760 the address of the base if it has the same base type
7761 as the result type and the pointer type is unqualified. */
7762 if (! offset && bitpos == 0
7763 && (TYPE_MAIN_VARIANT (TREE_TYPE (type))
7764 == TYPE_MAIN_VARIANT (TREE_TYPE (base)))
7765 && TYPE_QUALS (type) == TYPE_UNQUALIFIED)
7766 return fold_convert_loc (loc, type,
7767 build_fold_addr_expr_loc (loc, base));
7768 }
7769
7770 if (TREE_CODE (op0) == MODIFY_EXPR
7771 && TREE_CONSTANT (TREE_OPERAND (op0, 1))
7772 /* Detect assigning a bitfield. */
7773 && !(TREE_CODE (TREE_OPERAND (op0, 0)) == COMPONENT_REF
7774 && DECL_BIT_FIELD
7775 (TREE_OPERAND (TREE_OPERAND (op0, 0), 1))))
7776 {
7777 /* Don't leave an assignment inside a conversion
7778 unless assigning a bitfield. */
7779 tem = fold_build1_loc (loc, code, type, TREE_OPERAND (op0, 1));
7780 /* First do the assignment, then return converted constant. */
7781 tem = build2_loc (loc, COMPOUND_EXPR, TREE_TYPE (tem), op0, tem);
7782 TREE_NO_WARNING (tem) = 1;
7783 TREE_USED (tem) = 1;
7784 return tem;
7785 }
7786
7787 /* Convert (T)(x & c) into (T)x & (T)c, if c is an integer
7788 constants (if x has signed type, the sign bit cannot be set
7789 in c). This folds extension into the BIT_AND_EXPR.
7790 ??? We don't do it for BOOLEAN_TYPE or ENUMERAL_TYPE because they
7791 very likely don't have maximal range for their precision and this
7792 transformation effectively doesn't preserve non-maximal ranges. */
7793 if (TREE_CODE (type) == INTEGER_TYPE
7794 && TREE_CODE (op0) == BIT_AND_EXPR
7795 && TREE_CODE (TREE_OPERAND (op0, 1)) == INTEGER_CST)
7796 {
7797 tree and_expr = op0;
7798 tree and0 = TREE_OPERAND (and_expr, 0);
7799 tree and1 = TREE_OPERAND (and_expr, 1);
7800 int change = 0;
7801
7802 if (TYPE_UNSIGNED (TREE_TYPE (and_expr))
7803 || (TYPE_PRECISION (type)
7804 <= TYPE_PRECISION (TREE_TYPE (and_expr))))
7805 change = 1;
7806 else if (TYPE_PRECISION (TREE_TYPE (and1))
7807 <= HOST_BITS_PER_WIDE_INT
7808 && tree_fits_uhwi_p (and1))
7809 {
7810 unsigned HOST_WIDE_INT cst;
7811
7812 cst = tree_to_uhwi (and1);
7813 cst &= HOST_WIDE_INT_M1U
7814 << (TYPE_PRECISION (TREE_TYPE (and1)) - 1);
7815 change = (cst == 0);
7816 if (change
7817 && !flag_syntax_only
7818 && (LOAD_EXTEND_OP (TYPE_MODE (TREE_TYPE (and0)))
7819 == ZERO_EXTEND))
7820 {
7821 tree uns = unsigned_type_for (TREE_TYPE (and0));
7822 and0 = fold_convert_loc (loc, uns, and0);
7823 and1 = fold_convert_loc (loc, uns, and1);
7824 }
7825 }
7826 if (change)
7827 {
7828 tem = force_fit_type (type, wi::to_widest (and1), 0,
7829 TREE_OVERFLOW (and1));
7830 return fold_build2_loc (loc, BIT_AND_EXPR, type,
7831 fold_convert_loc (loc, type, and0), tem);
7832 }
7833 }
7834
7835 /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type, when the new
7836 cast (T1)X will fold away. We assume that this happens when X itself
7837 is a cast. */
7838 if (POINTER_TYPE_P (type)
7839 && TREE_CODE (arg0) == POINTER_PLUS_EXPR
7840 && CONVERT_EXPR_P (TREE_OPERAND (arg0, 0)))
7841 {
7842 tree arg00 = TREE_OPERAND (arg0, 0);
7843 tree arg01 = TREE_OPERAND (arg0, 1);
7844
7845 return fold_build_pointer_plus_loc
7846 (loc, fold_convert_loc (loc, type, arg00), arg01);
7847 }
7848
7849 /* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
7850 of the same precision, and X is an integer type not narrower than
7851 types T1 or T2, i.e. the cast (T2)X isn't an extension. */
7852 if (INTEGRAL_TYPE_P (type)
7853 && TREE_CODE (op0) == BIT_NOT_EXPR
7854 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7855 && CONVERT_EXPR_P (TREE_OPERAND (op0, 0))
7856 && TYPE_PRECISION (type) == TYPE_PRECISION (TREE_TYPE (op0)))
7857 {
7858 tem = TREE_OPERAND (TREE_OPERAND (op0, 0), 0);
7859 if (INTEGRAL_TYPE_P (TREE_TYPE (tem))
7860 && TYPE_PRECISION (type) <= TYPE_PRECISION (TREE_TYPE (tem)))
7861 return fold_build1_loc (loc, BIT_NOT_EXPR, type,
7862 fold_convert_loc (loc, type, tem));
7863 }
7864
7865 /* Convert (T1)(X * Y) into (T1)X * (T1)Y if T1 is narrower than the
7866 type of X and Y (integer types only). */
7867 if (INTEGRAL_TYPE_P (type)
7868 && TREE_CODE (op0) == MULT_EXPR
7869 && INTEGRAL_TYPE_P (TREE_TYPE (op0))
7870 && TYPE_PRECISION (type) < TYPE_PRECISION (TREE_TYPE (op0)))
7871 {
7872 /* Be careful not to introduce new overflows. */
7873 tree mult_type;
7874 if (TYPE_OVERFLOW_WRAPS (type))
7875 mult_type = type;
7876 else
7877 mult_type = unsigned_type_for (type);
7878
7879 if (TYPE_PRECISION (mult_type) < TYPE_PRECISION (TREE_TYPE (op0)))
7880 {
7881 tem = fold_build2_loc (loc, MULT_EXPR, mult_type,
7882 fold_convert_loc (loc, mult_type,
7883 TREE_OPERAND (op0, 0)),
7884 fold_convert_loc (loc, mult_type,
7885 TREE_OPERAND (op0, 1)));
7886 return fold_convert_loc (loc, type, tem);
7887 }
7888 }
7889
7890 return NULL_TREE;
7891
7892 case VIEW_CONVERT_EXPR:
7893 if (TREE_CODE (op0) == MEM_REF)
7894 {
7895 tem = fold_build2_loc (loc, MEM_REF, type,
7896 TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
7897 REF_REVERSE_STORAGE_ORDER (tem) = REF_REVERSE_STORAGE_ORDER (op0);
7898 return tem;
7899 }
7900
7901 return NULL_TREE;
7902
7903 case NEGATE_EXPR:
7904 tem = fold_negate_expr (loc, arg0);
7905 if (tem)
7906 return fold_convert_loc (loc, type, tem);
7907 return NULL_TREE;
7908
7909 case ABS_EXPR:
7910 /* Convert fabs((double)float) into (double)fabsf(float). */
7911 if (TREE_CODE (arg0) == NOP_EXPR
7912 && TREE_CODE (type) == REAL_TYPE)
7913 {
7914 tree targ0 = strip_float_extensions (arg0);
7915 if (targ0 != arg0)
7916 return fold_convert_loc (loc, type,
7917 fold_build1_loc (loc, ABS_EXPR,
7918 TREE_TYPE (targ0),
7919 targ0));
7920 }
7921 return NULL_TREE;
7922
7923 case BIT_NOT_EXPR:
7924 /* Convert ~(X ^ Y) to ~X ^ Y or X ^ ~Y if ~X or ~Y simplify. */
7925 if (TREE_CODE (arg0) == BIT_XOR_EXPR
7926 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7927 fold_convert_loc (loc, type,
7928 TREE_OPERAND (arg0, 0)))))
7929 return fold_build2_loc (loc, BIT_XOR_EXPR, type, tem,
7930 fold_convert_loc (loc, type,
7931 TREE_OPERAND (arg0, 1)));
7932 else if (TREE_CODE (arg0) == BIT_XOR_EXPR
7933 && (tem = fold_unary_loc (loc, BIT_NOT_EXPR, type,
7934 fold_convert_loc (loc, type,
7935 TREE_OPERAND (arg0, 1)))))
7936 return fold_build2_loc (loc, BIT_XOR_EXPR, type,
7937 fold_convert_loc (loc, type,
7938 TREE_OPERAND (arg0, 0)), tem);
7939
7940 return NULL_TREE;
7941
7942 case TRUTH_NOT_EXPR:
7943 /* Note that the operand of this must be an int
7944 and its values must be 0 or 1.
7945 ("true" is a fixed value perhaps depending on the language,
7946 but we don't handle values other than 1 correctly yet.) */
7947 tem = fold_truth_not_expr (loc, arg0);
7948 if (!tem)
7949 return NULL_TREE;
7950 return fold_convert_loc (loc, type, tem);
7951
7952 case INDIRECT_REF:
7953 /* Fold *&X to X if X is an lvalue. */
7954 if (TREE_CODE (op0) == ADDR_EXPR)
7955 {
7956 tree op00 = TREE_OPERAND (op0, 0);
7957 if ((TREE_CODE (op00) == VAR_DECL
7958 || TREE_CODE (op00) == PARM_DECL
7959 || TREE_CODE (op00) == RESULT_DECL)
7960 && !TREE_READONLY (op00))
7961 return op00;
7962 }
7963 return NULL_TREE;
7964
7965 default:
7966 return NULL_TREE;
7967 } /* switch (code) */
7968 }
7969
7970
7971 /* If the operation was a conversion do _not_ mark a resulting constant
7972 with TREE_OVERFLOW if the original constant was not. These conversions
7973 have implementation defined behavior and retaining the TREE_OVERFLOW
7974 flag here would confuse later passes such as VRP. */
7975 tree
7976 fold_unary_ignore_overflow_loc (location_t loc, enum tree_code code,
7977 tree type, tree op0)
7978 {
7979 tree res = fold_unary_loc (loc, code, type, op0);
7980 if (res
7981 && TREE_CODE (res) == INTEGER_CST
7982 && TREE_CODE (op0) == INTEGER_CST
7983 && CONVERT_EXPR_CODE_P (code))
7984 TREE_OVERFLOW (res) = TREE_OVERFLOW (op0);
7985
7986 return res;
7987 }
7988
7989 /* Fold a binary bitwise/truth expression of code CODE and type TYPE with
7990 operands OP0 and OP1. LOC is the location of the resulting expression.
7991 ARG0 and ARG1 are the NOP_STRIPed results of OP0 and OP1.
7992 Return the folded expression if folding is successful. Otherwise,
7993 return NULL_TREE. */
7994 static tree
7995 fold_truth_andor (location_t loc, enum tree_code code, tree type,
7996 tree arg0, tree arg1, tree op0, tree op1)
7997 {
7998 tree tem;
7999
8000 /* We only do these simplifications if we are optimizing. */
8001 if (!optimize)
8002 return NULL_TREE;
8003
8004 /* Check for things like (A || B) && (A || C). We can convert this
8005 to A || (B && C). Note that either operator can be any of the four
8006 truth and/or operations and the transformation will still be
8007 valid. Also note that we only care about order for the
8008 ANDIF and ORIF operators. If B contains side effects, this
8009 might change the truth-value of A. */
8010 if (TREE_CODE (arg0) == TREE_CODE (arg1)
8011 && (TREE_CODE (arg0) == TRUTH_ANDIF_EXPR
8012 || TREE_CODE (arg0) == TRUTH_ORIF_EXPR
8013 || TREE_CODE (arg0) == TRUTH_AND_EXPR
8014 || TREE_CODE (arg0) == TRUTH_OR_EXPR)
8015 && ! TREE_SIDE_EFFECTS (TREE_OPERAND (arg0, 1)))
8016 {
8017 tree a00 = TREE_OPERAND (arg0, 0);
8018 tree a01 = TREE_OPERAND (arg0, 1);
8019 tree a10 = TREE_OPERAND (arg1, 0);
8020 tree a11 = TREE_OPERAND (arg1, 1);
8021 int commutative = ((TREE_CODE (arg0) == TRUTH_OR_EXPR
8022 || TREE_CODE (arg0) == TRUTH_AND_EXPR)
8023 && (code == TRUTH_AND_EXPR
8024 || code == TRUTH_OR_EXPR));
8025
8026 if (operand_equal_p (a00, a10, 0))
8027 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8028 fold_build2_loc (loc, code, type, a01, a11));
8029 else if (commutative && operand_equal_p (a00, a11, 0))
8030 return fold_build2_loc (loc, TREE_CODE (arg0), type, a00,
8031 fold_build2_loc (loc, code, type, a01, a10));
8032 else if (commutative && operand_equal_p (a01, a10, 0))
8033 return fold_build2_loc (loc, TREE_CODE (arg0), type, a01,
8034 fold_build2_loc (loc, code, type, a00, a11));
8035
8036 /* This case if tricky because we must either have commutative
8037 operators or else A10 must not have side-effects. */
8038
8039 else if ((commutative || ! TREE_SIDE_EFFECTS (a10))
8040 && operand_equal_p (a01, a11, 0))
8041 return fold_build2_loc (loc, TREE_CODE (arg0), type,
8042 fold_build2_loc (loc, code, type, a00, a10),
8043 a01);
8044 }
8045
8046 /* See if we can build a range comparison. */
8047 if (0 != (tem = fold_range_test (loc, code, type, op0, op1)))
8048 return tem;
8049
8050 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg0) == TRUTH_ORIF_EXPR)
8051 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg0) == TRUTH_ANDIF_EXPR))
8052 {
8053 tem = merge_truthop_with_opposite_arm (loc, arg0, arg1, true);
8054 if (tem)
8055 return fold_build2_loc (loc, code, type, tem, arg1);
8056 }
8057
8058 if ((code == TRUTH_ANDIF_EXPR && TREE_CODE (arg1) == TRUTH_ORIF_EXPR)
8059 || (code == TRUTH_ORIF_EXPR && TREE_CODE (arg1) == TRUTH_ANDIF_EXPR))
8060 {
8061 tem = merge_truthop_with_opposite_arm (loc, arg1, arg0, false);
8062 if (tem)
8063 return fold_build2_loc (loc, code, type, arg0, tem);
8064 }
8065
8066 /* Check for the possibility of merging component references. If our
8067 lhs is another similar operation, try to merge its rhs with our
8068 rhs. Then try to merge our lhs and rhs. */
8069 if (TREE_CODE (arg0) == code
8070 && 0 != (tem = fold_truth_andor_1 (loc, code, type,
8071 TREE_OPERAND (arg0, 1), arg1)))
8072 return fold_build2_loc (loc, code, type, TREE_OPERAND (arg0, 0), tem);
8073
8074 if ((tem = fold_truth_andor_1 (loc, code, type, arg0, arg1)) != 0)
8075 return tem;
8076
8077 if (LOGICAL_OP_NON_SHORT_CIRCUIT
8078 && (code == TRUTH_AND_EXPR
8079 || code == TRUTH_ANDIF_EXPR
8080 || code == TRUTH_OR_EXPR
8081 || code == TRUTH_ORIF_EXPR))
8082 {
8083 enum tree_code ncode, icode;
8084
8085 ncode = (code == TRUTH_ANDIF_EXPR || code == TRUTH_AND_EXPR)
8086 ? TRUTH_AND_EXPR : TRUTH_OR_EXPR;
8087 icode = ncode == TRUTH_AND_EXPR ? TRUTH_ANDIF_EXPR : TRUTH_ORIF_EXPR;
8088
8089 /* Transform ((A AND-IF B) AND[-IF] C) into (A AND-IF (B AND C)),
8090 or ((A OR-IF B) OR[-IF] C) into (A OR-IF (B OR C))
8091 We don't want to pack more than two leafs to a non-IF AND/OR
8092 expression.
8093 If tree-code of left-hand operand isn't an AND/OR-IF code and not
8094 equal to IF-CODE, then we don't want to add right-hand operand.
8095 If the inner right-hand side of left-hand operand has
8096 side-effects, or isn't simple, then we can't add to it,
8097 as otherwise we might destroy if-sequence. */
8098 if (TREE_CODE (arg0) == icode
8099 && simple_operand_p_2 (arg1)
8100 /* Needed for sequence points to handle trappings, and
8101 side-effects. */
8102 && simple_operand_p_2 (TREE_OPERAND (arg0, 1)))
8103 {
8104 tem = fold_build2_loc (loc, ncode, type, TREE_OPERAND (arg0, 1),
8105 arg1);
8106 return fold_build2_loc (loc, icode, type, TREE_OPERAND (arg0, 0),
8107 tem);
8108 }
8109 /* Same as abouve but for (A AND[-IF] (B AND-IF C)) -> ((A AND B) AND-IF C),
8110 or (A OR[-IF] (B OR-IF C) -> ((A OR B) OR-IF C). */
8111 else if (TREE_CODE (arg1) == icode
8112 && simple_operand_p_2 (arg0)
8113 /* Needed for sequence points to handle trappings, and
8114 side-effects. */
8115 && simple_operand_p_2 (TREE_OPERAND (arg1, 0)))
8116 {
8117 tem = fold_build2_loc (loc, ncode, type,
8118 arg0, TREE_OPERAND (arg1, 0));
8119 return fold_build2_loc (loc, icode, type, tem,
8120 TREE_OPERAND (arg1, 1));
8121 }
8122 /* Transform (A AND-IF B) into (A AND B), or (A OR-IF B)
8123 into (A OR B).
8124 For sequence point consistancy, we need to check for trapping,
8125 and side-effects. */
8126 else if (code == icode && simple_operand_p_2 (arg0)
8127 && simple_operand_p_2 (arg1))
8128 return fold_build2_loc (loc, ncode, type, arg0, arg1);
8129 }
8130
8131 return NULL_TREE;
8132 }
8133
8134 /* Helper that tries to canonicalize the comparison ARG0 CODE ARG1
8135 by changing CODE to reduce the magnitude of constants involved in
8136 ARG0 of the comparison.
8137 Returns a canonicalized comparison tree if a simplification was
8138 possible, otherwise returns NULL_TREE.
8139 Set *STRICT_OVERFLOW_P to true if the canonicalization is only
8140 valid if signed overflow is undefined. */
8141
8142 static tree
8143 maybe_canonicalize_comparison_1 (location_t loc, enum tree_code code, tree type,
8144 tree arg0, tree arg1,
8145 bool *strict_overflow_p)
8146 {
8147 enum tree_code code0 = TREE_CODE (arg0);
8148 tree t, cst0 = NULL_TREE;
8149 int sgn0;
8150
8151 /* Match A +- CST code arg1. We can change this only if overflow
8152 is undefined. */
8153 if (!((ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8154 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0)))
8155 /* In principle pointers also have undefined overflow behavior,
8156 but that causes problems elsewhere. */
8157 && !POINTER_TYPE_P (TREE_TYPE (arg0))
8158 && (code0 == MINUS_EXPR
8159 || code0 == PLUS_EXPR)
8160 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST))
8161 return NULL_TREE;
8162
8163 /* Identify the constant in arg0 and its sign. */
8164 cst0 = TREE_OPERAND (arg0, 1);
8165 sgn0 = tree_int_cst_sgn (cst0);
8166
8167 /* Overflowed constants and zero will cause problems. */
8168 if (integer_zerop (cst0)
8169 || TREE_OVERFLOW (cst0))
8170 return NULL_TREE;
8171
8172 /* See if we can reduce the magnitude of the constant in
8173 arg0 by changing the comparison code. */
8174 /* A - CST < arg1 -> A - CST-1 <= arg1. */
8175 if (code == LT_EXPR
8176 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8177 code = LE_EXPR;
8178 /* A + CST > arg1 -> A + CST-1 >= arg1. */
8179 else if (code == GT_EXPR
8180 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8181 code = GE_EXPR;
8182 /* A + CST <= arg1 -> A + CST-1 < arg1. */
8183 else if (code == LE_EXPR
8184 && code0 == ((sgn0 == -1) ? MINUS_EXPR : PLUS_EXPR))
8185 code = LT_EXPR;
8186 /* A - CST >= arg1 -> A - CST-1 > arg1. */
8187 else if (code == GE_EXPR
8188 && code0 == ((sgn0 == -1) ? PLUS_EXPR : MINUS_EXPR))
8189 code = GT_EXPR;
8190 else
8191 return NULL_TREE;
8192 *strict_overflow_p = true;
8193
8194 /* Now build the constant reduced in magnitude. But not if that
8195 would produce one outside of its types range. */
8196 if (INTEGRAL_TYPE_P (TREE_TYPE (cst0))
8197 && ((sgn0 == 1
8198 && TYPE_MIN_VALUE (TREE_TYPE (cst0))
8199 && tree_int_cst_equal (cst0, TYPE_MIN_VALUE (TREE_TYPE (cst0))))
8200 || (sgn0 == -1
8201 && TYPE_MAX_VALUE (TREE_TYPE (cst0))
8202 && tree_int_cst_equal (cst0, TYPE_MAX_VALUE (TREE_TYPE (cst0))))))
8203 return NULL_TREE;
8204
8205 t = int_const_binop (sgn0 == -1 ? PLUS_EXPR : MINUS_EXPR,
8206 cst0, build_int_cst (TREE_TYPE (cst0), 1));
8207 t = fold_build2_loc (loc, code0, TREE_TYPE (arg0), TREE_OPERAND (arg0, 0), t);
8208 t = fold_convert (TREE_TYPE (arg1), t);
8209
8210 return fold_build2_loc (loc, code, type, t, arg1);
8211 }
8212
8213 /* Canonicalize the comparison ARG0 CODE ARG1 with type TYPE with undefined
8214 overflow further. Try to decrease the magnitude of constants involved
8215 by changing LE_EXPR and GE_EXPR to LT_EXPR and GT_EXPR or vice versa
8216 and put sole constants at the second argument position.
8217 Returns the canonicalized tree if changed, otherwise NULL_TREE. */
8218
8219 static tree
8220 maybe_canonicalize_comparison (location_t loc, enum tree_code code, tree type,
8221 tree arg0, tree arg1)
8222 {
8223 tree t;
8224 bool strict_overflow_p;
8225 const char * const warnmsg = G_("assuming signed overflow does not occur "
8226 "when reducing constant in comparison");
8227
8228 /* Try canonicalization by simplifying arg0. */
8229 strict_overflow_p = false;
8230 t = maybe_canonicalize_comparison_1 (loc, code, type, arg0, arg1,
8231 &strict_overflow_p);
8232 if (t)
8233 {
8234 if (strict_overflow_p)
8235 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8236 return t;
8237 }
8238
8239 /* Try canonicalization by simplifying arg1 using the swapped
8240 comparison. */
8241 code = swap_tree_comparison (code);
8242 strict_overflow_p = false;
8243 t = maybe_canonicalize_comparison_1 (loc, code, type, arg1, arg0,
8244 &strict_overflow_p);
8245 if (t && strict_overflow_p)
8246 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_MAGNITUDE);
8247 return t;
8248 }
8249
8250 /* Return whether BASE + OFFSET + BITPOS may wrap around the address
8251 space. This is used to avoid issuing overflow warnings for
8252 expressions like &p->x which can not wrap. */
8253
8254 static bool
8255 pointer_may_wrap_p (tree base, tree offset, HOST_WIDE_INT bitpos)
8256 {
8257 if (!POINTER_TYPE_P (TREE_TYPE (base)))
8258 return true;
8259
8260 if (bitpos < 0)
8261 return true;
8262
8263 wide_int wi_offset;
8264 int precision = TYPE_PRECISION (TREE_TYPE (base));
8265 if (offset == NULL_TREE)
8266 wi_offset = wi::zero (precision);
8267 else if (TREE_CODE (offset) != INTEGER_CST || TREE_OVERFLOW (offset))
8268 return true;
8269 else
8270 wi_offset = offset;
8271
8272 bool overflow;
8273 wide_int units = wi::shwi (bitpos / BITS_PER_UNIT, precision);
8274 wide_int total = wi::add (wi_offset, units, UNSIGNED, &overflow);
8275 if (overflow)
8276 return true;
8277
8278 if (!wi::fits_uhwi_p (total))
8279 return true;
8280
8281 HOST_WIDE_INT size = int_size_in_bytes (TREE_TYPE (TREE_TYPE (base)));
8282 if (size <= 0)
8283 return true;
8284
8285 /* We can do slightly better for SIZE if we have an ADDR_EXPR of an
8286 array. */
8287 if (TREE_CODE (base) == ADDR_EXPR)
8288 {
8289 HOST_WIDE_INT base_size;
8290
8291 base_size = int_size_in_bytes (TREE_TYPE (TREE_OPERAND (base, 0)));
8292 if (base_size > 0 && size < base_size)
8293 size = base_size;
8294 }
8295
8296 return total.to_uhwi () > (unsigned HOST_WIDE_INT) size;
8297 }
8298
8299 /* Subroutine of fold_binary. This routine performs all of the
8300 transformations that are common to the equality/inequality
8301 operators (EQ_EXPR and NE_EXPR) and the ordering operators
8302 (LT_EXPR, LE_EXPR, GE_EXPR and GT_EXPR). Callers other than
8303 fold_binary should call fold_binary. Fold a comparison with
8304 tree code CODE and type TYPE with operands OP0 and OP1. Return
8305 the folded comparison or NULL_TREE. */
8306
8307 static tree
8308 fold_comparison (location_t loc, enum tree_code code, tree type,
8309 tree op0, tree op1)
8310 {
8311 const bool equality_code = (code == EQ_EXPR || code == NE_EXPR);
8312 tree arg0, arg1, tem;
8313
8314 arg0 = op0;
8315 arg1 = op1;
8316
8317 STRIP_SIGN_NOPS (arg0);
8318 STRIP_SIGN_NOPS (arg1);
8319
8320 /* Transform comparisons of the form X +- C1 CMP C2 to X CMP C2 -+ C1. */
8321 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8322 && (equality_code
8323 || (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8324 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))))
8325 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8326 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8327 && TREE_CODE (arg1) == INTEGER_CST
8328 && !TREE_OVERFLOW (arg1))
8329 {
8330 const enum tree_code
8331 reverse_op = TREE_CODE (arg0) == PLUS_EXPR ? MINUS_EXPR : PLUS_EXPR;
8332 tree const1 = TREE_OPERAND (arg0, 1);
8333 tree const2 = fold_convert_loc (loc, TREE_TYPE (const1), arg1);
8334 tree variable = TREE_OPERAND (arg0, 0);
8335 tree new_const = int_const_binop (reverse_op, const2, const1);
8336
8337 /* If the constant operation overflowed this can be
8338 simplified as a comparison against INT_MAX/INT_MIN. */
8339 if (TREE_OVERFLOW (new_const)
8340 && !TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
8341 {
8342 int const1_sgn = tree_int_cst_sgn (const1);
8343 enum tree_code code2 = code;
8344
8345 /* Get the sign of the constant on the lhs if the
8346 operation were VARIABLE + CONST1. */
8347 if (TREE_CODE (arg0) == MINUS_EXPR)
8348 const1_sgn = -const1_sgn;
8349
8350 /* The sign of the constant determines if we overflowed
8351 INT_MAX (const1_sgn == -1) or INT_MIN (const1_sgn == 1).
8352 Canonicalize to the INT_MIN overflow by swapping the comparison
8353 if necessary. */
8354 if (const1_sgn == -1)
8355 code2 = swap_tree_comparison (code);
8356
8357 /* We now can look at the canonicalized case
8358 VARIABLE + 1 CODE2 INT_MIN
8359 and decide on the result. */
8360 switch (code2)
8361 {
8362 case EQ_EXPR:
8363 case LT_EXPR:
8364 case LE_EXPR:
8365 return
8366 omit_one_operand_loc (loc, type, boolean_false_node, variable);
8367
8368 case NE_EXPR:
8369 case GE_EXPR:
8370 case GT_EXPR:
8371 return
8372 omit_one_operand_loc (loc, type, boolean_true_node, variable);
8373
8374 default:
8375 gcc_unreachable ();
8376 }
8377 }
8378 else
8379 {
8380 if (!equality_code)
8381 fold_overflow_warning ("assuming signed overflow does not occur "
8382 "when changing X +- C1 cmp C2 to "
8383 "X cmp C2 -+ C1",
8384 WARN_STRICT_OVERFLOW_COMPARISON);
8385 return fold_build2_loc (loc, code, type, variable, new_const);
8386 }
8387 }
8388
8389 /* For comparisons of pointers we can decompose it to a compile time
8390 comparison of the base objects and the offsets into the object.
8391 This requires at least one operand being an ADDR_EXPR or a
8392 POINTER_PLUS_EXPR to do more than the operand_equal_p test below. */
8393 if (POINTER_TYPE_P (TREE_TYPE (arg0))
8394 && (TREE_CODE (arg0) == ADDR_EXPR
8395 || TREE_CODE (arg1) == ADDR_EXPR
8396 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
8397 || TREE_CODE (arg1) == POINTER_PLUS_EXPR))
8398 {
8399 tree base0, base1, offset0 = NULL_TREE, offset1 = NULL_TREE;
8400 HOST_WIDE_INT bitsize, bitpos0 = 0, bitpos1 = 0;
8401 machine_mode mode;
8402 int volatilep, reversep, unsignedp;
8403 bool indirect_base0 = false, indirect_base1 = false;
8404
8405 /* Get base and offset for the access. Strip ADDR_EXPR for
8406 get_inner_reference, but put it back by stripping INDIRECT_REF
8407 off the base object if possible. indirect_baseN will be true
8408 if baseN is not an address but refers to the object itself. */
8409 base0 = arg0;
8410 if (TREE_CODE (arg0) == ADDR_EXPR)
8411 {
8412 base0
8413 = get_inner_reference (TREE_OPERAND (arg0, 0),
8414 &bitsize, &bitpos0, &offset0, &mode,
8415 &unsignedp, &reversep, &volatilep, false);
8416 if (TREE_CODE (base0) == INDIRECT_REF)
8417 base0 = TREE_OPERAND (base0, 0);
8418 else
8419 indirect_base0 = true;
8420 }
8421 else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
8422 {
8423 base0 = TREE_OPERAND (arg0, 0);
8424 STRIP_SIGN_NOPS (base0);
8425 if (TREE_CODE (base0) == ADDR_EXPR)
8426 {
8427 base0
8428 = get_inner_reference (TREE_OPERAND (base0, 0),
8429 &bitsize, &bitpos0, &offset0, &mode,
8430 &unsignedp, &reversep, &volatilep,
8431 false);
8432 if (TREE_CODE (base0) == INDIRECT_REF)
8433 base0 = TREE_OPERAND (base0, 0);
8434 else
8435 indirect_base0 = true;
8436 }
8437 if (offset0 == NULL_TREE || integer_zerop (offset0))
8438 offset0 = TREE_OPERAND (arg0, 1);
8439 else
8440 offset0 = size_binop (PLUS_EXPR, offset0,
8441 TREE_OPERAND (arg0, 1));
8442 if (TREE_CODE (offset0) == INTEGER_CST)
8443 {
8444 offset_int tem = wi::sext (wi::to_offset (offset0),
8445 TYPE_PRECISION (sizetype));
8446 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8447 tem += bitpos0;
8448 if (wi::fits_shwi_p (tem))
8449 {
8450 bitpos0 = tem.to_shwi ();
8451 offset0 = NULL_TREE;
8452 }
8453 }
8454 }
8455
8456 base1 = arg1;
8457 if (TREE_CODE (arg1) == ADDR_EXPR)
8458 {
8459 base1
8460 = get_inner_reference (TREE_OPERAND (arg1, 0),
8461 &bitsize, &bitpos1, &offset1, &mode,
8462 &unsignedp, &reversep, &volatilep, false);
8463 if (TREE_CODE (base1) == INDIRECT_REF)
8464 base1 = TREE_OPERAND (base1, 0);
8465 else
8466 indirect_base1 = true;
8467 }
8468 else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
8469 {
8470 base1 = TREE_OPERAND (arg1, 0);
8471 STRIP_SIGN_NOPS (base1);
8472 if (TREE_CODE (base1) == ADDR_EXPR)
8473 {
8474 base1
8475 = get_inner_reference (TREE_OPERAND (base1, 0),
8476 &bitsize, &bitpos1, &offset1, &mode,
8477 &unsignedp, &reversep, &volatilep,
8478 false);
8479 if (TREE_CODE (base1) == INDIRECT_REF)
8480 base1 = TREE_OPERAND (base1, 0);
8481 else
8482 indirect_base1 = true;
8483 }
8484 if (offset1 == NULL_TREE || integer_zerop (offset1))
8485 offset1 = TREE_OPERAND (arg1, 1);
8486 else
8487 offset1 = size_binop (PLUS_EXPR, offset1,
8488 TREE_OPERAND (arg1, 1));
8489 if (TREE_CODE (offset1) == INTEGER_CST)
8490 {
8491 offset_int tem = wi::sext (wi::to_offset (offset1),
8492 TYPE_PRECISION (sizetype));
8493 tem = wi::lshift (tem, LOG2_BITS_PER_UNIT);
8494 tem += bitpos1;
8495 if (wi::fits_shwi_p (tem))
8496 {
8497 bitpos1 = tem.to_shwi ();
8498 offset1 = NULL_TREE;
8499 }
8500 }
8501 }
8502
8503 /* If we have equivalent bases we might be able to simplify. */
8504 if (indirect_base0 == indirect_base1
8505 && operand_equal_p (base0, base1,
8506 indirect_base0 ? OEP_ADDRESS_OF : 0))
8507 {
8508 /* We can fold this expression to a constant if the non-constant
8509 offset parts are equal. */
8510 if ((offset0 == offset1
8511 || (offset0 && offset1
8512 && operand_equal_p (offset0, offset1, 0)))
8513 && (code == EQ_EXPR
8514 || code == NE_EXPR
8515 || (indirect_base0 && DECL_P (base0))
8516 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8517
8518 {
8519 if (!equality_code
8520 && bitpos0 != bitpos1
8521 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8522 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8523 fold_overflow_warning (("assuming pointer wraparound does not "
8524 "occur when comparing P +- C1 with "
8525 "P +- C2"),
8526 WARN_STRICT_OVERFLOW_CONDITIONAL);
8527
8528 switch (code)
8529 {
8530 case EQ_EXPR:
8531 return constant_boolean_node (bitpos0 == bitpos1, type);
8532 case NE_EXPR:
8533 return constant_boolean_node (bitpos0 != bitpos1, type);
8534 case LT_EXPR:
8535 return constant_boolean_node (bitpos0 < bitpos1, type);
8536 case LE_EXPR:
8537 return constant_boolean_node (bitpos0 <= bitpos1, type);
8538 case GE_EXPR:
8539 return constant_boolean_node (bitpos0 >= bitpos1, type);
8540 case GT_EXPR:
8541 return constant_boolean_node (bitpos0 > bitpos1, type);
8542 default:;
8543 }
8544 }
8545 /* We can simplify the comparison to a comparison of the variable
8546 offset parts if the constant offset parts are equal.
8547 Be careful to use signed sizetype here because otherwise we
8548 mess with array offsets in the wrong way. This is possible
8549 because pointer arithmetic is restricted to retain within an
8550 object and overflow on pointer differences is undefined as of
8551 6.5.6/8 and /9 with respect to the signed ptrdiff_t. */
8552 else if (bitpos0 == bitpos1
8553 && (equality_code
8554 || (indirect_base0 && DECL_P (base0))
8555 || POINTER_TYPE_OVERFLOW_UNDEFINED))
8556 {
8557 /* By converting to signed sizetype we cover middle-end pointer
8558 arithmetic which operates on unsigned pointer types of size
8559 type size and ARRAY_REF offsets which are properly sign or
8560 zero extended from their type in case it is narrower than
8561 sizetype. */
8562 if (offset0 == NULL_TREE)
8563 offset0 = build_int_cst (ssizetype, 0);
8564 else
8565 offset0 = fold_convert_loc (loc, ssizetype, offset0);
8566 if (offset1 == NULL_TREE)
8567 offset1 = build_int_cst (ssizetype, 0);
8568 else
8569 offset1 = fold_convert_loc (loc, ssizetype, offset1);
8570
8571 if (!equality_code
8572 && (pointer_may_wrap_p (base0, offset0, bitpos0)
8573 || pointer_may_wrap_p (base1, offset1, bitpos1)))
8574 fold_overflow_warning (("assuming pointer wraparound does not "
8575 "occur when comparing P +- C1 with "
8576 "P +- C2"),
8577 WARN_STRICT_OVERFLOW_COMPARISON);
8578
8579 return fold_build2_loc (loc, code, type, offset0, offset1);
8580 }
8581 }
8582 /* For equal offsets we can simplify to a comparison of the
8583 base addresses. */
8584 else if (bitpos0 == bitpos1
8585 && (indirect_base0
8586 ? base0 != TREE_OPERAND (arg0, 0) : base0 != arg0)
8587 && (indirect_base1
8588 ? base1 != TREE_OPERAND (arg1, 0) : base1 != arg1)
8589 && ((offset0 == offset1)
8590 || (offset0 && offset1
8591 && operand_equal_p (offset0, offset1, 0))))
8592 {
8593 if (indirect_base0)
8594 base0 = build_fold_addr_expr_loc (loc, base0);
8595 if (indirect_base1)
8596 base1 = build_fold_addr_expr_loc (loc, base1);
8597 return fold_build2_loc (loc, code, type, base0, base1);
8598 }
8599 }
8600
8601 /* Transform comparisons of the form X +- C1 CMP Y +- C2 to
8602 X CMP Y +- C2 +- C1 for signed X, Y. This is valid if
8603 the resulting offset is smaller in absolute value than the
8604 original one and has the same sign. */
8605 if (ANY_INTEGRAL_TYPE_P (TREE_TYPE (arg0))
8606 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg0))
8607 && (TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
8608 && (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8609 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1)))
8610 && (TREE_CODE (arg1) == PLUS_EXPR || TREE_CODE (arg1) == MINUS_EXPR)
8611 && (TREE_CODE (TREE_OPERAND (arg1, 1)) == INTEGER_CST
8612 && !TREE_OVERFLOW (TREE_OPERAND (arg1, 1))))
8613 {
8614 tree const1 = TREE_OPERAND (arg0, 1);
8615 tree const2 = TREE_OPERAND (arg1, 1);
8616 tree variable1 = TREE_OPERAND (arg0, 0);
8617 tree variable2 = TREE_OPERAND (arg1, 0);
8618 tree cst;
8619 const char * const warnmsg = G_("assuming signed overflow does not "
8620 "occur when combining constants around "
8621 "a comparison");
8622
8623 /* Put the constant on the side where it doesn't overflow and is
8624 of lower absolute value and of same sign than before. */
8625 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8626 ? MINUS_EXPR : PLUS_EXPR,
8627 const2, const1);
8628 if (!TREE_OVERFLOW (cst)
8629 && tree_int_cst_compare (const2, cst) == tree_int_cst_sgn (const2)
8630 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const2))
8631 {
8632 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8633 return fold_build2_loc (loc, code, type,
8634 variable1,
8635 fold_build2_loc (loc, TREE_CODE (arg1),
8636 TREE_TYPE (arg1),
8637 variable2, cst));
8638 }
8639
8640 cst = int_const_binop (TREE_CODE (arg0) == TREE_CODE (arg1)
8641 ? MINUS_EXPR : PLUS_EXPR,
8642 const1, const2);
8643 if (!TREE_OVERFLOW (cst)
8644 && tree_int_cst_compare (const1, cst) == tree_int_cst_sgn (const1)
8645 && tree_int_cst_sgn (cst) == tree_int_cst_sgn (const1))
8646 {
8647 fold_overflow_warning (warnmsg, WARN_STRICT_OVERFLOW_COMPARISON);
8648 return fold_build2_loc (loc, code, type,
8649 fold_build2_loc (loc, TREE_CODE (arg0),
8650 TREE_TYPE (arg0),
8651 variable1, cst),
8652 variable2);
8653 }
8654 }
8655
8656 tem = maybe_canonicalize_comparison (loc, code, type, arg0, arg1);
8657 if (tem)
8658 return tem;
8659
8660 /* If this is comparing a constant with a MIN_EXPR or a MAX_EXPR of a
8661 constant, we can simplify it. */
8662 if (TREE_CODE (arg1) == INTEGER_CST
8663 && (TREE_CODE (arg0) == MIN_EXPR
8664 || TREE_CODE (arg0) == MAX_EXPR)
8665 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
8666 {
8667 tem = optimize_minmax_comparison (loc, code, type, op0, op1);
8668 if (tem)
8669 return tem;
8670 }
8671
8672 /* If we are comparing an expression that just has comparisons
8673 of two integer values, arithmetic expressions of those comparisons,
8674 and constants, we can simplify it. There are only three cases
8675 to check: the two values can either be equal, the first can be
8676 greater, or the second can be greater. Fold the expression for
8677 those three values. Since each value must be 0 or 1, we have
8678 eight possibilities, each of which corresponds to the constant 0
8679 or 1 or one of the six possible comparisons.
8680
8681 This handles common cases like (a > b) == 0 but also handles
8682 expressions like ((x > y) - (y > x)) > 0, which supposedly
8683 occur in macroized code. */
8684
8685 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) != INTEGER_CST)
8686 {
8687 tree cval1 = 0, cval2 = 0;
8688 int save_p = 0;
8689
8690 if (twoval_comparison_p (arg0, &cval1, &cval2, &save_p)
8691 /* Don't handle degenerate cases here; they should already
8692 have been handled anyway. */
8693 && cval1 != 0 && cval2 != 0
8694 && ! (TREE_CONSTANT (cval1) && TREE_CONSTANT (cval2))
8695 && TREE_TYPE (cval1) == TREE_TYPE (cval2)
8696 && INTEGRAL_TYPE_P (TREE_TYPE (cval1))
8697 && TYPE_MAX_VALUE (TREE_TYPE (cval1))
8698 && TYPE_MAX_VALUE (TREE_TYPE (cval2))
8699 && ! operand_equal_p (TYPE_MIN_VALUE (TREE_TYPE (cval1)),
8700 TYPE_MAX_VALUE (TREE_TYPE (cval2)), 0))
8701 {
8702 tree maxval = TYPE_MAX_VALUE (TREE_TYPE (cval1));
8703 tree minval = TYPE_MIN_VALUE (TREE_TYPE (cval1));
8704
8705 /* We can't just pass T to eval_subst in case cval1 or cval2
8706 was the same as ARG1. */
8707
8708 tree high_result
8709 = fold_build2_loc (loc, code, type,
8710 eval_subst (loc, arg0, cval1, maxval,
8711 cval2, minval),
8712 arg1);
8713 tree equal_result
8714 = fold_build2_loc (loc, code, type,
8715 eval_subst (loc, arg0, cval1, maxval,
8716 cval2, maxval),
8717 arg1);
8718 tree low_result
8719 = fold_build2_loc (loc, code, type,
8720 eval_subst (loc, arg0, cval1, minval,
8721 cval2, maxval),
8722 arg1);
8723
8724 /* All three of these results should be 0 or 1. Confirm they are.
8725 Then use those values to select the proper code to use. */
8726
8727 if (TREE_CODE (high_result) == INTEGER_CST
8728 && TREE_CODE (equal_result) == INTEGER_CST
8729 && TREE_CODE (low_result) == INTEGER_CST)
8730 {
8731 /* Make a 3-bit mask with the high-order bit being the
8732 value for `>', the next for '=', and the low for '<'. */
8733 switch ((integer_onep (high_result) * 4)
8734 + (integer_onep (equal_result) * 2)
8735 + integer_onep (low_result))
8736 {
8737 case 0:
8738 /* Always false. */
8739 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
8740 case 1:
8741 code = LT_EXPR;
8742 break;
8743 case 2:
8744 code = EQ_EXPR;
8745 break;
8746 case 3:
8747 code = LE_EXPR;
8748 break;
8749 case 4:
8750 code = GT_EXPR;
8751 break;
8752 case 5:
8753 code = NE_EXPR;
8754 break;
8755 case 6:
8756 code = GE_EXPR;
8757 break;
8758 case 7:
8759 /* Always true. */
8760 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
8761 }
8762
8763 if (save_p)
8764 {
8765 tem = save_expr (build2 (code, type, cval1, cval2));
8766 SET_EXPR_LOCATION (tem, loc);
8767 return tem;
8768 }
8769 return fold_build2_loc (loc, code, type, cval1, cval2);
8770 }
8771 }
8772 }
8773
8774 /* We can fold X/C1 op C2 where C1 and C2 are integer constants
8775 into a single range test. */
8776 if ((TREE_CODE (arg0) == TRUNC_DIV_EXPR
8777 || TREE_CODE (arg0) == EXACT_DIV_EXPR)
8778 && TREE_CODE (arg1) == INTEGER_CST
8779 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
8780 && !integer_zerop (TREE_OPERAND (arg0, 1))
8781 && !TREE_OVERFLOW (TREE_OPERAND (arg0, 1))
8782 && !TREE_OVERFLOW (arg1))
8783 {
8784 tem = fold_div_compare (loc, code, type, arg0, arg1);
8785 if (tem != NULL_TREE)
8786 return tem;
8787 }
8788
8789 return NULL_TREE;
8790 }
8791
8792
8793 /* Subroutine of fold_binary. Optimize complex multiplications of the
8794 form z * conj(z), as pow(realpart(z),2) + pow(imagpart(z),2). The
8795 argument EXPR represents the expression "z" of type TYPE. */
8796
8797 static tree
8798 fold_mult_zconjz (location_t loc, tree type, tree expr)
8799 {
8800 tree itype = TREE_TYPE (type);
8801 tree rpart, ipart, tem;
8802
8803 if (TREE_CODE (expr) == COMPLEX_EXPR)
8804 {
8805 rpart = TREE_OPERAND (expr, 0);
8806 ipart = TREE_OPERAND (expr, 1);
8807 }
8808 else if (TREE_CODE (expr) == COMPLEX_CST)
8809 {
8810 rpart = TREE_REALPART (expr);
8811 ipart = TREE_IMAGPART (expr);
8812 }
8813 else
8814 {
8815 expr = save_expr (expr);
8816 rpart = fold_build1_loc (loc, REALPART_EXPR, itype, expr);
8817 ipart = fold_build1_loc (loc, IMAGPART_EXPR, itype, expr);
8818 }
8819
8820 rpart = save_expr (rpart);
8821 ipart = save_expr (ipart);
8822 tem = fold_build2_loc (loc, PLUS_EXPR, itype,
8823 fold_build2_loc (loc, MULT_EXPR, itype, rpart, rpart),
8824 fold_build2_loc (loc, MULT_EXPR, itype, ipart, ipart));
8825 return fold_build2_loc (loc, COMPLEX_EXPR, type, tem,
8826 build_zero_cst (itype));
8827 }
8828
8829
8830 /* Helper function for fold_vec_perm. Store elements of VECTOR_CST or
8831 CONSTRUCTOR ARG into array ELTS and return true if successful. */
8832
8833 static bool
8834 vec_cst_ctor_to_array (tree arg, tree *elts)
8835 {
8836 unsigned int nelts = TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg)), i;
8837
8838 if (TREE_CODE (arg) == VECTOR_CST)
8839 {
8840 for (i = 0; i < VECTOR_CST_NELTS (arg); ++i)
8841 elts[i] = VECTOR_CST_ELT (arg, i);
8842 }
8843 else if (TREE_CODE (arg) == CONSTRUCTOR)
8844 {
8845 constructor_elt *elt;
8846
8847 FOR_EACH_VEC_SAFE_ELT (CONSTRUCTOR_ELTS (arg), i, elt)
8848 if (i >= nelts || TREE_CODE (TREE_TYPE (elt->value)) == VECTOR_TYPE)
8849 return false;
8850 else
8851 elts[i] = elt->value;
8852 }
8853 else
8854 return false;
8855 for (; i < nelts; i++)
8856 elts[i]
8857 = fold_convert (TREE_TYPE (TREE_TYPE (arg)), integer_zero_node);
8858 return true;
8859 }
8860
8861 /* Attempt to fold vector permutation of ARG0 and ARG1 vectors using SEL
8862 selector. Return the folded VECTOR_CST or CONSTRUCTOR if successful,
8863 NULL_TREE otherwise. */
8864
8865 static tree
8866 fold_vec_perm (tree type, tree arg0, tree arg1, const unsigned char *sel)
8867 {
8868 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
8869 tree *elts;
8870 bool need_ctor = false;
8871
8872 gcc_assert (TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)) == nelts
8873 && TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg1)) == nelts);
8874 if (TREE_TYPE (TREE_TYPE (arg0)) != TREE_TYPE (type)
8875 || TREE_TYPE (TREE_TYPE (arg1)) != TREE_TYPE (type))
8876 return NULL_TREE;
8877
8878 elts = XALLOCAVEC (tree, nelts * 3);
8879 if (!vec_cst_ctor_to_array (arg0, elts)
8880 || !vec_cst_ctor_to_array (arg1, elts + nelts))
8881 return NULL_TREE;
8882
8883 for (i = 0; i < nelts; i++)
8884 {
8885 if (!CONSTANT_CLASS_P (elts[sel[i]]))
8886 need_ctor = true;
8887 elts[i + 2 * nelts] = unshare_expr (elts[sel[i]]);
8888 }
8889
8890 if (need_ctor)
8891 {
8892 vec<constructor_elt, va_gc> *v;
8893 vec_alloc (v, nelts);
8894 for (i = 0; i < nelts; i++)
8895 CONSTRUCTOR_APPEND_ELT (v, NULL_TREE, elts[2 * nelts + i]);
8896 return build_constructor (type, v);
8897 }
8898 else
8899 return build_vector (type, &elts[2 * nelts]);
8900 }
8901
8902 /* Try to fold a pointer difference of type TYPE two address expressions of
8903 array references AREF0 and AREF1 using location LOC. Return a
8904 simplified expression for the difference or NULL_TREE. */
8905
8906 static tree
8907 fold_addr_of_array_ref_difference (location_t loc, tree type,
8908 tree aref0, tree aref1)
8909 {
8910 tree base0 = TREE_OPERAND (aref0, 0);
8911 tree base1 = TREE_OPERAND (aref1, 0);
8912 tree base_offset = build_int_cst (type, 0);
8913
8914 /* If the bases are array references as well, recurse. If the bases
8915 are pointer indirections compute the difference of the pointers.
8916 If the bases are equal, we are set. */
8917 if ((TREE_CODE (base0) == ARRAY_REF
8918 && TREE_CODE (base1) == ARRAY_REF
8919 && (base_offset
8920 = fold_addr_of_array_ref_difference (loc, type, base0, base1)))
8921 || (INDIRECT_REF_P (base0)
8922 && INDIRECT_REF_P (base1)
8923 && (base_offset
8924 = fold_binary_loc (loc, MINUS_EXPR, type,
8925 fold_convert (type, TREE_OPERAND (base0, 0)),
8926 fold_convert (type,
8927 TREE_OPERAND (base1, 0)))))
8928 || operand_equal_p (base0, base1, OEP_ADDRESS_OF))
8929 {
8930 tree op0 = fold_convert_loc (loc, type, TREE_OPERAND (aref0, 1));
8931 tree op1 = fold_convert_loc (loc, type, TREE_OPERAND (aref1, 1));
8932 tree esz = fold_convert_loc (loc, type, array_ref_element_size (aref0));
8933 tree diff = build2 (MINUS_EXPR, type, op0, op1);
8934 return fold_build2_loc (loc, PLUS_EXPR, type,
8935 base_offset,
8936 fold_build2_loc (loc, MULT_EXPR, type,
8937 diff, esz));
8938 }
8939 return NULL_TREE;
8940 }
8941
8942 /* If the real or vector real constant CST of type TYPE has an exact
8943 inverse, return it, else return NULL. */
8944
8945 tree
8946 exact_inverse (tree type, tree cst)
8947 {
8948 REAL_VALUE_TYPE r;
8949 tree unit_type, *elts;
8950 machine_mode mode;
8951 unsigned vec_nelts, i;
8952
8953 switch (TREE_CODE (cst))
8954 {
8955 case REAL_CST:
8956 r = TREE_REAL_CST (cst);
8957
8958 if (exact_real_inverse (TYPE_MODE (type), &r))
8959 return build_real (type, r);
8960
8961 return NULL_TREE;
8962
8963 case VECTOR_CST:
8964 vec_nelts = VECTOR_CST_NELTS (cst);
8965 elts = XALLOCAVEC (tree, vec_nelts);
8966 unit_type = TREE_TYPE (type);
8967 mode = TYPE_MODE (unit_type);
8968
8969 for (i = 0; i < vec_nelts; i++)
8970 {
8971 r = TREE_REAL_CST (VECTOR_CST_ELT (cst, i));
8972 if (!exact_real_inverse (mode, &r))
8973 return NULL_TREE;
8974 elts[i] = build_real (unit_type, r);
8975 }
8976
8977 return build_vector (type, elts);
8978
8979 default:
8980 return NULL_TREE;
8981 }
8982 }
8983
8984 /* Mask out the tz least significant bits of X of type TYPE where
8985 tz is the number of trailing zeroes in Y. */
8986 static wide_int
8987 mask_with_tz (tree type, const wide_int &x, const wide_int &y)
8988 {
8989 int tz = wi::ctz (y);
8990 if (tz > 0)
8991 return wi::mask (tz, true, TYPE_PRECISION (type)) & x;
8992 return x;
8993 }
8994
8995 /* Return true when T is an address and is known to be nonzero.
8996 For floating point we further ensure that T is not denormal.
8997 Similar logic is present in nonzero_address in rtlanal.h.
8998
8999 If the return value is based on the assumption that signed overflow
9000 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
9001 change *STRICT_OVERFLOW_P. */
9002
9003 static bool
9004 tree_expr_nonzero_warnv_p (tree t, bool *strict_overflow_p)
9005 {
9006 tree type = TREE_TYPE (t);
9007 enum tree_code code;
9008
9009 /* Doing something useful for floating point would need more work. */
9010 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
9011 return false;
9012
9013 code = TREE_CODE (t);
9014 switch (TREE_CODE_CLASS (code))
9015 {
9016 case tcc_unary:
9017 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9018 strict_overflow_p);
9019 case tcc_binary:
9020 case tcc_comparison:
9021 return tree_binary_nonzero_warnv_p (code, type,
9022 TREE_OPERAND (t, 0),
9023 TREE_OPERAND (t, 1),
9024 strict_overflow_p);
9025 case tcc_constant:
9026 case tcc_declaration:
9027 case tcc_reference:
9028 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9029
9030 default:
9031 break;
9032 }
9033
9034 switch (code)
9035 {
9036 case TRUTH_NOT_EXPR:
9037 return tree_unary_nonzero_warnv_p (code, type, TREE_OPERAND (t, 0),
9038 strict_overflow_p);
9039
9040 case TRUTH_AND_EXPR:
9041 case TRUTH_OR_EXPR:
9042 case TRUTH_XOR_EXPR:
9043 return tree_binary_nonzero_warnv_p (code, type,
9044 TREE_OPERAND (t, 0),
9045 TREE_OPERAND (t, 1),
9046 strict_overflow_p);
9047
9048 case COND_EXPR:
9049 case CONSTRUCTOR:
9050 case OBJ_TYPE_REF:
9051 case ASSERT_EXPR:
9052 case ADDR_EXPR:
9053 case WITH_SIZE_EXPR:
9054 case SSA_NAME:
9055 return tree_single_nonzero_warnv_p (t, strict_overflow_p);
9056
9057 case COMPOUND_EXPR:
9058 case MODIFY_EXPR:
9059 case BIND_EXPR:
9060 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
9061 strict_overflow_p);
9062
9063 case SAVE_EXPR:
9064 return tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 0),
9065 strict_overflow_p);
9066
9067 case CALL_EXPR:
9068 {
9069 tree fndecl = get_callee_fndecl (t);
9070 if (!fndecl) return false;
9071 if (flag_delete_null_pointer_checks && !flag_check_new
9072 && DECL_IS_OPERATOR_NEW (fndecl)
9073 && !TREE_NOTHROW (fndecl))
9074 return true;
9075 if (flag_delete_null_pointer_checks
9076 && lookup_attribute ("returns_nonnull",
9077 TYPE_ATTRIBUTES (TREE_TYPE (fndecl))))
9078 return true;
9079 return alloca_call_p (t);
9080 }
9081
9082 default:
9083 break;
9084 }
9085 return false;
9086 }
9087
9088 /* Return true when T is an address and is known to be nonzero.
9089 Handle warnings about undefined signed overflow. */
9090
9091 static bool
9092 tree_expr_nonzero_p (tree t)
9093 {
9094 bool ret, strict_overflow_p;
9095
9096 strict_overflow_p = false;
9097 ret = tree_expr_nonzero_warnv_p (t, &strict_overflow_p);
9098 if (strict_overflow_p)
9099 fold_overflow_warning (("assuming signed overflow does not occur when "
9100 "determining that expression is always "
9101 "non-zero"),
9102 WARN_STRICT_OVERFLOW_MISC);
9103 return ret;
9104 }
9105
9106 /* Return true if T is known not to be equal to an integer W. */
9107
9108 bool
9109 expr_not_equal_to (tree t, const wide_int &w)
9110 {
9111 wide_int min, max, nz;
9112 value_range_type rtype;
9113 switch (TREE_CODE (t))
9114 {
9115 case INTEGER_CST:
9116 return wi::ne_p (t, w);
9117
9118 case SSA_NAME:
9119 if (!INTEGRAL_TYPE_P (TREE_TYPE (t)))
9120 return false;
9121 rtype = get_range_info (t, &min, &max);
9122 if (rtype == VR_RANGE)
9123 {
9124 if (wi::lt_p (max, w, TYPE_SIGN (TREE_TYPE (t))))
9125 return true;
9126 if (wi::lt_p (w, min, TYPE_SIGN (TREE_TYPE (t))))
9127 return true;
9128 }
9129 else if (rtype == VR_ANTI_RANGE
9130 && wi::le_p (min, w, TYPE_SIGN (TREE_TYPE (t)))
9131 && wi::le_p (w, max, TYPE_SIGN (TREE_TYPE (t))))
9132 return true;
9133 /* If T has some known zero bits and W has any of those bits set,
9134 then T is known not to be equal to W. */
9135 if (wi::ne_p (wi::zext (wi::bit_and_not (w, get_nonzero_bits (t)),
9136 TYPE_PRECISION (TREE_TYPE (t))), 0))
9137 return true;
9138 return false;
9139
9140 default:
9141 return false;
9142 }
9143 }
9144
9145 /* Fold a binary expression of code CODE and type TYPE with operands
9146 OP0 and OP1. LOC is the location of the resulting expression.
9147 Return the folded expression if folding is successful. Otherwise,
9148 return NULL_TREE. */
9149
9150 tree
9151 fold_binary_loc (location_t loc,
9152 enum tree_code code, tree type, tree op0, tree op1)
9153 {
9154 enum tree_code_class kind = TREE_CODE_CLASS (code);
9155 tree arg0, arg1, tem;
9156 tree t1 = NULL_TREE;
9157 bool strict_overflow_p;
9158 unsigned int prec;
9159
9160 gcc_assert (IS_EXPR_CODE_CLASS (kind)
9161 && TREE_CODE_LENGTH (code) == 2
9162 && op0 != NULL_TREE
9163 && op1 != NULL_TREE);
9164
9165 arg0 = op0;
9166 arg1 = op1;
9167
9168 /* Strip any conversions that don't change the mode. This is
9169 safe for every expression, except for a comparison expression
9170 because its signedness is derived from its operands. So, in
9171 the latter case, only strip conversions that don't change the
9172 signedness. MIN_EXPR/MAX_EXPR also need signedness of arguments
9173 preserved.
9174
9175 Note that this is done as an internal manipulation within the
9176 constant folder, in order to find the simplest representation
9177 of the arguments so that their form can be studied. In any
9178 cases, the appropriate type conversions should be put back in
9179 the tree that will get out of the constant folder. */
9180
9181 if (kind == tcc_comparison || code == MIN_EXPR || code == MAX_EXPR)
9182 {
9183 STRIP_SIGN_NOPS (arg0);
9184 STRIP_SIGN_NOPS (arg1);
9185 }
9186 else
9187 {
9188 STRIP_NOPS (arg0);
9189 STRIP_NOPS (arg1);
9190 }
9191
9192 /* Note that TREE_CONSTANT isn't enough: static var addresses are
9193 constant but we can't do arithmetic on them. */
9194 if (CONSTANT_CLASS_P (arg0) && CONSTANT_CLASS_P (arg1))
9195 {
9196 tem = const_binop (code, type, arg0, arg1);
9197 if (tem != NULL_TREE)
9198 {
9199 if (TREE_TYPE (tem) != type)
9200 tem = fold_convert_loc (loc, type, tem);
9201 return tem;
9202 }
9203 }
9204
9205 /* If this is a commutative operation, and ARG0 is a constant, move it
9206 to ARG1 to reduce the number of tests below. */
9207 if (commutative_tree_code (code)
9208 && tree_swap_operands_p (arg0, arg1, true))
9209 return fold_build2_loc (loc, code, type, op1, op0);
9210
9211 /* Likewise if this is a comparison, and ARG0 is a constant, move it
9212 to ARG1 to reduce the number of tests below. */
9213 if (kind == tcc_comparison
9214 && tree_swap_operands_p (arg0, arg1, true))
9215 return fold_build2_loc (loc, swap_tree_comparison (code), type, op1, op0);
9216
9217 tem = generic_simplify (loc, code, type, op0, op1);
9218 if (tem)
9219 return tem;
9220
9221 /* ARG0 is the first operand of EXPR, and ARG1 is the second operand.
9222
9223 First check for cases where an arithmetic operation is applied to a
9224 compound, conditional, or comparison operation. Push the arithmetic
9225 operation inside the compound or conditional to see if any folding
9226 can then be done. Convert comparison to conditional for this purpose.
9227 The also optimizes non-constant cases that used to be done in
9228 expand_expr.
9229
9230 Before we do that, see if this is a BIT_AND_EXPR or a BIT_IOR_EXPR,
9231 one of the operands is a comparison and the other is a comparison, a
9232 BIT_AND_EXPR with the constant 1, or a truth value. In that case, the
9233 code below would make the expression more complex. Change it to a
9234 TRUTH_{AND,OR}_EXPR. Likewise, convert a similar NE_EXPR to
9235 TRUTH_XOR_EXPR and an EQ_EXPR to the inversion of a TRUTH_XOR_EXPR. */
9236
9237 if ((code == BIT_AND_EXPR || code == BIT_IOR_EXPR
9238 || code == EQ_EXPR || code == NE_EXPR)
9239 && TREE_CODE (type) != VECTOR_TYPE
9240 && ((truth_value_p (TREE_CODE (arg0))
9241 && (truth_value_p (TREE_CODE (arg1))
9242 || (TREE_CODE (arg1) == BIT_AND_EXPR
9243 && integer_onep (TREE_OPERAND (arg1, 1)))))
9244 || (truth_value_p (TREE_CODE (arg1))
9245 && (truth_value_p (TREE_CODE (arg0))
9246 || (TREE_CODE (arg0) == BIT_AND_EXPR
9247 && integer_onep (TREE_OPERAND (arg0, 1)))))))
9248 {
9249 tem = fold_build2_loc (loc, code == BIT_AND_EXPR ? TRUTH_AND_EXPR
9250 : code == BIT_IOR_EXPR ? TRUTH_OR_EXPR
9251 : TRUTH_XOR_EXPR,
9252 boolean_type_node,
9253 fold_convert_loc (loc, boolean_type_node, arg0),
9254 fold_convert_loc (loc, boolean_type_node, arg1));
9255
9256 if (code == EQ_EXPR)
9257 tem = invert_truthvalue_loc (loc, tem);
9258
9259 return fold_convert_loc (loc, type, tem);
9260 }
9261
9262 if (TREE_CODE_CLASS (code) == tcc_binary
9263 || TREE_CODE_CLASS (code) == tcc_comparison)
9264 {
9265 if (TREE_CODE (arg0) == COMPOUND_EXPR)
9266 {
9267 tem = fold_build2_loc (loc, code, type,
9268 fold_convert_loc (loc, TREE_TYPE (op0),
9269 TREE_OPERAND (arg0, 1)), op1);
9270 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg0, 0),
9271 tem);
9272 }
9273 if (TREE_CODE (arg1) == COMPOUND_EXPR
9274 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
9275 {
9276 tem = fold_build2_loc (loc, code, type, op0,
9277 fold_convert_loc (loc, TREE_TYPE (op1),
9278 TREE_OPERAND (arg1, 1)));
9279 return build2_loc (loc, COMPOUND_EXPR, type, TREE_OPERAND (arg1, 0),
9280 tem);
9281 }
9282
9283 if (TREE_CODE (arg0) == COND_EXPR
9284 || TREE_CODE (arg0) == VEC_COND_EXPR
9285 || COMPARISON_CLASS_P (arg0))
9286 {
9287 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9288 arg0, arg1,
9289 /*cond_first_p=*/1);
9290 if (tem != NULL_TREE)
9291 return tem;
9292 }
9293
9294 if (TREE_CODE (arg1) == COND_EXPR
9295 || TREE_CODE (arg1) == VEC_COND_EXPR
9296 || COMPARISON_CLASS_P (arg1))
9297 {
9298 tem = fold_binary_op_with_conditional_arg (loc, code, type, op0, op1,
9299 arg1, arg0,
9300 /*cond_first_p=*/0);
9301 if (tem != NULL_TREE)
9302 return tem;
9303 }
9304 }
9305
9306 switch (code)
9307 {
9308 case MEM_REF:
9309 /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2]. */
9310 if (TREE_CODE (arg0) == ADDR_EXPR
9311 && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
9312 {
9313 tree iref = TREE_OPERAND (arg0, 0);
9314 return fold_build2 (MEM_REF, type,
9315 TREE_OPERAND (iref, 0),
9316 int_const_binop (PLUS_EXPR, arg1,
9317 TREE_OPERAND (iref, 1)));
9318 }
9319
9320 /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2]. */
9321 if (TREE_CODE (arg0) == ADDR_EXPR
9322 && handled_component_p (TREE_OPERAND (arg0, 0)))
9323 {
9324 tree base;
9325 HOST_WIDE_INT coffset;
9326 base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
9327 &coffset);
9328 if (!base)
9329 return NULL_TREE;
9330 return fold_build2 (MEM_REF, type,
9331 build_fold_addr_expr (base),
9332 int_const_binop (PLUS_EXPR, arg1,
9333 size_int (coffset)));
9334 }
9335
9336 return NULL_TREE;
9337
9338 case POINTER_PLUS_EXPR:
9339 /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
9340 if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9341 && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9342 return fold_convert_loc (loc, type,
9343 fold_build2_loc (loc, PLUS_EXPR, sizetype,
9344 fold_convert_loc (loc, sizetype,
9345 arg1),
9346 fold_convert_loc (loc, sizetype,
9347 arg0)));
9348
9349 return NULL_TREE;
9350
9351 case PLUS_EXPR:
9352 if (INTEGRAL_TYPE_P (type) || VECTOR_INTEGER_TYPE_P (type))
9353 {
9354 /* X + (X / CST) * -CST is X % CST. */
9355 if (TREE_CODE (arg1) == MULT_EXPR
9356 && TREE_CODE (TREE_OPERAND (arg1, 0)) == TRUNC_DIV_EXPR
9357 && operand_equal_p (arg0,
9358 TREE_OPERAND (TREE_OPERAND (arg1, 0), 0), 0))
9359 {
9360 tree cst0 = TREE_OPERAND (TREE_OPERAND (arg1, 0), 1);
9361 tree cst1 = TREE_OPERAND (arg1, 1);
9362 tree sum = fold_binary_loc (loc, PLUS_EXPR, TREE_TYPE (cst1),
9363 cst1, cst0);
9364 if (sum && integer_zerop (sum))
9365 return fold_convert_loc (loc, type,
9366 fold_build2_loc (loc, TRUNC_MOD_EXPR,
9367 TREE_TYPE (arg0), arg0,
9368 cst0));
9369 }
9370 }
9371
9372 /* Handle (A1 * C1) + (A2 * C2) with A1, A2 or C1, C2 being the same or
9373 one. Make sure the type is not saturating and has the signedness of
9374 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9375 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9376 if ((TREE_CODE (arg0) == MULT_EXPR
9377 || TREE_CODE (arg1) == MULT_EXPR)
9378 && !TYPE_SATURATING (type)
9379 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9380 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9381 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9382 {
9383 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9384 if (tem)
9385 return tem;
9386 }
9387
9388 if (! FLOAT_TYPE_P (type))
9389 {
9390 /* Reassociate (plus (plus (mult) (foo)) (mult)) as
9391 (plus (plus (mult) (mult)) (foo)) so that we can
9392 take advantage of the factoring cases below. */
9393 if (ANY_INTEGRAL_TYPE_P (type)
9394 && TYPE_OVERFLOW_WRAPS (type)
9395 && (((TREE_CODE (arg0) == PLUS_EXPR
9396 || TREE_CODE (arg0) == MINUS_EXPR)
9397 && TREE_CODE (arg1) == MULT_EXPR)
9398 || ((TREE_CODE (arg1) == PLUS_EXPR
9399 || TREE_CODE (arg1) == MINUS_EXPR)
9400 && TREE_CODE (arg0) == MULT_EXPR)))
9401 {
9402 tree parg0, parg1, parg, marg;
9403 enum tree_code pcode;
9404
9405 if (TREE_CODE (arg1) == MULT_EXPR)
9406 parg = arg0, marg = arg1;
9407 else
9408 parg = arg1, marg = arg0;
9409 pcode = TREE_CODE (parg);
9410 parg0 = TREE_OPERAND (parg, 0);
9411 parg1 = TREE_OPERAND (parg, 1);
9412 STRIP_NOPS (parg0);
9413 STRIP_NOPS (parg1);
9414
9415 if (TREE_CODE (parg0) == MULT_EXPR
9416 && TREE_CODE (parg1) != MULT_EXPR)
9417 return fold_build2_loc (loc, pcode, type,
9418 fold_build2_loc (loc, PLUS_EXPR, type,
9419 fold_convert_loc (loc, type,
9420 parg0),
9421 fold_convert_loc (loc, type,
9422 marg)),
9423 fold_convert_loc (loc, type, parg1));
9424 if (TREE_CODE (parg0) != MULT_EXPR
9425 && TREE_CODE (parg1) == MULT_EXPR)
9426 return
9427 fold_build2_loc (loc, PLUS_EXPR, type,
9428 fold_convert_loc (loc, type, parg0),
9429 fold_build2_loc (loc, pcode, type,
9430 fold_convert_loc (loc, type, marg),
9431 fold_convert_loc (loc, type,
9432 parg1)));
9433 }
9434 }
9435 else
9436 {
9437 /* Fold __complex__ ( x, 0 ) + __complex__ ( 0, y )
9438 to __complex__ ( x, y ). This is not the same for SNaNs or
9439 if signed zeros are involved. */
9440 if (!HONOR_SNANS (element_mode (arg0))
9441 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9442 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9443 {
9444 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9445 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9446 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9447 bool arg0rz = false, arg0iz = false;
9448 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9449 || (arg0i && (arg0iz = real_zerop (arg0i))))
9450 {
9451 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9452 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9453 if (arg0rz && arg1i && real_zerop (arg1i))
9454 {
9455 tree rp = arg1r ? arg1r
9456 : build1 (REALPART_EXPR, rtype, arg1);
9457 tree ip = arg0i ? arg0i
9458 : build1 (IMAGPART_EXPR, rtype, arg0);
9459 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9460 }
9461 else if (arg0iz && arg1r && real_zerop (arg1r))
9462 {
9463 tree rp = arg0r ? arg0r
9464 : build1 (REALPART_EXPR, rtype, arg0);
9465 tree ip = arg1i ? arg1i
9466 : build1 (IMAGPART_EXPR, rtype, arg1);
9467 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9468 }
9469 }
9470 }
9471
9472 if (flag_unsafe_math_optimizations
9473 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9474 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9475 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9476 return tem;
9477
9478 /* Convert a + (b*c + d*e) into (a + b*c) + d*e.
9479 We associate floats only if the user has specified
9480 -fassociative-math. */
9481 if (flag_associative_math
9482 && TREE_CODE (arg1) == PLUS_EXPR
9483 && TREE_CODE (arg0) != MULT_EXPR)
9484 {
9485 tree tree10 = TREE_OPERAND (arg1, 0);
9486 tree tree11 = TREE_OPERAND (arg1, 1);
9487 if (TREE_CODE (tree11) == MULT_EXPR
9488 && TREE_CODE (tree10) == MULT_EXPR)
9489 {
9490 tree tree0;
9491 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, arg0, tree10);
9492 return fold_build2_loc (loc, PLUS_EXPR, type, tree0, tree11);
9493 }
9494 }
9495 /* Convert (b*c + d*e) + a into b*c + (d*e +a).
9496 We associate floats only if the user has specified
9497 -fassociative-math. */
9498 if (flag_associative_math
9499 && TREE_CODE (arg0) == PLUS_EXPR
9500 && TREE_CODE (arg1) != MULT_EXPR)
9501 {
9502 tree tree00 = TREE_OPERAND (arg0, 0);
9503 tree tree01 = TREE_OPERAND (arg0, 1);
9504 if (TREE_CODE (tree01) == MULT_EXPR
9505 && TREE_CODE (tree00) == MULT_EXPR)
9506 {
9507 tree tree0;
9508 tree0 = fold_build2_loc (loc, PLUS_EXPR, type, tree01, arg1);
9509 return fold_build2_loc (loc, PLUS_EXPR, type, tree00, tree0);
9510 }
9511 }
9512 }
9513
9514 bit_rotate:
9515 /* (A << C1) + (A >> C2) if A is unsigned and C1+C2 is the size of A
9516 is a rotate of A by C1 bits. */
9517 /* (A << B) + (A >> (Z - B)) if A is unsigned and Z is the size of A
9518 is a rotate of A by B bits. */
9519 {
9520 enum tree_code code0, code1;
9521 tree rtype;
9522 code0 = TREE_CODE (arg0);
9523 code1 = TREE_CODE (arg1);
9524 if (((code0 == RSHIFT_EXPR && code1 == LSHIFT_EXPR)
9525 || (code1 == RSHIFT_EXPR && code0 == LSHIFT_EXPR))
9526 && operand_equal_p (TREE_OPERAND (arg0, 0),
9527 TREE_OPERAND (arg1, 0), 0)
9528 && (rtype = TREE_TYPE (TREE_OPERAND (arg0, 0)),
9529 TYPE_UNSIGNED (rtype))
9530 /* Only create rotates in complete modes. Other cases are not
9531 expanded properly. */
9532 && (element_precision (rtype)
9533 == GET_MODE_UNIT_PRECISION (TYPE_MODE (rtype))))
9534 {
9535 tree tree01, tree11;
9536 enum tree_code code01, code11;
9537
9538 tree01 = TREE_OPERAND (arg0, 1);
9539 tree11 = TREE_OPERAND (arg1, 1);
9540 STRIP_NOPS (tree01);
9541 STRIP_NOPS (tree11);
9542 code01 = TREE_CODE (tree01);
9543 code11 = TREE_CODE (tree11);
9544 if (code01 == INTEGER_CST
9545 && code11 == INTEGER_CST
9546 && (wi::to_widest (tree01) + wi::to_widest (tree11)
9547 == element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)))))
9548 {
9549 tem = build2_loc (loc, LROTATE_EXPR,
9550 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9551 TREE_OPERAND (arg0, 0),
9552 code0 == LSHIFT_EXPR
9553 ? TREE_OPERAND (arg0, 1)
9554 : TREE_OPERAND (arg1, 1));
9555 return fold_convert_loc (loc, type, tem);
9556 }
9557 else if (code11 == MINUS_EXPR)
9558 {
9559 tree tree110, tree111;
9560 tree110 = TREE_OPERAND (tree11, 0);
9561 tree111 = TREE_OPERAND (tree11, 1);
9562 STRIP_NOPS (tree110);
9563 STRIP_NOPS (tree111);
9564 if (TREE_CODE (tree110) == INTEGER_CST
9565 && 0 == compare_tree_int (tree110,
9566 element_precision
9567 (TREE_TYPE (TREE_OPERAND
9568 (arg0, 0))))
9569 && operand_equal_p (tree01, tree111, 0))
9570 return
9571 fold_convert_loc (loc, type,
9572 build2 ((code0 == LSHIFT_EXPR
9573 ? LROTATE_EXPR
9574 : RROTATE_EXPR),
9575 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9576 TREE_OPERAND (arg0, 0),
9577 TREE_OPERAND (arg0, 1)));
9578 }
9579 else if (code01 == MINUS_EXPR)
9580 {
9581 tree tree010, tree011;
9582 tree010 = TREE_OPERAND (tree01, 0);
9583 tree011 = TREE_OPERAND (tree01, 1);
9584 STRIP_NOPS (tree010);
9585 STRIP_NOPS (tree011);
9586 if (TREE_CODE (tree010) == INTEGER_CST
9587 && 0 == compare_tree_int (tree010,
9588 element_precision
9589 (TREE_TYPE (TREE_OPERAND
9590 (arg0, 0))))
9591 && operand_equal_p (tree11, tree011, 0))
9592 return fold_convert_loc
9593 (loc, type,
9594 build2 ((code0 != LSHIFT_EXPR
9595 ? LROTATE_EXPR
9596 : RROTATE_EXPR),
9597 TREE_TYPE (TREE_OPERAND (arg0, 0)),
9598 TREE_OPERAND (arg0, 0), TREE_OPERAND (arg1, 1)));
9599 }
9600 }
9601 }
9602
9603 associate:
9604 /* In most languages, can't associate operations on floats through
9605 parentheses. Rather than remember where the parentheses were, we
9606 don't associate floats at all, unless the user has specified
9607 -fassociative-math.
9608 And, we need to make sure type is not saturating. */
9609
9610 if ((! FLOAT_TYPE_P (type) || flag_associative_math)
9611 && !TYPE_SATURATING (type))
9612 {
9613 tree var0, con0, lit0, minus_lit0;
9614 tree var1, con1, lit1, minus_lit1;
9615 tree atype = type;
9616 bool ok = true;
9617
9618 /* Split both trees into variables, constants, and literals. Then
9619 associate each group together, the constants with literals,
9620 then the result with variables. This increases the chances of
9621 literals being recombined later and of generating relocatable
9622 expressions for the sum of a constant and literal. */
9623 var0 = split_tree (arg0, code, &con0, &lit0, &minus_lit0, 0);
9624 var1 = split_tree (arg1, code, &con1, &lit1, &minus_lit1,
9625 code == MINUS_EXPR);
9626
9627 /* Recombine MINUS_EXPR operands by using PLUS_EXPR. */
9628 if (code == MINUS_EXPR)
9629 code = PLUS_EXPR;
9630
9631 /* With undefined overflow prefer doing association in a type
9632 which wraps on overflow, if that is one of the operand types. */
9633 if ((POINTER_TYPE_P (type) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9634 || (INTEGRAL_TYPE_P (type) && !TYPE_OVERFLOW_WRAPS (type)))
9635 {
9636 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
9637 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
9638 atype = TREE_TYPE (arg0);
9639 else if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
9640 && TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg1)))
9641 atype = TREE_TYPE (arg1);
9642 gcc_assert (TYPE_PRECISION (atype) == TYPE_PRECISION (type));
9643 }
9644
9645 /* With undefined overflow we can only associate constants with one
9646 variable, and constants whose association doesn't overflow. */
9647 if ((POINTER_TYPE_P (atype) && POINTER_TYPE_OVERFLOW_UNDEFINED)
9648 || (INTEGRAL_TYPE_P (atype) && !TYPE_OVERFLOW_WRAPS (atype)))
9649 {
9650 if (var0 && var1)
9651 {
9652 tree tmp0 = var0;
9653 tree tmp1 = var1;
9654 bool one_neg = false;
9655
9656 if (TREE_CODE (tmp0) == NEGATE_EXPR)
9657 {
9658 tmp0 = TREE_OPERAND (tmp0, 0);
9659 one_neg = !one_neg;
9660 }
9661 if (CONVERT_EXPR_P (tmp0)
9662 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9663 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp0, 0)))
9664 <= TYPE_PRECISION (atype)))
9665 tmp0 = TREE_OPERAND (tmp0, 0);
9666 if (TREE_CODE (tmp1) == NEGATE_EXPR)
9667 {
9668 tmp1 = TREE_OPERAND (tmp1, 0);
9669 one_neg = !one_neg;
9670 }
9671 if (CONVERT_EXPR_P (tmp1)
9672 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9673 && (TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (tmp1, 0)))
9674 <= TYPE_PRECISION (atype)))
9675 tmp1 = TREE_OPERAND (tmp1, 0);
9676 /* The only case we can still associate with two variables
9677 is if they cancel out. */
9678 if (!one_neg
9679 || !operand_equal_p (tmp0, tmp1, 0))
9680 ok = false;
9681 }
9682 }
9683
9684 /* Only do something if we found more than two objects. Otherwise,
9685 nothing has changed and we risk infinite recursion. */
9686 if (ok
9687 && (2 < ((var0 != 0) + (var1 != 0)
9688 + (con0 != 0) + (con1 != 0)
9689 + (lit0 != 0) + (lit1 != 0)
9690 + (minus_lit0 != 0) + (minus_lit1 != 0))))
9691 {
9692 bool any_overflows = false;
9693 if (lit0) any_overflows |= TREE_OVERFLOW (lit0);
9694 if (lit1) any_overflows |= TREE_OVERFLOW (lit1);
9695 if (minus_lit0) any_overflows |= TREE_OVERFLOW (minus_lit0);
9696 if (minus_lit1) any_overflows |= TREE_OVERFLOW (minus_lit1);
9697 var0 = associate_trees (loc, var0, var1, code, atype);
9698 con0 = associate_trees (loc, con0, con1, code, atype);
9699 lit0 = associate_trees (loc, lit0, lit1, code, atype);
9700 minus_lit0 = associate_trees (loc, minus_lit0, minus_lit1,
9701 code, atype);
9702
9703 /* Preserve the MINUS_EXPR if the negative part of the literal is
9704 greater than the positive part. Otherwise, the multiplicative
9705 folding code (i.e extract_muldiv) may be fooled in case
9706 unsigned constants are subtracted, like in the following
9707 example: ((X*2 + 4) - 8U)/2. */
9708 if (minus_lit0 && lit0)
9709 {
9710 if (TREE_CODE (lit0) == INTEGER_CST
9711 && TREE_CODE (minus_lit0) == INTEGER_CST
9712 && tree_int_cst_lt (lit0, minus_lit0))
9713 {
9714 minus_lit0 = associate_trees (loc, minus_lit0, lit0,
9715 MINUS_EXPR, atype);
9716 lit0 = 0;
9717 }
9718 else
9719 {
9720 lit0 = associate_trees (loc, lit0, minus_lit0,
9721 MINUS_EXPR, atype);
9722 minus_lit0 = 0;
9723 }
9724 }
9725
9726 /* Don't introduce overflows through reassociation. */
9727 if (!any_overflows
9728 && ((lit0 && TREE_OVERFLOW_P (lit0))
9729 || (minus_lit0 && TREE_OVERFLOW_P (minus_lit0))))
9730 return NULL_TREE;
9731
9732 if (minus_lit0)
9733 {
9734 if (con0 == 0)
9735 return
9736 fold_convert_loc (loc, type,
9737 associate_trees (loc, var0, minus_lit0,
9738 MINUS_EXPR, atype));
9739 else
9740 {
9741 con0 = associate_trees (loc, con0, minus_lit0,
9742 MINUS_EXPR, atype);
9743 return
9744 fold_convert_loc (loc, type,
9745 associate_trees (loc, var0, con0,
9746 PLUS_EXPR, atype));
9747 }
9748 }
9749
9750 con0 = associate_trees (loc, con0, lit0, code, atype);
9751 return
9752 fold_convert_loc (loc, type, associate_trees (loc, var0, con0,
9753 code, atype));
9754 }
9755 }
9756
9757 return NULL_TREE;
9758
9759 case MINUS_EXPR:
9760 /* (-A) - B -> (-B) - A where B is easily negated and we can swap. */
9761 if (TREE_CODE (arg0) == NEGATE_EXPR
9762 && negate_expr_p (op1)
9763 && reorder_operands_p (arg0, arg1))
9764 return fold_build2_loc (loc, MINUS_EXPR, type,
9765 negate_expr (op1),
9766 fold_convert_loc (loc, type,
9767 TREE_OPERAND (arg0, 0)));
9768
9769 /* Fold __complex__ ( x, 0 ) - __complex__ ( 0, y ) to
9770 __complex__ ( x, -y ). This is not the same for SNaNs or if
9771 signed zeros are involved. */
9772 if (!HONOR_SNANS (element_mode (arg0))
9773 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9774 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0)))
9775 {
9776 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9777 tree arg0r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg0);
9778 tree arg0i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg0);
9779 bool arg0rz = false, arg0iz = false;
9780 if ((arg0r && (arg0rz = real_zerop (arg0r)))
9781 || (arg0i && (arg0iz = real_zerop (arg0i))))
9782 {
9783 tree arg1r = fold_unary_loc (loc, REALPART_EXPR, rtype, arg1);
9784 tree arg1i = fold_unary_loc (loc, IMAGPART_EXPR, rtype, arg1);
9785 if (arg0rz && arg1i && real_zerop (arg1i))
9786 {
9787 tree rp = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9788 arg1r ? arg1r
9789 : build1 (REALPART_EXPR, rtype, arg1));
9790 tree ip = arg0i ? arg0i
9791 : build1 (IMAGPART_EXPR, rtype, arg0);
9792 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9793 }
9794 else if (arg0iz && arg1r && real_zerop (arg1r))
9795 {
9796 tree rp = arg0r ? arg0r
9797 : build1 (REALPART_EXPR, rtype, arg0);
9798 tree ip = fold_build1_loc (loc, NEGATE_EXPR, rtype,
9799 arg1i ? arg1i
9800 : build1 (IMAGPART_EXPR, rtype, arg1));
9801 return fold_build2_loc (loc, COMPLEX_EXPR, type, rp, ip);
9802 }
9803 }
9804 }
9805
9806 /* A - B -> A + (-B) if B is easily negatable. */
9807 if (negate_expr_p (op1)
9808 && ! TYPE_OVERFLOW_SANITIZED (type)
9809 && ((FLOAT_TYPE_P (type)
9810 /* Avoid this transformation if B is a positive REAL_CST. */
9811 && (TREE_CODE (op1) != REAL_CST
9812 || REAL_VALUE_NEGATIVE (TREE_REAL_CST (op1))))
9813 || INTEGRAL_TYPE_P (type)))
9814 return fold_build2_loc (loc, PLUS_EXPR, type,
9815 fold_convert_loc (loc, type, arg0),
9816 negate_expr (op1));
9817
9818 /* Fold &a[i] - &a[j] to i-j. */
9819 if (TREE_CODE (arg0) == ADDR_EXPR
9820 && TREE_CODE (TREE_OPERAND (arg0, 0)) == ARRAY_REF
9821 && TREE_CODE (arg1) == ADDR_EXPR
9822 && TREE_CODE (TREE_OPERAND (arg1, 0)) == ARRAY_REF)
9823 {
9824 tree tem = fold_addr_of_array_ref_difference (loc, type,
9825 TREE_OPERAND (arg0, 0),
9826 TREE_OPERAND (arg1, 0));
9827 if (tem)
9828 return tem;
9829 }
9830
9831 if (FLOAT_TYPE_P (type)
9832 && flag_unsafe_math_optimizations
9833 && (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
9834 && (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
9835 && (tem = distribute_real_division (loc, code, type, arg0, arg1)))
9836 return tem;
9837
9838 /* Handle (A1 * C1) - (A2 * C2) with A1, A2 or C1, C2 being the same or
9839 one. Make sure the type is not saturating and has the signedness of
9840 the stripped operands, as fold_plusminus_mult_expr will re-associate.
9841 ??? The latter condition should use TYPE_OVERFLOW_* flags instead. */
9842 if ((TREE_CODE (arg0) == MULT_EXPR
9843 || TREE_CODE (arg1) == MULT_EXPR)
9844 && !TYPE_SATURATING (type)
9845 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg0))
9846 && TYPE_UNSIGNED (type) == TYPE_UNSIGNED (TREE_TYPE (arg1))
9847 && (!FLOAT_TYPE_P (type) || flag_associative_math))
9848 {
9849 tree tem = fold_plusminus_mult_expr (loc, code, type, arg0, arg1);
9850 if (tem)
9851 return tem;
9852 }
9853
9854 goto associate;
9855
9856 case MULT_EXPR:
9857 if (! FLOAT_TYPE_P (type))
9858 {
9859 /* Transform x * -C into -x * C if x is easily negatable. */
9860 if (TREE_CODE (op1) == INTEGER_CST
9861 && tree_int_cst_sgn (op1) == -1
9862 && negate_expr_p (op0)
9863 && (tem = negate_expr (op1)) != op1
9864 && ! TREE_OVERFLOW (tem))
9865 return fold_build2_loc (loc, MULT_EXPR, type,
9866 fold_convert_loc (loc, type,
9867 negate_expr (op0)), tem);
9868
9869 /* (A + A) * C -> A * 2 * C */
9870 if (TREE_CODE (arg0) == PLUS_EXPR
9871 && TREE_CODE (arg1) == INTEGER_CST
9872 && operand_equal_p (TREE_OPERAND (arg0, 0),
9873 TREE_OPERAND (arg0, 1), 0))
9874 return fold_build2_loc (loc, MULT_EXPR, type,
9875 omit_one_operand_loc (loc, type,
9876 TREE_OPERAND (arg0, 0),
9877 TREE_OPERAND (arg0, 1)),
9878 fold_build2_loc (loc, MULT_EXPR, type,
9879 build_int_cst (type, 2) , arg1));
9880
9881 /* ((T) (X /[ex] C)) * C cancels out if the conversion is
9882 sign-changing only. */
9883 if (TREE_CODE (arg1) == INTEGER_CST
9884 && TREE_CODE (arg0) == EXACT_DIV_EXPR
9885 && operand_equal_p (arg1, TREE_OPERAND (arg0, 1), 0))
9886 return fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
9887
9888 strict_overflow_p = false;
9889 if (TREE_CODE (arg1) == INTEGER_CST
9890 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
9891 &strict_overflow_p)))
9892 {
9893 if (strict_overflow_p)
9894 fold_overflow_warning (("assuming signed overflow does not "
9895 "occur when simplifying "
9896 "multiplication"),
9897 WARN_STRICT_OVERFLOW_MISC);
9898 return fold_convert_loc (loc, type, tem);
9899 }
9900
9901 /* Optimize z * conj(z) for integer complex numbers. */
9902 if (TREE_CODE (arg0) == CONJ_EXPR
9903 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9904 return fold_mult_zconjz (loc, type, arg1);
9905 if (TREE_CODE (arg1) == CONJ_EXPR
9906 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9907 return fold_mult_zconjz (loc, type, arg0);
9908 }
9909 else
9910 {
9911 /* Fold z * +-I to __complex__ (-+__imag z, +-__real z).
9912 This is not the same for NaNs or if signed zeros are
9913 involved. */
9914 if (!HONOR_NANS (arg0)
9915 && !HONOR_SIGNED_ZEROS (element_mode (arg0))
9916 && COMPLEX_FLOAT_TYPE_P (TREE_TYPE (arg0))
9917 && TREE_CODE (arg1) == COMPLEX_CST
9918 && real_zerop (TREE_REALPART (arg1)))
9919 {
9920 tree rtype = TREE_TYPE (TREE_TYPE (arg0));
9921 if (real_onep (TREE_IMAGPART (arg1)))
9922 return
9923 fold_build2_loc (loc, COMPLEX_EXPR, type,
9924 negate_expr (fold_build1_loc (loc, IMAGPART_EXPR,
9925 rtype, arg0)),
9926 fold_build1_loc (loc, REALPART_EXPR, rtype, arg0));
9927 else if (real_minus_onep (TREE_IMAGPART (arg1)))
9928 return
9929 fold_build2_loc (loc, COMPLEX_EXPR, type,
9930 fold_build1_loc (loc, IMAGPART_EXPR, rtype, arg0),
9931 negate_expr (fold_build1_loc (loc, REALPART_EXPR,
9932 rtype, arg0)));
9933 }
9934
9935 /* Optimize z * conj(z) for floating point complex numbers.
9936 Guarded by flag_unsafe_math_optimizations as non-finite
9937 imaginary components don't produce scalar results. */
9938 if (flag_unsafe_math_optimizations
9939 && TREE_CODE (arg0) == CONJ_EXPR
9940 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
9941 return fold_mult_zconjz (loc, type, arg1);
9942 if (flag_unsafe_math_optimizations
9943 && TREE_CODE (arg1) == CONJ_EXPR
9944 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
9945 return fold_mult_zconjz (loc, type, arg0);
9946
9947 if (flag_unsafe_math_optimizations)
9948 {
9949
9950 /* Canonicalize x*x as pow(x,2.0), which is expanded as x*x. */
9951 if (!in_gimple_form
9952 && optimize
9953 && operand_equal_p (arg0, arg1, 0))
9954 {
9955 tree powfn = mathfn_built_in (type, BUILT_IN_POW);
9956
9957 if (powfn)
9958 {
9959 tree arg = build_real (type, dconst2);
9960 return build_call_expr_loc (loc, powfn, 2, arg0, arg);
9961 }
9962 }
9963 }
9964 }
9965 goto associate;
9966
9967 case BIT_IOR_EXPR:
9968 /* Canonicalize (X & C1) | C2. */
9969 if (TREE_CODE (arg0) == BIT_AND_EXPR
9970 && TREE_CODE (arg1) == INTEGER_CST
9971 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
9972 {
9973 int width = TYPE_PRECISION (type), w;
9974 wide_int c1 = TREE_OPERAND (arg0, 1);
9975 wide_int c2 = arg1;
9976
9977 /* If (C1&C2) == C1, then (X&C1)|C2 becomes (X,C2). */
9978 if ((c1 & c2) == c1)
9979 return omit_one_operand_loc (loc, type, arg1,
9980 TREE_OPERAND (arg0, 0));
9981
9982 wide_int msk = wi::mask (width, false,
9983 TYPE_PRECISION (TREE_TYPE (arg1)));
9984
9985 /* If (C1|C2) == ~0 then (X&C1)|C2 becomes X|C2. */
9986 if (msk.and_not (c1 | c2) == 0)
9987 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
9988 TREE_OPERAND (arg0, 0), arg1);
9989
9990 /* Minimize the number of bits set in C1, i.e. C1 := C1 & ~C2,
9991 unless (C1 & ~C2) | (C2 & C3) for some C3 is a mask of some
9992 mode which allows further optimizations. */
9993 c1 &= msk;
9994 c2 &= msk;
9995 wide_int c3 = c1.and_not (c2);
9996 for (w = BITS_PER_UNIT; w <= width; w <<= 1)
9997 {
9998 wide_int mask = wi::mask (w, false,
9999 TYPE_PRECISION (type));
10000 if (((c1 | c2) & mask) == mask && c1.and_not (mask) == 0)
10001 {
10002 c3 = mask;
10003 break;
10004 }
10005 }
10006
10007 if (c3 != c1)
10008 return fold_build2_loc (loc, BIT_IOR_EXPR, type,
10009 fold_build2_loc (loc, BIT_AND_EXPR, type,
10010 TREE_OPERAND (arg0, 0),
10011 wide_int_to_tree (type,
10012 c3)),
10013 arg1);
10014 }
10015
10016 /* See if this can be simplified into a rotate first. If that
10017 is unsuccessful continue in the association code. */
10018 goto bit_rotate;
10019
10020 case BIT_XOR_EXPR:
10021 /* Fold (X & 1) ^ 1 as (X & 1) == 0. */
10022 if (TREE_CODE (arg0) == BIT_AND_EXPR
10023 && INTEGRAL_TYPE_P (type)
10024 && integer_onep (TREE_OPERAND (arg0, 1))
10025 && integer_onep (arg1))
10026 return fold_build2_loc (loc, EQ_EXPR, type, arg0,
10027 build_zero_cst (TREE_TYPE (arg0)));
10028
10029 /* See if this can be simplified into a rotate first. If that
10030 is unsuccessful continue in the association code. */
10031 goto bit_rotate;
10032
10033 case BIT_AND_EXPR:
10034 /* Fold (X ^ 1) & 1 as (X & 1) == 0. */
10035 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10036 && INTEGRAL_TYPE_P (type)
10037 && integer_onep (TREE_OPERAND (arg0, 1))
10038 && integer_onep (arg1))
10039 {
10040 tree tem2;
10041 tem = TREE_OPERAND (arg0, 0);
10042 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10043 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10044 tem, tem2);
10045 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10046 build_zero_cst (TREE_TYPE (tem)));
10047 }
10048 /* Fold ~X & 1 as (X & 1) == 0. */
10049 if (TREE_CODE (arg0) == BIT_NOT_EXPR
10050 && INTEGRAL_TYPE_P (type)
10051 && integer_onep (arg1))
10052 {
10053 tree tem2;
10054 tem = TREE_OPERAND (arg0, 0);
10055 tem2 = fold_convert_loc (loc, TREE_TYPE (tem), arg1);
10056 tem2 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (tem),
10057 tem, tem2);
10058 return fold_build2_loc (loc, EQ_EXPR, type, tem2,
10059 build_zero_cst (TREE_TYPE (tem)));
10060 }
10061 /* Fold !X & 1 as X == 0. */
10062 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10063 && integer_onep (arg1))
10064 {
10065 tem = TREE_OPERAND (arg0, 0);
10066 return fold_build2_loc (loc, EQ_EXPR, type, tem,
10067 build_zero_cst (TREE_TYPE (tem)));
10068 }
10069
10070 /* Fold (X ^ Y) & Y as ~X & Y. */
10071 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10072 && operand_equal_p (TREE_OPERAND (arg0, 1), arg1, 0))
10073 {
10074 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10075 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10076 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10077 fold_convert_loc (loc, type, arg1));
10078 }
10079 /* Fold (X ^ Y) & X as ~Y & X. */
10080 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10081 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
10082 && reorder_operands_p (TREE_OPERAND (arg0, 1), arg1))
10083 {
10084 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10085 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10086 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10087 fold_convert_loc (loc, type, arg1));
10088 }
10089 /* Fold X & (X ^ Y) as X & ~Y. */
10090 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10091 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10092 {
10093 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10094 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10095 fold_convert_loc (loc, type, arg0),
10096 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem));
10097 }
10098 /* Fold X & (Y ^ X) as ~Y & X. */
10099 if (TREE_CODE (arg1) == BIT_XOR_EXPR
10100 && operand_equal_p (arg0, TREE_OPERAND (arg1, 1), 0)
10101 && reorder_operands_p (arg0, TREE_OPERAND (arg1, 0)))
10102 {
10103 tem = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10104 return fold_build2_loc (loc, BIT_AND_EXPR, type,
10105 fold_build1_loc (loc, BIT_NOT_EXPR, type, tem),
10106 fold_convert_loc (loc, type, arg0));
10107 }
10108
10109 /* Fold (X * Y) & -(1 << CST) to X * Y if Y is a constant
10110 multiple of 1 << CST. */
10111 if (TREE_CODE (arg1) == INTEGER_CST)
10112 {
10113 wide_int cst1 = arg1;
10114 wide_int ncst1 = -cst1;
10115 if ((cst1 & ncst1) == ncst1
10116 && multiple_of_p (type, arg0,
10117 wide_int_to_tree (TREE_TYPE (arg1), ncst1)))
10118 return fold_convert_loc (loc, type, arg0);
10119 }
10120
10121 /* Fold (X * CST1) & CST2 to zero if we can, or drop known zero
10122 bits from CST2. */
10123 if (TREE_CODE (arg1) == INTEGER_CST
10124 && TREE_CODE (arg0) == MULT_EXPR
10125 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10126 {
10127 wide_int warg1 = arg1;
10128 wide_int masked = mask_with_tz (type, warg1, TREE_OPERAND (arg0, 1));
10129
10130 if (masked == 0)
10131 return omit_two_operands_loc (loc, type, build_zero_cst (type),
10132 arg0, arg1);
10133 else if (masked != warg1)
10134 {
10135 /* Avoid the transform if arg1 is a mask of some
10136 mode which allows further optimizations. */
10137 int pop = wi::popcount (warg1);
10138 if (!(pop >= BITS_PER_UNIT
10139 && exact_log2 (pop) != -1
10140 && wi::mask (pop, false, warg1.get_precision ()) == warg1))
10141 return fold_build2_loc (loc, code, type, op0,
10142 wide_int_to_tree (type, masked));
10143 }
10144 }
10145
10146 /* For constants M and N, if M == (1LL << cst) - 1 && (N & M) == M,
10147 ((A & N) + B) & M -> (A + B) & M
10148 Similarly if (N & M) == 0,
10149 ((A | N) + B) & M -> (A + B) & M
10150 and for - instead of + (or unary - instead of +)
10151 and/or ^ instead of |.
10152 If B is constant and (B & M) == 0, fold into A & M. */
10153 if (TREE_CODE (arg1) == INTEGER_CST)
10154 {
10155 wide_int cst1 = arg1;
10156 if ((~cst1 != 0) && (cst1 & (cst1 + 1)) == 0
10157 && INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10158 && (TREE_CODE (arg0) == PLUS_EXPR
10159 || TREE_CODE (arg0) == MINUS_EXPR
10160 || TREE_CODE (arg0) == NEGATE_EXPR)
10161 && (TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0))
10162 || TREE_CODE (TREE_TYPE (arg0)) == INTEGER_TYPE))
10163 {
10164 tree pmop[2];
10165 int which = 0;
10166 wide_int cst0;
10167
10168 /* Now we know that arg0 is (C + D) or (C - D) or
10169 -C and arg1 (M) is == (1LL << cst) - 1.
10170 Store C into PMOP[0] and D into PMOP[1]. */
10171 pmop[0] = TREE_OPERAND (arg0, 0);
10172 pmop[1] = NULL;
10173 if (TREE_CODE (arg0) != NEGATE_EXPR)
10174 {
10175 pmop[1] = TREE_OPERAND (arg0, 1);
10176 which = 1;
10177 }
10178
10179 if ((wi::max_value (TREE_TYPE (arg0)) & cst1) != cst1)
10180 which = -1;
10181
10182 for (; which >= 0; which--)
10183 switch (TREE_CODE (pmop[which]))
10184 {
10185 case BIT_AND_EXPR:
10186 case BIT_IOR_EXPR:
10187 case BIT_XOR_EXPR:
10188 if (TREE_CODE (TREE_OPERAND (pmop[which], 1))
10189 != INTEGER_CST)
10190 break;
10191 cst0 = TREE_OPERAND (pmop[which], 1);
10192 cst0 &= cst1;
10193 if (TREE_CODE (pmop[which]) == BIT_AND_EXPR)
10194 {
10195 if (cst0 != cst1)
10196 break;
10197 }
10198 else if (cst0 != 0)
10199 break;
10200 /* If C or D is of the form (A & N) where
10201 (N & M) == M, or of the form (A | N) or
10202 (A ^ N) where (N & M) == 0, replace it with A. */
10203 pmop[which] = TREE_OPERAND (pmop[which], 0);
10204 break;
10205 case INTEGER_CST:
10206 /* If C or D is a N where (N & M) == 0, it can be
10207 omitted (assumed 0). */
10208 if ((TREE_CODE (arg0) == PLUS_EXPR
10209 || (TREE_CODE (arg0) == MINUS_EXPR && which == 0))
10210 && (cst1 & pmop[which]) == 0)
10211 pmop[which] = NULL;
10212 break;
10213 default:
10214 break;
10215 }
10216
10217 /* Only build anything new if we optimized one or both arguments
10218 above. */
10219 if (pmop[0] != TREE_OPERAND (arg0, 0)
10220 || (TREE_CODE (arg0) != NEGATE_EXPR
10221 && pmop[1] != TREE_OPERAND (arg0, 1)))
10222 {
10223 tree utype = TREE_TYPE (arg0);
10224 if (! TYPE_OVERFLOW_WRAPS (TREE_TYPE (arg0)))
10225 {
10226 /* Perform the operations in a type that has defined
10227 overflow behavior. */
10228 utype = unsigned_type_for (TREE_TYPE (arg0));
10229 if (pmop[0] != NULL)
10230 pmop[0] = fold_convert_loc (loc, utype, pmop[0]);
10231 if (pmop[1] != NULL)
10232 pmop[1] = fold_convert_loc (loc, utype, pmop[1]);
10233 }
10234
10235 if (TREE_CODE (arg0) == NEGATE_EXPR)
10236 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[0]);
10237 else if (TREE_CODE (arg0) == PLUS_EXPR)
10238 {
10239 if (pmop[0] != NULL && pmop[1] != NULL)
10240 tem = fold_build2_loc (loc, PLUS_EXPR, utype,
10241 pmop[0], pmop[1]);
10242 else if (pmop[0] != NULL)
10243 tem = pmop[0];
10244 else if (pmop[1] != NULL)
10245 tem = pmop[1];
10246 else
10247 return build_int_cst (type, 0);
10248 }
10249 else if (pmop[0] == NULL)
10250 tem = fold_build1_loc (loc, NEGATE_EXPR, utype, pmop[1]);
10251 else
10252 tem = fold_build2_loc (loc, MINUS_EXPR, utype,
10253 pmop[0], pmop[1]);
10254 /* TEM is now the new binary +, - or unary - replacement. */
10255 tem = fold_build2_loc (loc, BIT_AND_EXPR, utype, tem,
10256 fold_convert_loc (loc, utype, arg1));
10257 return fold_convert_loc (loc, type, tem);
10258 }
10259 }
10260 }
10261
10262 /* Simplify ((int)c & 0377) into (int)c, if c is unsigned char. */
10263 if (TREE_CODE (arg1) == INTEGER_CST && TREE_CODE (arg0) == NOP_EXPR
10264 && TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg0, 0))))
10265 {
10266 prec = element_precision (TREE_TYPE (TREE_OPERAND (arg0, 0)));
10267
10268 wide_int mask = wide_int::from (arg1, prec, UNSIGNED);
10269 if (mask == -1)
10270 return
10271 fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10272 }
10273
10274 goto associate;
10275
10276 case RDIV_EXPR:
10277 /* Don't touch a floating-point divide by zero unless the mode
10278 of the constant can represent infinity. */
10279 if (TREE_CODE (arg1) == REAL_CST
10280 && !MODE_HAS_INFINITIES (TYPE_MODE (TREE_TYPE (arg1)))
10281 && real_zerop (arg1))
10282 return NULL_TREE;
10283
10284 /* (-A) / (-B) -> A / B */
10285 if (TREE_CODE (arg0) == NEGATE_EXPR && negate_expr_p (arg1))
10286 return fold_build2_loc (loc, RDIV_EXPR, type,
10287 TREE_OPERAND (arg0, 0),
10288 negate_expr (arg1));
10289 if (TREE_CODE (arg1) == NEGATE_EXPR && negate_expr_p (arg0))
10290 return fold_build2_loc (loc, RDIV_EXPR, type,
10291 negate_expr (arg0),
10292 TREE_OPERAND (arg1, 0));
10293 return NULL_TREE;
10294
10295 case TRUNC_DIV_EXPR:
10296 /* Fall through */
10297
10298 case FLOOR_DIV_EXPR:
10299 /* Simplify A / (B << N) where A and B are positive and B is
10300 a power of 2, to A >> (N + log2(B)). */
10301 strict_overflow_p = false;
10302 if (TREE_CODE (arg1) == LSHIFT_EXPR
10303 && (TYPE_UNSIGNED (type)
10304 || tree_expr_nonnegative_warnv_p (op0, &strict_overflow_p)))
10305 {
10306 tree sval = TREE_OPERAND (arg1, 0);
10307 if (integer_pow2p (sval) && tree_int_cst_sgn (sval) > 0)
10308 {
10309 tree sh_cnt = TREE_OPERAND (arg1, 1);
10310 tree pow2 = build_int_cst (TREE_TYPE (sh_cnt),
10311 wi::exact_log2 (sval));
10312
10313 if (strict_overflow_p)
10314 fold_overflow_warning (("assuming signed overflow does not "
10315 "occur when simplifying A / (B << N)"),
10316 WARN_STRICT_OVERFLOW_MISC);
10317
10318 sh_cnt = fold_build2_loc (loc, PLUS_EXPR, TREE_TYPE (sh_cnt),
10319 sh_cnt, pow2);
10320 return fold_build2_loc (loc, RSHIFT_EXPR, type,
10321 fold_convert_loc (loc, type, arg0), sh_cnt);
10322 }
10323 }
10324
10325 /* Fall through */
10326
10327 case ROUND_DIV_EXPR:
10328 case CEIL_DIV_EXPR:
10329 case EXACT_DIV_EXPR:
10330 if (integer_zerop (arg1))
10331 return NULL_TREE;
10332
10333 /* Convert -A / -B to A / B when the type is signed and overflow is
10334 undefined. */
10335 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10336 && TREE_CODE (arg0) == NEGATE_EXPR
10337 && negate_expr_p (op1))
10338 {
10339 if (INTEGRAL_TYPE_P (type))
10340 fold_overflow_warning (("assuming signed overflow does not occur "
10341 "when distributing negation across "
10342 "division"),
10343 WARN_STRICT_OVERFLOW_MISC);
10344 return fold_build2_loc (loc, code, type,
10345 fold_convert_loc (loc, type,
10346 TREE_OPERAND (arg0, 0)),
10347 negate_expr (op1));
10348 }
10349 if ((!INTEGRAL_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
10350 && TREE_CODE (arg1) == NEGATE_EXPR
10351 && negate_expr_p (op0))
10352 {
10353 if (INTEGRAL_TYPE_P (type))
10354 fold_overflow_warning (("assuming signed overflow does not occur "
10355 "when distributing negation across "
10356 "division"),
10357 WARN_STRICT_OVERFLOW_MISC);
10358 return fold_build2_loc (loc, code, type,
10359 negate_expr (op0),
10360 fold_convert_loc (loc, type,
10361 TREE_OPERAND (arg1, 0)));
10362 }
10363
10364 /* If arg0 is a multiple of arg1, then rewrite to the fastest div
10365 operation, EXACT_DIV_EXPR.
10366
10367 Note that only CEIL_DIV_EXPR and FLOOR_DIV_EXPR are rewritten now.
10368 At one time others generated faster code, it's not clear if they do
10369 after the last round to changes to the DIV code in expmed.c. */
10370 if ((code == CEIL_DIV_EXPR || code == FLOOR_DIV_EXPR)
10371 && multiple_of_p (type, arg0, arg1))
10372 return fold_build2_loc (loc, EXACT_DIV_EXPR, type,
10373 fold_convert (type, arg0),
10374 fold_convert (type, arg1));
10375
10376 strict_overflow_p = false;
10377 if (TREE_CODE (arg1) == INTEGER_CST
10378 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10379 &strict_overflow_p)))
10380 {
10381 if (strict_overflow_p)
10382 fold_overflow_warning (("assuming signed overflow does not occur "
10383 "when simplifying division"),
10384 WARN_STRICT_OVERFLOW_MISC);
10385 return fold_convert_loc (loc, type, tem);
10386 }
10387
10388 return NULL_TREE;
10389
10390 case CEIL_MOD_EXPR:
10391 case FLOOR_MOD_EXPR:
10392 case ROUND_MOD_EXPR:
10393 case TRUNC_MOD_EXPR:
10394 strict_overflow_p = false;
10395 if (TREE_CODE (arg1) == INTEGER_CST
10396 && 0 != (tem = extract_muldiv (op0, arg1, code, NULL_TREE,
10397 &strict_overflow_p)))
10398 {
10399 if (strict_overflow_p)
10400 fold_overflow_warning (("assuming signed overflow does not occur "
10401 "when simplifying modulus"),
10402 WARN_STRICT_OVERFLOW_MISC);
10403 return fold_convert_loc (loc, type, tem);
10404 }
10405
10406 return NULL_TREE;
10407
10408 case LROTATE_EXPR:
10409 case RROTATE_EXPR:
10410 case RSHIFT_EXPR:
10411 case LSHIFT_EXPR:
10412 /* Since negative shift count is not well-defined,
10413 don't try to compute it in the compiler. */
10414 if (TREE_CODE (arg1) == INTEGER_CST && tree_int_cst_sgn (arg1) < 0)
10415 return NULL_TREE;
10416
10417 prec = element_precision (type);
10418
10419 /* If we have a rotate of a bit operation with the rotate count and
10420 the second operand of the bit operation both constant,
10421 permute the two operations. */
10422 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10423 && (TREE_CODE (arg0) == BIT_AND_EXPR
10424 || TREE_CODE (arg0) == BIT_IOR_EXPR
10425 || TREE_CODE (arg0) == BIT_XOR_EXPR)
10426 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10427 return fold_build2_loc (loc, TREE_CODE (arg0), type,
10428 fold_build2_loc (loc, code, type,
10429 TREE_OPERAND (arg0, 0), arg1),
10430 fold_build2_loc (loc, code, type,
10431 TREE_OPERAND (arg0, 1), arg1));
10432
10433 /* Two consecutive rotates adding up to the some integer
10434 multiple of the precision of the type can be ignored. */
10435 if (code == RROTATE_EXPR && TREE_CODE (arg1) == INTEGER_CST
10436 && TREE_CODE (arg0) == RROTATE_EXPR
10437 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
10438 && wi::umod_trunc (wi::add (arg1, TREE_OPERAND (arg0, 1)),
10439 prec) == 0)
10440 return TREE_OPERAND (arg0, 0);
10441
10442 return NULL_TREE;
10443
10444 case MIN_EXPR:
10445 case MAX_EXPR:
10446 goto associate;
10447
10448 case TRUTH_ANDIF_EXPR:
10449 /* Note that the operands of this must be ints
10450 and their values must be 0 or 1.
10451 ("true" is a fixed value perhaps depending on the language.) */
10452 /* If first arg is constant zero, return it. */
10453 if (integer_zerop (arg0))
10454 return fold_convert_loc (loc, type, arg0);
10455 case TRUTH_AND_EXPR:
10456 /* If either arg is constant true, drop it. */
10457 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10458 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10459 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1)
10460 /* Preserve sequence points. */
10461 && (code != TRUTH_ANDIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10462 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10463 /* If second arg is constant zero, result is zero, but first arg
10464 must be evaluated. */
10465 if (integer_zerop (arg1))
10466 return omit_one_operand_loc (loc, type, arg1, arg0);
10467 /* Likewise for first arg, but note that only the TRUTH_AND_EXPR
10468 case will be handled here. */
10469 if (integer_zerop (arg0))
10470 return omit_one_operand_loc (loc, type, arg0, arg1);
10471
10472 /* !X && X is always false. */
10473 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10474 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10475 return omit_one_operand_loc (loc, type, integer_zero_node, arg1);
10476 /* X && !X is always false. */
10477 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10478 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10479 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10480
10481 /* A < X && A + 1 > Y ==> A < X && A >= Y. Normally A + 1 > Y
10482 means A >= Y && A != MAX, but in this case we know that
10483 A < X <= MAX. */
10484
10485 if (!TREE_SIDE_EFFECTS (arg0)
10486 && !TREE_SIDE_EFFECTS (arg1))
10487 {
10488 tem = fold_to_nonsharp_ineq_using_bound (loc, arg0, arg1);
10489 if (tem && !operand_equal_p (tem, arg0, 0))
10490 return fold_build2_loc (loc, code, type, tem, arg1);
10491
10492 tem = fold_to_nonsharp_ineq_using_bound (loc, arg1, arg0);
10493 if (tem && !operand_equal_p (tem, arg1, 0))
10494 return fold_build2_loc (loc, code, type, arg0, tem);
10495 }
10496
10497 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10498 != NULL_TREE)
10499 return tem;
10500
10501 return NULL_TREE;
10502
10503 case TRUTH_ORIF_EXPR:
10504 /* Note that the operands of this must be ints
10505 and their values must be 0 or true.
10506 ("true" is a fixed value perhaps depending on the language.) */
10507 /* If first arg is constant true, return it. */
10508 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10509 return fold_convert_loc (loc, type, arg0);
10510 case TRUTH_OR_EXPR:
10511 /* If either arg is constant zero, drop it. */
10512 if (TREE_CODE (arg0) == INTEGER_CST && integer_zerop (arg0))
10513 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg1));
10514 if (TREE_CODE (arg1) == INTEGER_CST && integer_zerop (arg1)
10515 /* Preserve sequence points. */
10516 && (code != TRUTH_ORIF_EXPR || ! TREE_SIDE_EFFECTS (arg0)))
10517 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10518 /* If second arg is constant true, result is true, but we must
10519 evaluate first arg. */
10520 if (TREE_CODE (arg1) == INTEGER_CST && ! integer_zerop (arg1))
10521 return omit_one_operand_loc (loc, type, arg1, arg0);
10522 /* Likewise for first arg, but note this only occurs here for
10523 TRUTH_OR_EXPR. */
10524 if (TREE_CODE (arg0) == INTEGER_CST && ! integer_zerop (arg0))
10525 return omit_one_operand_loc (loc, type, arg0, arg1);
10526
10527 /* !X || X is always true. */
10528 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10529 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10530 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10531 /* X || !X is always true. */
10532 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10533 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10534 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10535
10536 /* (X && !Y) || (!X && Y) is X ^ Y */
10537 if (TREE_CODE (arg0) == TRUTH_AND_EXPR
10538 && TREE_CODE (arg1) == TRUTH_AND_EXPR)
10539 {
10540 tree a0, a1, l0, l1, n0, n1;
10541
10542 a0 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 0));
10543 a1 = fold_convert_loc (loc, type, TREE_OPERAND (arg1, 1));
10544
10545 l0 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 0));
10546 l1 = fold_convert_loc (loc, type, TREE_OPERAND (arg0, 1));
10547
10548 n0 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l0);
10549 n1 = fold_build1_loc (loc, TRUTH_NOT_EXPR, type, l1);
10550
10551 if ((operand_equal_p (n0, a0, 0)
10552 && operand_equal_p (n1, a1, 0))
10553 || (operand_equal_p (n0, a1, 0)
10554 && operand_equal_p (n1, a0, 0)))
10555 return fold_build2_loc (loc, TRUTH_XOR_EXPR, type, l0, n1);
10556 }
10557
10558 if ((tem = fold_truth_andor (loc, code, type, arg0, arg1, op0, op1))
10559 != NULL_TREE)
10560 return tem;
10561
10562 return NULL_TREE;
10563
10564 case TRUTH_XOR_EXPR:
10565 /* If the second arg is constant zero, drop it. */
10566 if (integer_zerop (arg1))
10567 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10568 /* If the second arg is constant true, this is a logical inversion. */
10569 if (integer_onep (arg1))
10570 {
10571 tem = invert_truthvalue_loc (loc, arg0);
10572 return non_lvalue_loc (loc, fold_convert_loc (loc, type, tem));
10573 }
10574 /* Identical arguments cancel to zero. */
10575 if (operand_equal_p (arg0, arg1, 0))
10576 return omit_one_operand_loc (loc, type, integer_zero_node, arg0);
10577
10578 /* !X ^ X is always true. */
10579 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR
10580 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0))
10581 return omit_one_operand_loc (loc, type, integer_one_node, arg1);
10582
10583 /* X ^ !X is always true. */
10584 if (TREE_CODE (arg1) == TRUTH_NOT_EXPR
10585 && operand_equal_p (arg0, TREE_OPERAND (arg1, 0), 0))
10586 return omit_one_operand_loc (loc, type, integer_one_node, arg0);
10587
10588 return NULL_TREE;
10589
10590 case EQ_EXPR:
10591 case NE_EXPR:
10592 STRIP_NOPS (arg0);
10593 STRIP_NOPS (arg1);
10594
10595 tem = fold_comparison (loc, code, type, op0, op1);
10596 if (tem != NULL_TREE)
10597 return tem;
10598
10599 /* bool_var != 1 becomes !bool_var. */
10600 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_onep (arg1)
10601 && code == NE_EXPR)
10602 return fold_convert_loc (loc, type,
10603 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10604 TREE_TYPE (arg0), arg0));
10605
10606 /* bool_var == 0 becomes !bool_var. */
10607 if (TREE_CODE (TREE_TYPE (arg0)) == BOOLEAN_TYPE && integer_zerop (arg1)
10608 && code == EQ_EXPR)
10609 return fold_convert_loc (loc, type,
10610 fold_build1_loc (loc, TRUTH_NOT_EXPR,
10611 TREE_TYPE (arg0), arg0));
10612
10613 /* !exp != 0 becomes !exp */
10614 if (TREE_CODE (arg0) == TRUTH_NOT_EXPR && integer_zerop (arg1)
10615 && code == NE_EXPR)
10616 return non_lvalue_loc (loc, fold_convert_loc (loc, type, arg0));
10617
10618 /* Transform comparisons of the form X +- Y CMP X to Y CMP 0. */
10619 if ((TREE_CODE (arg0) == PLUS_EXPR
10620 || TREE_CODE (arg0) == POINTER_PLUS_EXPR
10621 || TREE_CODE (arg0) == MINUS_EXPR)
10622 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10623 0)),
10624 arg1, 0)
10625 && (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
10626 || POINTER_TYPE_P (TREE_TYPE (arg0))))
10627 {
10628 tree val = TREE_OPERAND (arg0, 1);
10629 val = fold_build2_loc (loc, code, type, val,
10630 build_int_cst (TREE_TYPE (val), 0));
10631 return omit_two_operands_loc (loc, type, val,
10632 TREE_OPERAND (arg0, 0), arg1);
10633 }
10634
10635 /* Transform comparisons of the form X CMP X +- Y to Y CMP 0. */
10636 if ((TREE_CODE (arg1) == PLUS_EXPR
10637 || TREE_CODE (arg1) == POINTER_PLUS_EXPR
10638 || TREE_CODE (arg1) == MINUS_EXPR)
10639 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10640 0)),
10641 arg0, 0)
10642 && (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
10643 || POINTER_TYPE_P (TREE_TYPE (arg1))))
10644 {
10645 tree val = TREE_OPERAND (arg1, 1);
10646 val = fold_build2_loc (loc, code, type, val,
10647 build_int_cst (TREE_TYPE (val), 0));
10648 return omit_two_operands_loc (loc, type, val,
10649 TREE_OPERAND (arg1, 0), arg0);
10650 }
10651
10652 /* Transform comparisons of the form C - X CMP X if C % 2 == 1. */
10653 if (TREE_CODE (arg0) == MINUS_EXPR
10654 && TREE_CODE (TREE_OPERAND (arg0, 0)) == INTEGER_CST
10655 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg0,
10656 1)),
10657 arg1, 0)
10658 && wi::extract_uhwi (TREE_OPERAND (arg0, 0), 0, 1) == 1)
10659 return omit_two_operands_loc (loc, type,
10660 code == NE_EXPR
10661 ? boolean_true_node : boolean_false_node,
10662 TREE_OPERAND (arg0, 1), arg1);
10663
10664 /* Transform comparisons of the form X CMP C - X if C % 2 == 1. */
10665 if (TREE_CODE (arg1) == MINUS_EXPR
10666 && TREE_CODE (TREE_OPERAND (arg1, 0)) == INTEGER_CST
10667 && operand_equal_p (tree_strip_nop_conversions (TREE_OPERAND (arg1,
10668 1)),
10669 arg0, 0)
10670 && wi::extract_uhwi (TREE_OPERAND (arg1, 0), 0, 1) == 1)
10671 return omit_two_operands_loc (loc, type,
10672 code == NE_EXPR
10673 ? boolean_true_node : boolean_false_node,
10674 TREE_OPERAND (arg1, 1), arg0);
10675
10676 /* If this is an EQ or NE comparison with zero and ARG0 is
10677 (1 << foo) & bar, convert it to (bar >> foo) & 1. Both require
10678 two operations, but the latter can be done in one less insn
10679 on machines that have only two-operand insns or on which a
10680 constant cannot be the first operand. */
10681 if (TREE_CODE (arg0) == BIT_AND_EXPR
10682 && integer_zerop (arg1))
10683 {
10684 tree arg00 = TREE_OPERAND (arg0, 0);
10685 tree arg01 = TREE_OPERAND (arg0, 1);
10686 if (TREE_CODE (arg00) == LSHIFT_EXPR
10687 && integer_onep (TREE_OPERAND (arg00, 0)))
10688 {
10689 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg00),
10690 arg01, TREE_OPERAND (arg00, 1));
10691 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10692 build_int_cst (TREE_TYPE (arg0), 1));
10693 return fold_build2_loc (loc, code, type,
10694 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10695 arg1);
10696 }
10697 else if (TREE_CODE (arg01) == LSHIFT_EXPR
10698 && integer_onep (TREE_OPERAND (arg01, 0)))
10699 {
10700 tree tem = fold_build2_loc (loc, RSHIFT_EXPR, TREE_TYPE (arg01),
10701 arg00, TREE_OPERAND (arg01, 1));
10702 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0), tem,
10703 build_int_cst (TREE_TYPE (arg0), 1));
10704 return fold_build2_loc (loc, code, type,
10705 fold_convert_loc (loc, TREE_TYPE (arg1), tem),
10706 arg1);
10707 }
10708 }
10709
10710 /* If this is an NE or EQ comparison of zero against the result of a
10711 signed MOD operation whose second operand is a power of 2, make
10712 the MOD operation unsigned since it is simpler and equivalent. */
10713 if (integer_zerop (arg1)
10714 && !TYPE_UNSIGNED (TREE_TYPE (arg0))
10715 && (TREE_CODE (arg0) == TRUNC_MOD_EXPR
10716 || TREE_CODE (arg0) == CEIL_MOD_EXPR
10717 || TREE_CODE (arg0) == FLOOR_MOD_EXPR
10718 || TREE_CODE (arg0) == ROUND_MOD_EXPR)
10719 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10720 {
10721 tree newtype = unsigned_type_for (TREE_TYPE (arg0));
10722 tree newmod = fold_build2_loc (loc, TREE_CODE (arg0), newtype,
10723 fold_convert_loc (loc, newtype,
10724 TREE_OPERAND (arg0, 0)),
10725 fold_convert_loc (loc, newtype,
10726 TREE_OPERAND (arg0, 1)));
10727
10728 return fold_build2_loc (loc, code, type, newmod,
10729 fold_convert_loc (loc, newtype, arg1));
10730 }
10731
10732 /* Fold ((X >> C1) & C2) == 0 and ((X >> C1) & C2) != 0 where
10733 C1 is a valid shift constant, and C2 is a power of two, i.e.
10734 a single bit. */
10735 if (TREE_CODE (arg0) == BIT_AND_EXPR
10736 && TREE_CODE (TREE_OPERAND (arg0, 0)) == RSHIFT_EXPR
10737 && TREE_CODE (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1))
10738 == INTEGER_CST
10739 && integer_pow2p (TREE_OPERAND (arg0, 1))
10740 && integer_zerop (arg1))
10741 {
10742 tree itype = TREE_TYPE (arg0);
10743 tree arg001 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 1);
10744 prec = TYPE_PRECISION (itype);
10745
10746 /* Check for a valid shift count. */
10747 if (wi::ltu_p (arg001, prec))
10748 {
10749 tree arg01 = TREE_OPERAND (arg0, 1);
10750 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10751 unsigned HOST_WIDE_INT log2 = tree_log2 (arg01);
10752 /* If (C2 << C1) doesn't overflow, then ((X >> C1) & C2) != 0
10753 can be rewritten as (X & (C2 << C1)) != 0. */
10754 if ((log2 + TREE_INT_CST_LOW (arg001)) < prec)
10755 {
10756 tem = fold_build2_loc (loc, LSHIFT_EXPR, itype, arg01, arg001);
10757 tem = fold_build2_loc (loc, BIT_AND_EXPR, itype, arg000, tem);
10758 return fold_build2_loc (loc, code, type, tem,
10759 fold_convert_loc (loc, itype, arg1));
10760 }
10761 /* Otherwise, for signed (arithmetic) shifts,
10762 ((X >> C1) & C2) != 0 is rewritten as X < 0, and
10763 ((X >> C1) & C2) == 0 is rewritten as X >= 0. */
10764 else if (!TYPE_UNSIGNED (itype))
10765 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR, type,
10766 arg000, build_int_cst (itype, 0));
10767 /* Otherwise, of unsigned (logical) shifts,
10768 ((X >> C1) & C2) != 0 is rewritten as (X,false), and
10769 ((X >> C1) & C2) == 0 is rewritten as (X,true). */
10770 else
10771 return omit_one_operand_loc (loc, type,
10772 code == EQ_EXPR ? integer_one_node
10773 : integer_zero_node,
10774 arg000);
10775 }
10776 }
10777
10778 /* If we have (A & C) == D where D & ~C != 0, convert this into 0.
10779 Similarly for NE_EXPR. */
10780 if (TREE_CODE (arg0) == BIT_AND_EXPR
10781 && TREE_CODE (arg1) == INTEGER_CST
10782 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10783 {
10784 tree notc = fold_build1_loc (loc, BIT_NOT_EXPR,
10785 TREE_TYPE (TREE_OPERAND (arg0, 1)),
10786 TREE_OPERAND (arg0, 1));
10787 tree dandnotc
10788 = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10789 fold_convert_loc (loc, TREE_TYPE (arg0), arg1),
10790 notc);
10791 tree rslt = code == EQ_EXPR ? integer_zero_node : integer_one_node;
10792 if (integer_nonzerop (dandnotc))
10793 return omit_one_operand_loc (loc, type, rslt, arg0);
10794 }
10795
10796 /* If this is a comparison of a field, we may be able to simplify it. */
10797 if ((TREE_CODE (arg0) == COMPONENT_REF
10798 || TREE_CODE (arg0) == BIT_FIELD_REF)
10799 /* Handle the constant case even without -O
10800 to make sure the warnings are given. */
10801 && (optimize || TREE_CODE (arg1) == INTEGER_CST))
10802 {
10803 t1 = optimize_bit_field_compare (loc, code, type, arg0, arg1);
10804 if (t1)
10805 return t1;
10806 }
10807
10808 /* Optimize comparisons of strlen vs zero to a compare of the
10809 first character of the string vs zero. To wit,
10810 strlen(ptr) == 0 => *ptr == 0
10811 strlen(ptr) != 0 => *ptr != 0
10812 Other cases should reduce to one of these two (or a constant)
10813 due to the return value of strlen being unsigned. */
10814 if (TREE_CODE (arg0) == CALL_EXPR
10815 && integer_zerop (arg1))
10816 {
10817 tree fndecl = get_callee_fndecl (arg0);
10818
10819 if (fndecl
10820 && DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL
10821 && DECL_FUNCTION_CODE (fndecl) == BUILT_IN_STRLEN
10822 && call_expr_nargs (arg0) == 1
10823 && TREE_CODE (TREE_TYPE (CALL_EXPR_ARG (arg0, 0))) == POINTER_TYPE)
10824 {
10825 tree iref = build_fold_indirect_ref_loc (loc,
10826 CALL_EXPR_ARG (arg0, 0));
10827 return fold_build2_loc (loc, code, type, iref,
10828 build_int_cst (TREE_TYPE (iref), 0));
10829 }
10830 }
10831
10832 /* Fold (X >> C) != 0 into X < 0 if C is one less than the width
10833 of X. Similarly fold (X >> C) == 0 into X >= 0. */
10834 if (TREE_CODE (arg0) == RSHIFT_EXPR
10835 && integer_zerop (arg1)
10836 && TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST)
10837 {
10838 tree arg00 = TREE_OPERAND (arg0, 0);
10839 tree arg01 = TREE_OPERAND (arg0, 1);
10840 tree itype = TREE_TYPE (arg00);
10841 if (wi::eq_p (arg01, element_precision (itype) - 1))
10842 {
10843 if (TYPE_UNSIGNED (itype))
10844 {
10845 itype = signed_type_for (itype);
10846 arg00 = fold_convert_loc (loc, itype, arg00);
10847 }
10848 return fold_build2_loc (loc, code == EQ_EXPR ? GE_EXPR : LT_EXPR,
10849 type, arg00, build_zero_cst (itype));
10850 }
10851 }
10852
10853 /* Fold (~X & C) == 0 into (X & C) != 0 and (~X & C) != 0 into
10854 (X & C) == 0 when C is a single bit. */
10855 if (TREE_CODE (arg0) == BIT_AND_EXPR
10856 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_NOT_EXPR
10857 && integer_zerop (arg1)
10858 && integer_pow2p (TREE_OPERAND (arg0, 1)))
10859 {
10860 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg0),
10861 TREE_OPERAND (TREE_OPERAND (arg0, 0), 0),
10862 TREE_OPERAND (arg0, 1));
10863 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR,
10864 type, tem,
10865 fold_convert_loc (loc, TREE_TYPE (arg0),
10866 arg1));
10867 }
10868
10869 /* Fold ((X & C) ^ C) eq/ne 0 into (X & C) ne/eq 0, when the
10870 constant C is a power of two, i.e. a single bit. */
10871 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10872 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
10873 && integer_zerop (arg1)
10874 && integer_pow2p (TREE_OPERAND (arg0, 1))
10875 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10876 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10877 {
10878 tree arg00 = TREE_OPERAND (arg0, 0);
10879 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10880 arg00, build_int_cst (TREE_TYPE (arg00), 0));
10881 }
10882
10883 /* Likewise, fold ((X ^ C) & C) eq/ne 0 into (X & C) ne/eq 0,
10884 when is C is a power of two, i.e. a single bit. */
10885 if (TREE_CODE (arg0) == BIT_AND_EXPR
10886 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_XOR_EXPR
10887 && integer_zerop (arg1)
10888 && integer_pow2p (TREE_OPERAND (arg0, 1))
10889 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
10890 TREE_OPERAND (arg0, 1), OEP_ONLY_CONST))
10891 {
10892 tree arg000 = TREE_OPERAND (TREE_OPERAND (arg0, 0), 0);
10893 tem = fold_build2_loc (loc, BIT_AND_EXPR, TREE_TYPE (arg000),
10894 arg000, TREE_OPERAND (arg0, 1));
10895 return fold_build2_loc (loc, code == EQ_EXPR ? NE_EXPR : EQ_EXPR, type,
10896 tem, build_int_cst (TREE_TYPE (tem), 0));
10897 }
10898
10899 if (integer_zerop (arg1)
10900 && tree_expr_nonzero_p (arg0))
10901 {
10902 tree res = constant_boolean_node (code==NE_EXPR, type);
10903 return omit_one_operand_loc (loc, type, res, arg0);
10904 }
10905
10906 /* Fold (X & C) op (Y & C) as (X ^ Y) & C op 0", and symmetries. */
10907 if (TREE_CODE (arg0) == BIT_AND_EXPR
10908 && TREE_CODE (arg1) == BIT_AND_EXPR)
10909 {
10910 tree arg00 = TREE_OPERAND (arg0, 0);
10911 tree arg01 = TREE_OPERAND (arg0, 1);
10912 tree arg10 = TREE_OPERAND (arg1, 0);
10913 tree arg11 = TREE_OPERAND (arg1, 1);
10914 tree itype = TREE_TYPE (arg0);
10915
10916 if (operand_equal_p (arg01, arg11, 0))
10917 return fold_build2_loc (loc, code, type,
10918 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10919 fold_build2_loc (loc,
10920 BIT_XOR_EXPR, itype,
10921 arg00, arg10),
10922 arg01),
10923 build_zero_cst (itype));
10924
10925 if (operand_equal_p (arg01, arg10, 0))
10926 return fold_build2_loc (loc, code, type,
10927 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10928 fold_build2_loc (loc,
10929 BIT_XOR_EXPR, itype,
10930 arg00, arg11),
10931 arg01),
10932 build_zero_cst (itype));
10933
10934 if (operand_equal_p (arg00, arg11, 0))
10935 return fold_build2_loc (loc, code, type,
10936 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10937 fold_build2_loc (loc,
10938 BIT_XOR_EXPR, itype,
10939 arg01, arg10),
10940 arg00),
10941 build_zero_cst (itype));
10942
10943 if (operand_equal_p (arg00, arg10, 0))
10944 return fold_build2_loc (loc, code, type,
10945 fold_build2_loc (loc, BIT_AND_EXPR, itype,
10946 fold_build2_loc (loc,
10947 BIT_XOR_EXPR, itype,
10948 arg01, arg11),
10949 arg00),
10950 build_zero_cst (itype));
10951 }
10952
10953 if (TREE_CODE (arg0) == BIT_XOR_EXPR
10954 && TREE_CODE (arg1) == BIT_XOR_EXPR)
10955 {
10956 tree arg00 = TREE_OPERAND (arg0, 0);
10957 tree arg01 = TREE_OPERAND (arg0, 1);
10958 tree arg10 = TREE_OPERAND (arg1, 0);
10959 tree arg11 = TREE_OPERAND (arg1, 1);
10960 tree itype = TREE_TYPE (arg0);
10961
10962 /* Optimize (X ^ Z) op (Y ^ Z) as X op Y, and symmetries.
10963 operand_equal_p guarantees no side-effects so we don't need
10964 to use omit_one_operand on Z. */
10965 if (operand_equal_p (arg01, arg11, 0))
10966 return fold_build2_loc (loc, code, type, arg00,
10967 fold_convert_loc (loc, TREE_TYPE (arg00),
10968 arg10));
10969 if (operand_equal_p (arg01, arg10, 0))
10970 return fold_build2_loc (loc, code, type, arg00,
10971 fold_convert_loc (loc, TREE_TYPE (arg00),
10972 arg11));
10973 if (operand_equal_p (arg00, arg11, 0))
10974 return fold_build2_loc (loc, code, type, arg01,
10975 fold_convert_loc (loc, TREE_TYPE (arg01),
10976 arg10));
10977 if (operand_equal_p (arg00, arg10, 0))
10978 return fold_build2_loc (loc, code, type, arg01,
10979 fold_convert_loc (loc, TREE_TYPE (arg01),
10980 arg11));
10981
10982 /* Optimize (X ^ C1) op (Y ^ C2) as (X ^ (C1 ^ C2)) op Y. */
10983 if (TREE_CODE (arg01) == INTEGER_CST
10984 && TREE_CODE (arg11) == INTEGER_CST)
10985 {
10986 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg01,
10987 fold_convert_loc (loc, itype, arg11));
10988 tem = fold_build2_loc (loc, BIT_XOR_EXPR, itype, arg00, tem);
10989 return fold_build2_loc (loc, code, type, tem,
10990 fold_convert_loc (loc, itype, arg10));
10991 }
10992 }
10993
10994 /* Attempt to simplify equality/inequality comparisons of complex
10995 values. Only lower the comparison if the result is known or
10996 can be simplified to a single scalar comparison. */
10997 if ((TREE_CODE (arg0) == COMPLEX_EXPR
10998 || TREE_CODE (arg0) == COMPLEX_CST)
10999 && (TREE_CODE (arg1) == COMPLEX_EXPR
11000 || TREE_CODE (arg1) == COMPLEX_CST))
11001 {
11002 tree real0, imag0, real1, imag1;
11003 tree rcond, icond;
11004
11005 if (TREE_CODE (arg0) == COMPLEX_EXPR)
11006 {
11007 real0 = TREE_OPERAND (arg0, 0);
11008 imag0 = TREE_OPERAND (arg0, 1);
11009 }
11010 else
11011 {
11012 real0 = TREE_REALPART (arg0);
11013 imag0 = TREE_IMAGPART (arg0);
11014 }
11015
11016 if (TREE_CODE (arg1) == COMPLEX_EXPR)
11017 {
11018 real1 = TREE_OPERAND (arg1, 0);
11019 imag1 = TREE_OPERAND (arg1, 1);
11020 }
11021 else
11022 {
11023 real1 = TREE_REALPART (arg1);
11024 imag1 = TREE_IMAGPART (arg1);
11025 }
11026
11027 rcond = fold_binary_loc (loc, code, type, real0, real1);
11028 if (rcond && TREE_CODE (rcond) == INTEGER_CST)
11029 {
11030 if (integer_zerop (rcond))
11031 {
11032 if (code == EQ_EXPR)
11033 return omit_two_operands_loc (loc, type, boolean_false_node,
11034 imag0, imag1);
11035 return fold_build2_loc (loc, NE_EXPR, type, imag0, imag1);
11036 }
11037 else
11038 {
11039 if (code == NE_EXPR)
11040 return omit_two_operands_loc (loc, type, boolean_true_node,
11041 imag0, imag1);
11042 return fold_build2_loc (loc, EQ_EXPR, type, imag0, imag1);
11043 }
11044 }
11045
11046 icond = fold_binary_loc (loc, code, type, imag0, imag1);
11047 if (icond && TREE_CODE (icond) == INTEGER_CST)
11048 {
11049 if (integer_zerop (icond))
11050 {
11051 if (code == EQ_EXPR)
11052 return omit_two_operands_loc (loc, type, boolean_false_node,
11053 real0, real1);
11054 return fold_build2_loc (loc, NE_EXPR, type, real0, real1);
11055 }
11056 else
11057 {
11058 if (code == NE_EXPR)
11059 return omit_two_operands_loc (loc, type, boolean_true_node,
11060 real0, real1);
11061 return fold_build2_loc (loc, EQ_EXPR, type, real0, real1);
11062 }
11063 }
11064 }
11065
11066 return NULL_TREE;
11067
11068 case LT_EXPR:
11069 case GT_EXPR:
11070 case LE_EXPR:
11071 case GE_EXPR:
11072 tem = fold_comparison (loc, code, type, op0, op1);
11073 if (tem != NULL_TREE)
11074 return tem;
11075
11076 /* Transform comparisons of the form X +- C CMP X. */
11077 if ((TREE_CODE (arg0) == PLUS_EXPR || TREE_CODE (arg0) == MINUS_EXPR)
11078 && operand_equal_p (TREE_OPERAND (arg0, 0), arg1, 0)
11079 && ((TREE_CODE (TREE_OPERAND (arg0, 1)) == REAL_CST
11080 && !HONOR_SNANS (arg0))
11081 || (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
11082 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))))
11083 {
11084 tree arg01 = TREE_OPERAND (arg0, 1);
11085 enum tree_code code0 = TREE_CODE (arg0);
11086 int is_positive;
11087
11088 if (TREE_CODE (arg01) == REAL_CST)
11089 is_positive = REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg01)) ? -1 : 1;
11090 else
11091 is_positive = tree_int_cst_sgn (arg01);
11092
11093 /* (X - c) > X becomes false. */
11094 if (code == GT_EXPR
11095 && ((code0 == MINUS_EXPR && is_positive >= 0)
11096 || (code0 == PLUS_EXPR && is_positive <= 0)))
11097 {
11098 if (TREE_CODE (arg01) == INTEGER_CST
11099 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11100 fold_overflow_warning (("assuming signed overflow does not "
11101 "occur when assuming that (X - c) > X "
11102 "is always false"),
11103 WARN_STRICT_OVERFLOW_ALL);
11104 return constant_boolean_node (0, type);
11105 }
11106
11107 /* Likewise (X + c) < X becomes false. */
11108 if (code == LT_EXPR
11109 && ((code0 == PLUS_EXPR && is_positive >= 0)
11110 || (code0 == MINUS_EXPR && is_positive <= 0)))
11111 {
11112 if (TREE_CODE (arg01) == INTEGER_CST
11113 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11114 fold_overflow_warning (("assuming signed overflow does not "
11115 "occur when assuming that "
11116 "(X + c) < X is always false"),
11117 WARN_STRICT_OVERFLOW_ALL);
11118 return constant_boolean_node (0, type);
11119 }
11120
11121 /* Convert (X - c) <= X to true. */
11122 if (!HONOR_NANS (arg1)
11123 && code == LE_EXPR
11124 && ((code0 == MINUS_EXPR && is_positive >= 0)
11125 || (code0 == PLUS_EXPR && is_positive <= 0)))
11126 {
11127 if (TREE_CODE (arg01) == INTEGER_CST
11128 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11129 fold_overflow_warning (("assuming signed overflow does not "
11130 "occur when assuming that "
11131 "(X - c) <= X is always true"),
11132 WARN_STRICT_OVERFLOW_ALL);
11133 return constant_boolean_node (1, type);
11134 }
11135
11136 /* Convert (X + c) >= X to true. */
11137 if (!HONOR_NANS (arg1)
11138 && code == GE_EXPR
11139 && ((code0 == PLUS_EXPR && is_positive >= 0)
11140 || (code0 == MINUS_EXPR && is_positive <= 0)))
11141 {
11142 if (TREE_CODE (arg01) == INTEGER_CST
11143 && TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11144 fold_overflow_warning (("assuming signed overflow does not "
11145 "occur when assuming that "
11146 "(X + c) >= X is always true"),
11147 WARN_STRICT_OVERFLOW_ALL);
11148 return constant_boolean_node (1, type);
11149 }
11150
11151 if (TREE_CODE (arg01) == INTEGER_CST)
11152 {
11153 /* Convert X + c > X and X - c < X to true for integers. */
11154 if (code == GT_EXPR
11155 && ((code0 == PLUS_EXPR && is_positive > 0)
11156 || (code0 == MINUS_EXPR && is_positive < 0)))
11157 {
11158 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11159 fold_overflow_warning (("assuming signed overflow does "
11160 "not occur when assuming that "
11161 "(X + c) > X is always true"),
11162 WARN_STRICT_OVERFLOW_ALL);
11163 return constant_boolean_node (1, type);
11164 }
11165
11166 if (code == LT_EXPR
11167 && ((code0 == MINUS_EXPR && is_positive > 0)
11168 || (code0 == PLUS_EXPR && is_positive < 0)))
11169 {
11170 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11171 fold_overflow_warning (("assuming signed overflow does "
11172 "not occur when assuming that "
11173 "(X - c) < X is always true"),
11174 WARN_STRICT_OVERFLOW_ALL);
11175 return constant_boolean_node (1, type);
11176 }
11177
11178 /* Convert X + c <= X and X - c >= X to false for integers. */
11179 if (code == LE_EXPR
11180 && ((code0 == PLUS_EXPR && is_positive > 0)
11181 || (code0 == MINUS_EXPR && is_positive < 0)))
11182 {
11183 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11184 fold_overflow_warning (("assuming signed overflow does "
11185 "not occur when assuming that "
11186 "(X + c) <= X is always false"),
11187 WARN_STRICT_OVERFLOW_ALL);
11188 return constant_boolean_node (0, type);
11189 }
11190
11191 if (code == GE_EXPR
11192 && ((code0 == MINUS_EXPR && is_positive > 0)
11193 || (code0 == PLUS_EXPR && is_positive < 0)))
11194 {
11195 if (TYPE_OVERFLOW_UNDEFINED (TREE_TYPE (arg1)))
11196 fold_overflow_warning (("assuming signed overflow does "
11197 "not occur when assuming that "
11198 "(X - c) >= X is always false"),
11199 WARN_STRICT_OVERFLOW_ALL);
11200 return constant_boolean_node (0, type);
11201 }
11202 }
11203 }
11204
11205 /* If we are comparing an ABS_EXPR with a constant, we can
11206 convert all the cases into explicit comparisons, but they may
11207 well not be faster than doing the ABS and one comparison.
11208 But ABS (X) <= C is a range comparison, which becomes a subtraction
11209 and a comparison, and is probably faster. */
11210 if (code == LE_EXPR
11211 && TREE_CODE (arg1) == INTEGER_CST
11212 && TREE_CODE (arg0) == ABS_EXPR
11213 && ! TREE_SIDE_EFFECTS (arg0)
11214 && (0 != (tem = negate_expr (arg1)))
11215 && TREE_CODE (tem) == INTEGER_CST
11216 && !TREE_OVERFLOW (tem))
11217 return fold_build2_loc (loc, TRUTH_ANDIF_EXPR, type,
11218 build2 (GE_EXPR, type,
11219 TREE_OPERAND (arg0, 0), tem),
11220 build2 (LE_EXPR, type,
11221 TREE_OPERAND (arg0, 0), arg1));
11222
11223 /* Convert ABS_EXPR<x> >= 0 to true. */
11224 strict_overflow_p = false;
11225 if (code == GE_EXPR
11226 && (integer_zerop (arg1)
11227 || (! HONOR_NANS (arg0)
11228 && real_zerop (arg1)))
11229 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11230 {
11231 if (strict_overflow_p)
11232 fold_overflow_warning (("assuming signed overflow does not occur "
11233 "when simplifying comparison of "
11234 "absolute value and zero"),
11235 WARN_STRICT_OVERFLOW_CONDITIONAL);
11236 return omit_one_operand_loc (loc, type,
11237 constant_boolean_node (true, type),
11238 arg0);
11239 }
11240
11241 /* Convert ABS_EXPR<x> < 0 to false. */
11242 strict_overflow_p = false;
11243 if (code == LT_EXPR
11244 && (integer_zerop (arg1) || real_zerop (arg1))
11245 && tree_expr_nonnegative_warnv_p (arg0, &strict_overflow_p))
11246 {
11247 if (strict_overflow_p)
11248 fold_overflow_warning (("assuming signed overflow does not occur "
11249 "when simplifying comparison of "
11250 "absolute value and zero"),
11251 WARN_STRICT_OVERFLOW_CONDITIONAL);
11252 return omit_one_operand_loc (loc, type,
11253 constant_boolean_node (false, type),
11254 arg0);
11255 }
11256
11257 /* If X is unsigned, convert X < (1 << Y) into X >> Y == 0
11258 and similarly for >= into !=. */
11259 if ((code == LT_EXPR || code == GE_EXPR)
11260 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11261 && TREE_CODE (arg1) == LSHIFT_EXPR
11262 && integer_onep (TREE_OPERAND (arg1, 0)))
11263 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11264 build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11265 TREE_OPERAND (arg1, 1)),
11266 build_zero_cst (TREE_TYPE (arg0)));
11267
11268 /* Similarly for X < (cast) (1 << Y). But cast can't be narrowing,
11269 otherwise Y might be >= # of bits in X's type and thus e.g.
11270 (unsigned char) (1 << Y) for Y 15 might be 0.
11271 If the cast is widening, then 1 << Y should have unsigned type,
11272 otherwise if Y is number of bits in the signed shift type minus 1,
11273 we can't optimize this. E.g. (unsigned long long) (1 << Y) for Y
11274 31 might be 0xffffffff80000000. */
11275 if ((code == LT_EXPR || code == GE_EXPR)
11276 && TYPE_UNSIGNED (TREE_TYPE (arg0))
11277 && CONVERT_EXPR_P (arg1)
11278 && TREE_CODE (TREE_OPERAND (arg1, 0)) == LSHIFT_EXPR
11279 && (element_precision (TREE_TYPE (arg1))
11280 >= element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0))))
11281 && (TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (arg1, 0)))
11282 || (element_precision (TREE_TYPE (arg1))
11283 == element_precision (TREE_TYPE (TREE_OPERAND (arg1, 0)))))
11284 && integer_onep (TREE_OPERAND (TREE_OPERAND (arg1, 0), 0)))
11285 {
11286 tem = build2 (RSHIFT_EXPR, TREE_TYPE (arg0), arg0,
11287 TREE_OPERAND (TREE_OPERAND (arg1, 0), 1));
11288 return build2_loc (loc, code == LT_EXPR ? EQ_EXPR : NE_EXPR, type,
11289 fold_convert_loc (loc, TREE_TYPE (arg0), tem),
11290 build_zero_cst (TREE_TYPE (arg0)));
11291 }
11292
11293 return NULL_TREE;
11294
11295 case UNORDERED_EXPR:
11296 case ORDERED_EXPR:
11297 case UNLT_EXPR:
11298 case UNLE_EXPR:
11299 case UNGT_EXPR:
11300 case UNGE_EXPR:
11301 case UNEQ_EXPR:
11302 case LTGT_EXPR:
11303 /* Fold (double)float1 CMP (double)float2 into float1 CMP float2. */
11304 {
11305 tree targ0 = strip_float_extensions (arg0);
11306 tree targ1 = strip_float_extensions (arg1);
11307 tree newtype = TREE_TYPE (targ0);
11308
11309 if (TYPE_PRECISION (TREE_TYPE (targ1)) > TYPE_PRECISION (newtype))
11310 newtype = TREE_TYPE (targ1);
11311
11312 if (TYPE_PRECISION (newtype) < TYPE_PRECISION (TREE_TYPE (arg0)))
11313 return fold_build2_loc (loc, code, type,
11314 fold_convert_loc (loc, newtype, targ0),
11315 fold_convert_loc (loc, newtype, targ1));
11316 }
11317
11318 return NULL_TREE;
11319
11320 case COMPOUND_EXPR:
11321 /* When pedantic, a compound expression can be neither an lvalue
11322 nor an integer constant expression. */
11323 if (TREE_SIDE_EFFECTS (arg0) || TREE_CONSTANT (arg1))
11324 return NULL_TREE;
11325 /* Don't let (0, 0) be null pointer constant. */
11326 tem = integer_zerop (arg1) ? build1 (NOP_EXPR, type, arg1)
11327 : fold_convert_loc (loc, type, arg1);
11328 return pedantic_non_lvalue_loc (loc, tem);
11329
11330 case ASSERT_EXPR:
11331 /* An ASSERT_EXPR should never be passed to fold_binary. */
11332 gcc_unreachable ();
11333
11334 default:
11335 return NULL_TREE;
11336 } /* switch (code) */
11337 }
11338
11339 /* Callback for walk_tree, looking for LABEL_EXPR. Return *TP if it is
11340 a LABEL_EXPR; otherwise return NULL_TREE. Do not check the subtrees
11341 of GOTO_EXPR. */
11342
11343 static tree
11344 contains_label_1 (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
11345 {
11346 switch (TREE_CODE (*tp))
11347 {
11348 case LABEL_EXPR:
11349 return *tp;
11350
11351 case GOTO_EXPR:
11352 *walk_subtrees = 0;
11353
11354 /* ... fall through ... */
11355
11356 default:
11357 return NULL_TREE;
11358 }
11359 }
11360
11361 /* Return whether the sub-tree ST contains a label which is accessible from
11362 outside the sub-tree. */
11363
11364 static bool
11365 contains_label_p (tree st)
11366 {
11367 return
11368 (walk_tree_without_duplicates (&st, contains_label_1 , NULL) != NULL_TREE);
11369 }
11370
11371 /* Fold a ternary expression of code CODE and type TYPE with operands
11372 OP0, OP1, and OP2. Return the folded expression if folding is
11373 successful. Otherwise, return NULL_TREE. */
11374
11375 tree
11376 fold_ternary_loc (location_t loc, enum tree_code code, tree type,
11377 tree op0, tree op1, tree op2)
11378 {
11379 tree tem;
11380 tree arg0 = NULL_TREE, arg1 = NULL_TREE, arg2 = NULL_TREE;
11381 enum tree_code_class kind = TREE_CODE_CLASS (code);
11382
11383 gcc_assert (IS_EXPR_CODE_CLASS (kind)
11384 && TREE_CODE_LENGTH (code) == 3);
11385
11386 /* If this is a commutative operation, and OP0 is a constant, move it
11387 to OP1 to reduce the number of tests below. */
11388 if (commutative_ternary_tree_code (code)
11389 && tree_swap_operands_p (op0, op1, true))
11390 return fold_build3_loc (loc, code, type, op1, op0, op2);
11391
11392 tem = generic_simplify (loc, code, type, op0, op1, op2);
11393 if (tem)
11394 return tem;
11395
11396 /* Strip any conversions that don't change the mode. This is safe
11397 for every expression, except for a comparison expression because
11398 its signedness is derived from its operands. So, in the latter
11399 case, only strip conversions that don't change the signedness.
11400
11401 Note that this is done as an internal manipulation within the
11402 constant folder, in order to find the simplest representation of
11403 the arguments so that their form can be studied. In any cases,
11404 the appropriate type conversions should be put back in the tree
11405 that will get out of the constant folder. */
11406 if (op0)
11407 {
11408 arg0 = op0;
11409 STRIP_NOPS (arg0);
11410 }
11411
11412 if (op1)
11413 {
11414 arg1 = op1;
11415 STRIP_NOPS (arg1);
11416 }
11417
11418 if (op2)
11419 {
11420 arg2 = op2;
11421 STRIP_NOPS (arg2);
11422 }
11423
11424 switch (code)
11425 {
11426 case COMPONENT_REF:
11427 if (TREE_CODE (arg0) == CONSTRUCTOR
11428 && ! type_contains_placeholder_p (TREE_TYPE (arg0)))
11429 {
11430 unsigned HOST_WIDE_INT idx;
11431 tree field, value;
11432 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (arg0), idx, field, value)
11433 if (field == arg1)
11434 return value;
11435 }
11436 return NULL_TREE;
11437
11438 case COND_EXPR:
11439 case VEC_COND_EXPR:
11440 /* Pedantic ANSI C says that a conditional expression is never an lvalue,
11441 so all simple results must be passed through pedantic_non_lvalue. */
11442 if (TREE_CODE (arg0) == INTEGER_CST)
11443 {
11444 tree unused_op = integer_zerop (arg0) ? op1 : op2;
11445 tem = integer_zerop (arg0) ? op2 : op1;
11446 /* Only optimize constant conditions when the selected branch
11447 has the same type as the COND_EXPR. This avoids optimizing
11448 away "c ? x : throw", where the throw has a void type.
11449 Avoid throwing away that operand which contains label. */
11450 if ((!TREE_SIDE_EFFECTS (unused_op)
11451 || !contains_label_p (unused_op))
11452 && (! VOID_TYPE_P (TREE_TYPE (tem))
11453 || VOID_TYPE_P (type)))
11454 return pedantic_non_lvalue_loc (loc, tem);
11455 return NULL_TREE;
11456 }
11457 else if (TREE_CODE (arg0) == VECTOR_CST)
11458 {
11459 if ((TREE_CODE (arg1) == VECTOR_CST
11460 || TREE_CODE (arg1) == CONSTRUCTOR)
11461 && (TREE_CODE (arg2) == VECTOR_CST
11462 || TREE_CODE (arg2) == CONSTRUCTOR))
11463 {
11464 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i;
11465 unsigned char *sel = XALLOCAVEC (unsigned char, nelts);
11466 gcc_assert (nelts == VECTOR_CST_NELTS (arg0));
11467 for (i = 0; i < nelts; i++)
11468 {
11469 tree val = VECTOR_CST_ELT (arg0, i);
11470 if (integer_all_onesp (val))
11471 sel[i] = i;
11472 else if (integer_zerop (val))
11473 sel[i] = nelts + i;
11474 else /* Currently unreachable. */
11475 return NULL_TREE;
11476 }
11477 tree t = fold_vec_perm (type, arg1, arg2, sel);
11478 if (t != NULL_TREE)
11479 return t;
11480 }
11481 }
11482
11483 /* If we have A op B ? A : C, we may be able to convert this to a
11484 simpler expression, depending on the operation and the values
11485 of B and C. Signed zeros prevent all of these transformations,
11486 for reasons given above each one.
11487
11488 Also try swapping the arguments and inverting the conditional. */
11489 if (COMPARISON_CLASS_P (arg0)
11490 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11491 arg1, TREE_OPERAND (arg0, 1))
11492 && !HONOR_SIGNED_ZEROS (element_mode (arg1)))
11493 {
11494 tem = fold_cond_expr_with_comparison (loc, type, arg0, op1, op2);
11495 if (tem)
11496 return tem;
11497 }
11498
11499 if (COMPARISON_CLASS_P (arg0)
11500 && operand_equal_for_comparison_p (TREE_OPERAND (arg0, 0),
11501 op2,
11502 TREE_OPERAND (arg0, 1))
11503 && !HONOR_SIGNED_ZEROS (element_mode (op2)))
11504 {
11505 location_t loc0 = expr_location_or (arg0, loc);
11506 tem = fold_invert_truthvalue (loc0, arg0);
11507 if (tem && COMPARISON_CLASS_P (tem))
11508 {
11509 tem = fold_cond_expr_with_comparison (loc, type, tem, op2, op1);
11510 if (tem)
11511 return tem;
11512 }
11513 }
11514
11515 /* If the second operand is simpler than the third, swap them
11516 since that produces better jump optimization results. */
11517 if (truth_value_p (TREE_CODE (arg0))
11518 && tree_swap_operands_p (op1, op2, false))
11519 {
11520 location_t loc0 = expr_location_or (arg0, loc);
11521 /* See if this can be inverted. If it can't, possibly because
11522 it was a floating-point inequality comparison, don't do
11523 anything. */
11524 tem = fold_invert_truthvalue (loc0, arg0);
11525 if (tem)
11526 return fold_build3_loc (loc, code, type, tem, op2, op1);
11527 }
11528
11529 /* Convert A ? 1 : 0 to simply A. */
11530 if ((code == VEC_COND_EXPR ? integer_all_onesp (op1)
11531 : (integer_onep (op1)
11532 && !VECTOR_TYPE_P (type)))
11533 && integer_zerop (op2)
11534 /* If we try to convert OP0 to our type, the
11535 call to fold will try to move the conversion inside
11536 a COND, which will recurse. In that case, the COND_EXPR
11537 is probably the best choice, so leave it alone. */
11538 && type == TREE_TYPE (arg0))
11539 return pedantic_non_lvalue_loc (loc, arg0);
11540
11541 /* Convert A ? 0 : 1 to !A. This prefers the use of NOT_EXPR
11542 over COND_EXPR in cases such as floating point comparisons. */
11543 if (integer_zerop (op1)
11544 && (code == VEC_COND_EXPR ? integer_all_onesp (op2)
11545 : (integer_onep (op2)
11546 && !VECTOR_TYPE_P (type)))
11547 && truth_value_p (TREE_CODE (arg0)))
11548 return pedantic_non_lvalue_loc (loc,
11549 fold_convert_loc (loc, type,
11550 invert_truthvalue_loc (loc,
11551 arg0)));
11552
11553 /* A < 0 ? <sign bit of A> : 0 is simply (A & <sign bit of A>). */
11554 if (TREE_CODE (arg0) == LT_EXPR
11555 && integer_zerop (TREE_OPERAND (arg0, 1))
11556 && integer_zerop (op2)
11557 && (tem = sign_bit_p (TREE_OPERAND (arg0, 0), arg1)))
11558 {
11559 /* sign_bit_p looks through both zero and sign extensions,
11560 but for this optimization only sign extensions are
11561 usable. */
11562 tree tem2 = TREE_OPERAND (arg0, 0);
11563 while (tem != tem2)
11564 {
11565 if (TREE_CODE (tem2) != NOP_EXPR
11566 || TYPE_UNSIGNED (TREE_TYPE (TREE_OPERAND (tem2, 0))))
11567 {
11568 tem = NULL_TREE;
11569 break;
11570 }
11571 tem2 = TREE_OPERAND (tem2, 0);
11572 }
11573 /* sign_bit_p only checks ARG1 bits within A's precision.
11574 If <sign bit of A> has wider type than A, bits outside
11575 of A's precision in <sign bit of A> need to be checked.
11576 If they are all 0, this optimization needs to be done
11577 in unsigned A's type, if they are all 1 in signed A's type,
11578 otherwise this can't be done. */
11579 if (tem
11580 && TYPE_PRECISION (TREE_TYPE (tem))
11581 < TYPE_PRECISION (TREE_TYPE (arg1))
11582 && TYPE_PRECISION (TREE_TYPE (tem))
11583 < TYPE_PRECISION (type))
11584 {
11585 int inner_width, outer_width;
11586 tree tem_type;
11587
11588 inner_width = TYPE_PRECISION (TREE_TYPE (tem));
11589 outer_width = TYPE_PRECISION (TREE_TYPE (arg1));
11590 if (outer_width > TYPE_PRECISION (type))
11591 outer_width = TYPE_PRECISION (type);
11592
11593 wide_int mask = wi::shifted_mask
11594 (inner_width, outer_width - inner_width, false,
11595 TYPE_PRECISION (TREE_TYPE (arg1)));
11596
11597 wide_int common = mask & arg1;
11598 if (common == mask)
11599 {
11600 tem_type = signed_type_for (TREE_TYPE (tem));
11601 tem = fold_convert_loc (loc, tem_type, tem);
11602 }
11603 else if (common == 0)
11604 {
11605 tem_type = unsigned_type_for (TREE_TYPE (tem));
11606 tem = fold_convert_loc (loc, tem_type, tem);
11607 }
11608 else
11609 tem = NULL;
11610 }
11611
11612 if (tem)
11613 return
11614 fold_convert_loc (loc, type,
11615 fold_build2_loc (loc, BIT_AND_EXPR,
11616 TREE_TYPE (tem), tem,
11617 fold_convert_loc (loc,
11618 TREE_TYPE (tem),
11619 arg1)));
11620 }
11621
11622 /* (A >> N) & 1 ? (1 << N) : 0 is simply A & (1 << N). A & 1 was
11623 already handled above. */
11624 if (TREE_CODE (arg0) == BIT_AND_EXPR
11625 && integer_onep (TREE_OPERAND (arg0, 1))
11626 && integer_zerop (op2)
11627 && integer_pow2p (arg1))
11628 {
11629 tree tem = TREE_OPERAND (arg0, 0);
11630 STRIP_NOPS (tem);
11631 if (TREE_CODE (tem) == RSHIFT_EXPR
11632 && tree_fits_uhwi_p (TREE_OPERAND (tem, 1))
11633 && (unsigned HOST_WIDE_INT) tree_log2 (arg1) ==
11634 tree_to_uhwi (TREE_OPERAND (tem, 1)))
11635 return fold_build2_loc (loc, BIT_AND_EXPR, type,
11636 TREE_OPERAND (tem, 0), arg1);
11637 }
11638
11639 /* A & N ? N : 0 is simply A & N if N is a power of two. This
11640 is probably obsolete because the first operand should be a
11641 truth value (that's why we have the two cases above), but let's
11642 leave it in until we can confirm this for all front-ends. */
11643 if (integer_zerop (op2)
11644 && TREE_CODE (arg0) == NE_EXPR
11645 && integer_zerop (TREE_OPERAND (arg0, 1))
11646 && integer_pow2p (arg1)
11647 && TREE_CODE (TREE_OPERAND (arg0, 0)) == BIT_AND_EXPR
11648 && operand_equal_p (TREE_OPERAND (TREE_OPERAND (arg0, 0), 1),
11649 arg1, OEP_ONLY_CONST))
11650 return pedantic_non_lvalue_loc (loc,
11651 fold_convert_loc (loc, type,
11652 TREE_OPERAND (arg0, 0)));
11653
11654 /* Disable the transformations below for vectors, since
11655 fold_binary_op_with_conditional_arg may undo them immediately,
11656 yielding an infinite loop. */
11657 if (code == VEC_COND_EXPR)
11658 return NULL_TREE;
11659
11660 /* Convert A ? B : 0 into A && B if A and B are truth values. */
11661 if (integer_zerop (op2)
11662 && truth_value_p (TREE_CODE (arg0))
11663 && truth_value_p (TREE_CODE (arg1))
11664 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11665 return fold_build2_loc (loc, code == VEC_COND_EXPR ? BIT_AND_EXPR
11666 : TRUTH_ANDIF_EXPR,
11667 type, fold_convert_loc (loc, type, arg0), arg1);
11668
11669 /* Convert A ? B : 1 into !A || B if A and B are truth values. */
11670 if (code == VEC_COND_EXPR ? integer_all_onesp (op2) : integer_onep (op2)
11671 && truth_value_p (TREE_CODE (arg0))
11672 && truth_value_p (TREE_CODE (arg1))
11673 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11674 {
11675 location_t loc0 = expr_location_or (arg0, loc);
11676 /* Only perform transformation if ARG0 is easily inverted. */
11677 tem = fold_invert_truthvalue (loc0, arg0);
11678 if (tem)
11679 return fold_build2_loc (loc, code == VEC_COND_EXPR
11680 ? BIT_IOR_EXPR
11681 : TRUTH_ORIF_EXPR,
11682 type, fold_convert_loc (loc, type, tem),
11683 arg1);
11684 }
11685
11686 /* Convert A ? 0 : B into !A && B if A and B are truth values. */
11687 if (integer_zerop (arg1)
11688 && truth_value_p (TREE_CODE (arg0))
11689 && truth_value_p (TREE_CODE (op2))
11690 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11691 {
11692 location_t loc0 = expr_location_or (arg0, loc);
11693 /* Only perform transformation if ARG0 is easily inverted. */
11694 tem = fold_invert_truthvalue (loc0, arg0);
11695 if (tem)
11696 return fold_build2_loc (loc, code == VEC_COND_EXPR
11697 ? BIT_AND_EXPR : TRUTH_ANDIF_EXPR,
11698 type, fold_convert_loc (loc, type, tem),
11699 op2);
11700 }
11701
11702 /* Convert A ? 1 : B into A || B if A and B are truth values. */
11703 if (code == VEC_COND_EXPR ? integer_all_onesp (arg1) : integer_onep (arg1)
11704 && truth_value_p (TREE_CODE (arg0))
11705 && truth_value_p (TREE_CODE (op2))
11706 && (code == VEC_COND_EXPR || !VECTOR_TYPE_P (type)))
11707 return fold_build2_loc (loc, code == VEC_COND_EXPR
11708 ? BIT_IOR_EXPR : TRUTH_ORIF_EXPR,
11709 type, fold_convert_loc (loc, type, arg0), op2);
11710
11711 return NULL_TREE;
11712
11713 case CALL_EXPR:
11714 /* CALL_EXPRs used to be ternary exprs. Catch any mistaken uses
11715 of fold_ternary on them. */
11716 gcc_unreachable ();
11717
11718 case BIT_FIELD_REF:
11719 if ((TREE_CODE (arg0) == VECTOR_CST
11720 || (TREE_CODE (arg0) == CONSTRUCTOR
11721 && TREE_CODE (TREE_TYPE (arg0)) == VECTOR_TYPE))
11722 && (type == TREE_TYPE (TREE_TYPE (arg0))
11723 || (TREE_CODE (type) == VECTOR_TYPE
11724 && TREE_TYPE (type) == TREE_TYPE (TREE_TYPE (arg0)))))
11725 {
11726 tree eltype = TREE_TYPE (TREE_TYPE (arg0));
11727 unsigned HOST_WIDE_INT width = tree_to_uhwi (TYPE_SIZE (eltype));
11728 unsigned HOST_WIDE_INT n = tree_to_uhwi (arg1);
11729 unsigned HOST_WIDE_INT idx = tree_to_uhwi (op2);
11730
11731 if (n != 0
11732 && (idx % width) == 0
11733 && (n % width) == 0
11734 && ((idx + n) / width) <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (arg0)))
11735 {
11736 idx = idx / width;
11737 n = n / width;
11738
11739 if (TREE_CODE (arg0) == VECTOR_CST)
11740 {
11741 if (n == 1)
11742 return VECTOR_CST_ELT (arg0, idx);
11743
11744 tree *vals = XALLOCAVEC (tree, n);
11745 for (unsigned i = 0; i < n; ++i)
11746 vals[i] = VECTOR_CST_ELT (arg0, idx + i);
11747 return build_vector (type, vals);
11748 }
11749
11750 /* Constructor elements can be subvectors. */
11751 unsigned HOST_WIDE_INT k = 1;
11752 if (CONSTRUCTOR_NELTS (arg0) != 0)
11753 {
11754 tree cons_elem = TREE_TYPE (CONSTRUCTOR_ELT (arg0, 0)->value);
11755 if (TREE_CODE (cons_elem) == VECTOR_TYPE)
11756 k = TYPE_VECTOR_SUBPARTS (cons_elem);
11757 }
11758
11759 /* We keep an exact subset of the constructor elements. */
11760 if ((idx % k) == 0 && (n % k) == 0)
11761 {
11762 if (CONSTRUCTOR_NELTS (arg0) == 0)
11763 return build_constructor (type, NULL);
11764 idx /= k;
11765 n /= k;
11766 if (n == 1)
11767 {
11768 if (idx < CONSTRUCTOR_NELTS (arg0))
11769 return CONSTRUCTOR_ELT (arg0, idx)->value;
11770 return build_zero_cst (type);
11771 }
11772
11773 vec<constructor_elt, va_gc> *vals;
11774 vec_alloc (vals, n);
11775 for (unsigned i = 0;
11776 i < n && idx + i < CONSTRUCTOR_NELTS (arg0);
11777 ++i)
11778 CONSTRUCTOR_APPEND_ELT (vals, NULL_TREE,
11779 CONSTRUCTOR_ELT
11780 (arg0, idx + i)->value);
11781 return build_constructor (type, vals);
11782 }
11783 /* The bitfield references a single constructor element. */
11784 else if (idx + n <= (idx / k + 1) * k)
11785 {
11786 if (CONSTRUCTOR_NELTS (arg0) <= idx / k)
11787 return build_zero_cst (type);
11788 else if (n == k)
11789 return CONSTRUCTOR_ELT (arg0, idx / k)->value;
11790 else
11791 return fold_build3_loc (loc, code, type,
11792 CONSTRUCTOR_ELT (arg0, idx / k)->value, op1,
11793 build_int_cst (TREE_TYPE (op2), (idx % k) * width));
11794 }
11795 }
11796 }
11797
11798 /* A bit-field-ref that referenced the full argument can be stripped. */
11799 if (INTEGRAL_TYPE_P (TREE_TYPE (arg0))
11800 && TYPE_PRECISION (TREE_TYPE (arg0)) == tree_to_uhwi (arg1)
11801 && integer_zerop (op2))
11802 return fold_convert_loc (loc, type, arg0);
11803
11804 /* On constants we can use native encode/interpret to constant
11805 fold (nearly) all BIT_FIELD_REFs. */
11806 if (CONSTANT_CLASS_P (arg0)
11807 && can_native_interpret_type_p (type)
11808 && tree_fits_uhwi_p (TYPE_SIZE_UNIT (TREE_TYPE (arg0)))
11809 /* This limitation should not be necessary, we just need to
11810 round this up to mode size. */
11811 && tree_to_uhwi (op1) % BITS_PER_UNIT == 0
11812 /* Need bit-shifting of the buffer to relax the following. */
11813 && tree_to_uhwi (op2) % BITS_PER_UNIT == 0)
11814 {
11815 unsigned HOST_WIDE_INT bitpos = tree_to_uhwi (op2);
11816 unsigned HOST_WIDE_INT bitsize = tree_to_uhwi (op1);
11817 unsigned HOST_WIDE_INT clen;
11818 clen = tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (arg0)));
11819 /* ??? We cannot tell native_encode_expr to start at
11820 some random byte only. So limit us to a reasonable amount
11821 of work. */
11822 if (clen <= 4096)
11823 {
11824 unsigned char *b = XALLOCAVEC (unsigned char, clen);
11825 unsigned HOST_WIDE_INT len = native_encode_expr (arg0, b, clen);
11826 if (len > 0
11827 && len * BITS_PER_UNIT >= bitpos + bitsize)
11828 {
11829 tree v = native_interpret_expr (type,
11830 b + bitpos / BITS_PER_UNIT,
11831 bitsize / BITS_PER_UNIT);
11832 if (v)
11833 return v;
11834 }
11835 }
11836 }
11837
11838 return NULL_TREE;
11839
11840 case FMA_EXPR:
11841 /* For integers we can decompose the FMA if possible. */
11842 if (TREE_CODE (arg0) == INTEGER_CST
11843 && TREE_CODE (arg1) == INTEGER_CST)
11844 return fold_build2_loc (loc, PLUS_EXPR, type,
11845 const_binop (MULT_EXPR, arg0, arg1), arg2);
11846 if (integer_zerop (arg2))
11847 return fold_build2_loc (loc, MULT_EXPR, type, arg0, arg1);
11848
11849 return fold_fma (loc, type, arg0, arg1, arg2);
11850
11851 case VEC_PERM_EXPR:
11852 if (TREE_CODE (arg2) == VECTOR_CST)
11853 {
11854 unsigned int nelts = TYPE_VECTOR_SUBPARTS (type), i, mask, mask2;
11855 unsigned char *sel = XALLOCAVEC (unsigned char, 2 * nelts);
11856 unsigned char *sel2 = sel + nelts;
11857 bool need_mask_canon = false;
11858 bool need_mask_canon2 = false;
11859 bool all_in_vec0 = true;
11860 bool all_in_vec1 = true;
11861 bool maybe_identity = true;
11862 bool single_arg = (op0 == op1);
11863 bool changed = false;
11864
11865 mask2 = 2 * nelts - 1;
11866 mask = single_arg ? (nelts - 1) : mask2;
11867 gcc_assert (nelts == VECTOR_CST_NELTS (arg2));
11868 for (i = 0; i < nelts; i++)
11869 {
11870 tree val = VECTOR_CST_ELT (arg2, i);
11871 if (TREE_CODE (val) != INTEGER_CST)
11872 return NULL_TREE;
11873
11874 /* Make sure that the perm value is in an acceptable
11875 range. */
11876 wide_int t = val;
11877 need_mask_canon |= wi::gtu_p (t, mask);
11878 need_mask_canon2 |= wi::gtu_p (t, mask2);
11879 sel[i] = t.to_uhwi () & mask;
11880 sel2[i] = t.to_uhwi () & mask2;
11881
11882 if (sel[i] < nelts)
11883 all_in_vec1 = false;
11884 else
11885 all_in_vec0 = false;
11886
11887 if ((sel[i] & (nelts-1)) != i)
11888 maybe_identity = false;
11889 }
11890
11891 if (maybe_identity)
11892 {
11893 if (all_in_vec0)
11894 return op0;
11895 if (all_in_vec1)
11896 return op1;
11897 }
11898
11899 if (all_in_vec0)
11900 op1 = op0;
11901 else if (all_in_vec1)
11902 {
11903 op0 = op1;
11904 for (i = 0; i < nelts; i++)
11905 sel[i] -= nelts;
11906 need_mask_canon = true;
11907 }
11908
11909 if ((TREE_CODE (op0) == VECTOR_CST
11910 || TREE_CODE (op0) == CONSTRUCTOR)
11911 && (TREE_CODE (op1) == VECTOR_CST
11912 || TREE_CODE (op1) == CONSTRUCTOR))
11913 {
11914 tree t = fold_vec_perm (type, op0, op1, sel);
11915 if (t != NULL_TREE)
11916 return t;
11917 }
11918
11919 if (op0 == op1 && !single_arg)
11920 changed = true;
11921
11922 /* Some targets are deficient and fail to expand a single
11923 argument permutation while still allowing an equivalent
11924 2-argument version. */
11925 if (need_mask_canon && arg2 == op2
11926 && !can_vec_perm_p (TYPE_MODE (type), false, sel)
11927 && can_vec_perm_p (TYPE_MODE (type), false, sel2))
11928 {
11929 need_mask_canon = need_mask_canon2;
11930 sel = sel2;
11931 }
11932
11933 if (need_mask_canon && arg2 == op2)
11934 {
11935 tree *tsel = XALLOCAVEC (tree, nelts);
11936 tree eltype = TREE_TYPE (TREE_TYPE (arg2));
11937 for (i = 0; i < nelts; i++)
11938 tsel[i] = build_int_cst (eltype, sel[i]);
11939 op2 = build_vector (TREE_TYPE (arg2), tsel);
11940 changed = true;
11941 }
11942
11943 if (changed)
11944 return build3_loc (loc, VEC_PERM_EXPR, type, op0, op1, op2);
11945 }
11946 return NULL_TREE;
11947
11948 default:
11949 return NULL_TREE;
11950 } /* switch (code) */
11951 }
11952
11953 /* Gets the element ACCESS_INDEX from CTOR, which must be a CONSTRUCTOR
11954 of an array (or vector). */
11955
11956 tree
11957 get_array_ctor_element_at_index (tree ctor, offset_int access_index)
11958 {
11959 tree index_type = NULL_TREE;
11960 offset_int low_bound = 0;
11961
11962 if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE)
11963 {
11964 tree domain_type = TYPE_DOMAIN (TREE_TYPE (ctor));
11965 if (domain_type && TYPE_MIN_VALUE (domain_type))
11966 {
11967 /* Static constructors for variably sized objects makes no sense. */
11968 gcc_assert (TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST);
11969 index_type = TREE_TYPE (TYPE_MIN_VALUE (domain_type));
11970 low_bound = wi::to_offset (TYPE_MIN_VALUE (domain_type));
11971 }
11972 }
11973
11974 if (index_type)
11975 access_index = wi::ext (access_index, TYPE_PRECISION (index_type),
11976 TYPE_SIGN (index_type));
11977
11978 offset_int index = low_bound - 1;
11979 if (index_type)
11980 index = wi::ext (index, TYPE_PRECISION (index_type),
11981 TYPE_SIGN (index_type));
11982
11983 offset_int max_index;
11984 unsigned HOST_WIDE_INT cnt;
11985 tree cfield, cval;
11986
11987 FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
11988 {
11989 /* Array constructor might explicitly set index, or specify a range,
11990 or leave index NULL meaning that it is next index after previous
11991 one. */
11992 if (cfield)
11993 {
11994 if (TREE_CODE (cfield) == INTEGER_CST)
11995 max_index = index = wi::to_offset (cfield);
11996 else
11997 {
11998 gcc_assert (TREE_CODE (cfield) == RANGE_EXPR);
11999 index = wi::to_offset (TREE_OPERAND (cfield, 0));
12000 max_index = wi::to_offset (TREE_OPERAND (cfield, 1));
12001 }
12002 }
12003 else
12004 {
12005 index += 1;
12006 if (index_type)
12007 index = wi::ext (index, TYPE_PRECISION (index_type),
12008 TYPE_SIGN (index_type));
12009 max_index = index;
12010 }
12011
12012 /* Do we have match? */
12013 if (wi::cmpu (access_index, index) >= 0
12014 && wi::cmpu (access_index, max_index) <= 0)
12015 return cval;
12016 }
12017 return NULL_TREE;
12018 }
12019
12020 /* Perform constant folding and related simplification of EXPR.
12021 The related simplifications include x*1 => x, x*0 => 0, etc.,
12022 and application of the associative law.
12023 NOP_EXPR conversions may be removed freely (as long as we
12024 are careful not to change the type of the overall expression).
12025 We cannot simplify through a CONVERT_EXPR, FIX_EXPR or FLOAT_EXPR,
12026 but we can constant-fold them if they have constant operands. */
12027
12028 #ifdef ENABLE_FOLD_CHECKING
12029 # define fold(x) fold_1 (x)
12030 static tree fold_1 (tree);
12031 static
12032 #endif
12033 tree
12034 fold (tree expr)
12035 {
12036 const tree t = expr;
12037 enum tree_code code = TREE_CODE (t);
12038 enum tree_code_class kind = TREE_CODE_CLASS (code);
12039 tree tem;
12040 location_t loc = EXPR_LOCATION (expr);
12041
12042 /* Return right away if a constant. */
12043 if (kind == tcc_constant)
12044 return t;
12045
12046 /* CALL_EXPR-like objects with variable numbers of operands are
12047 treated specially. */
12048 if (kind == tcc_vl_exp)
12049 {
12050 if (code == CALL_EXPR)
12051 {
12052 tem = fold_call_expr (loc, expr, false);
12053 return tem ? tem : expr;
12054 }
12055 return expr;
12056 }
12057
12058 if (IS_EXPR_CODE_CLASS (kind))
12059 {
12060 tree type = TREE_TYPE (t);
12061 tree op0, op1, op2;
12062
12063 switch (TREE_CODE_LENGTH (code))
12064 {
12065 case 1:
12066 op0 = TREE_OPERAND (t, 0);
12067 tem = fold_unary_loc (loc, code, type, op0);
12068 return tem ? tem : expr;
12069 case 2:
12070 op0 = TREE_OPERAND (t, 0);
12071 op1 = TREE_OPERAND (t, 1);
12072 tem = fold_binary_loc (loc, code, type, op0, op1);
12073 return tem ? tem : expr;
12074 case 3:
12075 op0 = TREE_OPERAND (t, 0);
12076 op1 = TREE_OPERAND (t, 1);
12077 op2 = TREE_OPERAND (t, 2);
12078 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12079 return tem ? tem : expr;
12080 default:
12081 break;
12082 }
12083 }
12084
12085 switch (code)
12086 {
12087 case ARRAY_REF:
12088 {
12089 tree op0 = TREE_OPERAND (t, 0);
12090 tree op1 = TREE_OPERAND (t, 1);
12091
12092 if (TREE_CODE (op1) == INTEGER_CST
12093 && TREE_CODE (op0) == CONSTRUCTOR
12094 && ! type_contains_placeholder_p (TREE_TYPE (op0)))
12095 {
12096 tree val = get_array_ctor_element_at_index (op0,
12097 wi::to_offset (op1));
12098 if (val)
12099 return val;
12100 }
12101
12102 return t;
12103 }
12104
12105 /* Return a VECTOR_CST if possible. */
12106 case CONSTRUCTOR:
12107 {
12108 tree type = TREE_TYPE (t);
12109 if (TREE_CODE (type) != VECTOR_TYPE)
12110 return t;
12111
12112 unsigned i;
12113 tree val;
12114 FOR_EACH_CONSTRUCTOR_VALUE (CONSTRUCTOR_ELTS (t), i, val)
12115 if (! CONSTANT_CLASS_P (val))
12116 return t;
12117
12118 return build_vector_from_ctor (type, CONSTRUCTOR_ELTS (t));
12119 }
12120
12121 case CONST_DECL:
12122 return fold (DECL_INITIAL (t));
12123
12124 default:
12125 return t;
12126 } /* switch (code) */
12127 }
12128
12129 #ifdef ENABLE_FOLD_CHECKING
12130 #undef fold
12131
12132 static void fold_checksum_tree (const_tree, struct md5_ctx *,
12133 hash_table<nofree_ptr_hash<const tree_node> > *);
12134 static void fold_check_failed (const_tree, const_tree);
12135 void print_fold_checksum (const_tree);
12136
12137 /* When --enable-checking=fold, compute a digest of expr before
12138 and after actual fold call to see if fold did not accidentally
12139 change original expr. */
12140
12141 tree
12142 fold (tree expr)
12143 {
12144 tree ret;
12145 struct md5_ctx ctx;
12146 unsigned char checksum_before[16], checksum_after[16];
12147 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12148
12149 md5_init_ctx (&ctx);
12150 fold_checksum_tree (expr, &ctx, &ht);
12151 md5_finish_ctx (&ctx, checksum_before);
12152 ht.empty ();
12153
12154 ret = fold_1 (expr);
12155
12156 md5_init_ctx (&ctx);
12157 fold_checksum_tree (expr, &ctx, &ht);
12158 md5_finish_ctx (&ctx, checksum_after);
12159
12160 if (memcmp (checksum_before, checksum_after, 16))
12161 fold_check_failed (expr, ret);
12162
12163 return ret;
12164 }
12165
12166 void
12167 print_fold_checksum (const_tree expr)
12168 {
12169 struct md5_ctx ctx;
12170 unsigned char checksum[16], cnt;
12171 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12172
12173 md5_init_ctx (&ctx);
12174 fold_checksum_tree (expr, &ctx, &ht);
12175 md5_finish_ctx (&ctx, checksum);
12176 for (cnt = 0; cnt < 16; ++cnt)
12177 fprintf (stderr, "%02x", checksum[cnt]);
12178 putc ('\n', stderr);
12179 }
12180
12181 static void
12182 fold_check_failed (const_tree expr ATTRIBUTE_UNUSED, const_tree ret ATTRIBUTE_UNUSED)
12183 {
12184 internal_error ("fold check: original tree changed by fold");
12185 }
12186
12187 static void
12188 fold_checksum_tree (const_tree expr, struct md5_ctx *ctx,
12189 hash_table<nofree_ptr_hash <const tree_node> > *ht)
12190 {
12191 const tree_node **slot;
12192 enum tree_code code;
12193 union tree_node buf;
12194 int i, len;
12195
12196 recursive_label:
12197 if (expr == NULL)
12198 return;
12199 slot = ht->find_slot (expr, INSERT);
12200 if (*slot != NULL)
12201 return;
12202 *slot = expr;
12203 code = TREE_CODE (expr);
12204 if (TREE_CODE_CLASS (code) == tcc_declaration
12205 && HAS_DECL_ASSEMBLER_NAME_P (expr))
12206 {
12207 /* Allow DECL_ASSEMBLER_NAME and symtab_node to be modified. */
12208 memcpy ((char *) &buf, expr, tree_size (expr));
12209 SET_DECL_ASSEMBLER_NAME ((tree)&buf, NULL);
12210 buf.decl_with_vis.symtab_node = NULL;
12211 expr = (tree) &buf;
12212 }
12213 else if (TREE_CODE_CLASS (code) == tcc_type
12214 && (TYPE_POINTER_TO (expr)
12215 || TYPE_REFERENCE_TO (expr)
12216 || TYPE_CACHED_VALUES_P (expr)
12217 || TYPE_CONTAINS_PLACEHOLDER_INTERNAL (expr)
12218 || TYPE_NEXT_VARIANT (expr)))
12219 {
12220 /* Allow these fields to be modified. */
12221 tree tmp;
12222 memcpy ((char *) &buf, expr, tree_size (expr));
12223 expr = tmp = (tree) &buf;
12224 TYPE_CONTAINS_PLACEHOLDER_INTERNAL (tmp) = 0;
12225 TYPE_POINTER_TO (tmp) = NULL;
12226 TYPE_REFERENCE_TO (tmp) = NULL;
12227 TYPE_NEXT_VARIANT (tmp) = NULL;
12228 if (TYPE_CACHED_VALUES_P (tmp))
12229 {
12230 TYPE_CACHED_VALUES_P (tmp) = 0;
12231 TYPE_CACHED_VALUES (tmp) = NULL;
12232 }
12233 }
12234 md5_process_bytes (expr, tree_size (expr), ctx);
12235 if (CODE_CONTAINS_STRUCT (code, TS_TYPED))
12236 fold_checksum_tree (TREE_TYPE (expr), ctx, ht);
12237 if (TREE_CODE_CLASS (code) != tcc_type
12238 && TREE_CODE_CLASS (code) != tcc_declaration
12239 && code != TREE_LIST
12240 && code != SSA_NAME
12241 && CODE_CONTAINS_STRUCT (code, TS_COMMON))
12242 fold_checksum_tree (TREE_CHAIN (expr), ctx, ht);
12243 switch (TREE_CODE_CLASS (code))
12244 {
12245 case tcc_constant:
12246 switch (code)
12247 {
12248 case STRING_CST:
12249 md5_process_bytes (TREE_STRING_POINTER (expr),
12250 TREE_STRING_LENGTH (expr), ctx);
12251 break;
12252 case COMPLEX_CST:
12253 fold_checksum_tree (TREE_REALPART (expr), ctx, ht);
12254 fold_checksum_tree (TREE_IMAGPART (expr), ctx, ht);
12255 break;
12256 case VECTOR_CST:
12257 for (i = 0; i < (int) VECTOR_CST_NELTS (expr); ++i)
12258 fold_checksum_tree (VECTOR_CST_ELT (expr, i), ctx, ht);
12259 break;
12260 default:
12261 break;
12262 }
12263 break;
12264 case tcc_exceptional:
12265 switch (code)
12266 {
12267 case TREE_LIST:
12268 fold_checksum_tree (TREE_PURPOSE (expr), ctx, ht);
12269 fold_checksum_tree (TREE_VALUE (expr), ctx, ht);
12270 expr = TREE_CHAIN (expr);
12271 goto recursive_label;
12272 break;
12273 case TREE_VEC:
12274 for (i = 0; i < TREE_VEC_LENGTH (expr); ++i)
12275 fold_checksum_tree (TREE_VEC_ELT (expr, i), ctx, ht);
12276 break;
12277 default:
12278 break;
12279 }
12280 break;
12281 case tcc_expression:
12282 case tcc_reference:
12283 case tcc_comparison:
12284 case tcc_unary:
12285 case tcc_binary:
12286 case tcc_statement:
12287 case tcc_vl_exp:
12288 len = TREE_OPERAND_LENGTH (expr);
12289 for (i = 0; i < len; ++i)
12290 fold_checksum_tree (TREE_OPERAND (expr, i), ctx, ht);
12291 break;
12292 case tcc_declaration:
12293 fold_checksum_tree (DECL_NAME (expr), ctx, ht);
12294 fold_checksum_tree (DECL_CONTEXT (expr), ctx, ht);
12295 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_COMMON))
12296 {
12297 fold_checksum_tree (DECL_SIZE (expr), ctx, ht);
12298 fold_checksum_tree (DECL_SIZE_UNIT (expr), ctx, ht);
12299 fold_checksum_tree (DECL_INITIAL (expr), ctx, ht);
12300 fold_checksum_tree (DECL_ABSTRACT_ORIGIN (expr), ctx, ht);
12301 fold_checksum_tree (DECL_ATTRIBUTES (expr), ctx, ht);
12302 }
12303
12304 if (CODE_CONTAINS_STRUCT (TREE_CODE (expr), TS_DECL_NON_COMMON))
12305 {
12306 if (TREE_CODE (expr) == FUNCTION_DECL)
12307 {
12308 fold_checksum_tree (DECL_VINDEX (expr), ctx, ht);
12309 fold_checksum_tree (DECL_ARGUMENTS (expr), ctx, ht);
12310 }
12311 fold_checksum_tree (DECL_RESULT_FLD (expr), ctx, ht);
12312 }
12313 break;
12314 case tcc_type:
12315 if (TREE_CODE (expr) == ENUMERAL_TYPE)
12316 fold_checksum_tree (TYPE_VALUES (expr), ctx, ht);
12317 fold_checksum_tree (TYPE_SIZE (expr), ctx, ht);
12318 fold_checksum_tree (TYPE_SIZE_UNIT (expr), ctx, ht);
12319 fold_checksum_tree (TYPE_ATTRIBUTES (expr), ctx, ht);
12320 fold_checksum_tree (TYPE_NAME (expr), ctx, ht);
12321 if (INTEGRAL_TYPE_P (expr)
12322 || SCALAR_FLOAT_TYPE_P (expr))
12323 {
12324 fold_checksum_tree (TYPE_MIN_VALUE (expr), ctx, ht);
12325 fold_checksum_tree (TYPE_MAX_VALUE (expr), ctx, ht);
12326 }
12327 fold_checksum_tree (TYPE_MAIN_VARIANT (expr), ctx, ht);
12328 if (TREE_CODE (expr) == RECORD_TYPE
12329 || TREE_CODE (expr) == UNION_TYPE
12330 || TREE_CODE (expr) == QUAL_UNION_TYPE)
12331 fold_checksum_tree (TYPE_BINFO (expr), ctx, ht);
12332 fold_checksum_tree (TYPE_CONTEXT (expr), ctx, ht);
12333 break;
12334 default:
12335 break;
12336 }
12337 }
12338
12339 /* Helper function for outputting the checksum of a tree T. When
12340 debugging with gdb, you can "define mynext" to be "next" followed
12341 by "call debug_fold_checksum (op0)", then just trace down till the
12342 outputs differ. */
12343
12344 DEBUG_FUNCTION void
12345 debug_fold_checksum (const_tree t)
12346 {
12347 int i;
12348 unsigned char checksum[16];
12349 struct md5_ctx ctx;
12350 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12351
12352 md5_init_ctx (&ctx);
12353 fold_checksum_tree (t, &ctx, &ht);
12354 md5_finish_ctx (&ctx, checksum);
12355 ht.empty ();
12356
12357 for (i = 0; i < 16; i++)
12358 fprintf (stderr, "%d ", checksum[i]);
12359
12360 fprintf (stderr, "\n");
12361 }
12362
12363 #endif
12364
12365 /* Fold a unary tree expression with code CODE of type TYPE with an
12366 operand OP0. LOC is the location of the resulting expression.
12367 Return a folded expression if successful. Otherwise, return a tree
12368 expression with code CODE of type TYPE with an operand OP0. */
12369
12370 tree
12371 fold_build1_stat_loc (location_t loc,
12372 enum tree_code code, tree type, tree op0 MEM_STAT_DECL)
12373 {
12374 tree tem;
12375 #ifdef ENABLE_FOLD_CHECKING
12376 unsigned char checksum_before[16], checksum_after[16];
12377 struct md5_ctx ctx;
12378 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12379
12380 md5_init_ctx (&ctx);
12381 fold_checksum_tree (op0, &ctx, &ht);
12382 md5_finish_ctx (&ctx, checksum_before);
12383 ht.empty ();
12384 #endif
12385
12386 tem = fold_unary_loc (loc, code, type, op0);
12387 if (!tem)
12388 tem = build1_stat_loc (loc, code, type, op0 PASS_MEM_STAT);
12389
12390 #ifdef ENABLE_FOLD_CHECKING
12391 md5_init_ctx (&ctx);
12392 fold_checksum_tree (op0, &ctx, &ht);
12393 md5_finish_ctx (&ctx, checksum_after);
12394
12395 if (memcmp (checksum_before, checksum_after, 16))
12396 fold_check_failed (op0, tem);
12397 #endif
12398 return tem;
12399 }
12400
12401 /* Fold a binary tree expression with code CODE of type TYPE with
12402 operands OP0 and OP1. LOC is the location of the resulting
12403 expression. Return a folded expression if successful. Otherwise,
12404 return a tree expression with code CODE of type TYPE with operands
12405 OP0 and OP1. */
12406
12407 tree
12408 fold_build2_stat_loc (location_t loc,
12409 enum tree_code code, tree type, tree op0, tree op1
12410 MEM_STAT_DECL)
12411 {
12412 tree tem;
12413 #ifdef ENABLE_FOLD_CHECKING
12414 unsigned char checksum_before_op0[16],
12415 checksum_before_op1[16],
12416 checksum_after_op0[16],
12417 checksum_after_op1[16];
12418 struct md5_ctx ctx;
12419 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12420
12421 md5_init_ctx (&ctx);
12422 fold_checksum_tree (op0, &ctx, &ht);
12423 md5_finish_ctx (&ctx, checksum_before_op0);
12424 ht.empty ();
12425
12426 md5_init_ctx (&ctx);
12427 fold_checksum_tree (op1, &ctx, &ht);
12428 md5_finish_ctx (&ctx, checksum_before_op1);
12429 ht.empty ();
12430 #endif
12431
12432 tem = fold_binary_loc (loc, code, type, op0, op1);
12433 if (!tem)
12434 tem = build2_stat_loc (loc, code, type, op0, op1 PASS_MEM_STAT);
12435
12436 #ifdef ENABLE_FOLD_CHECKING
12437 md5_init_ctx (&ctx);
12438 fold_checksum_tree (op0, &ctx, &ht);
12439 md5_finish_ctx (&ctx, checksum_after_op0);
12440 ht.empty ();
12441
12442 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12443 fold_check_failed (op0, tem);
12444
12445 md5_init_ctx (&ctx);
12446 fold_checksum_tree (op1, &ctx, &ht);
12447 md5_finish_ctx (&ctx, checksum_after_op1);
12448
12449 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12450 fold_check_failed (op1, tem);
12451 #endif
12452 return tem;
12453 }
12454
12455 /* Fold a ternary tree expression with code CODE of type TYPE with
12456 operands OP0, OP1, and OP2. Return a folded expression if
12457 successful. Otherwise, return a tree expression with code CODE of
12458 type TYPE with operands OP0, OP1, and OP2. */
12459
12460 tree
12461 fold_build3_stat_loc (location_t loc, enum tree_code code, tree type,
12462 tree op0, tree op1, tree op2 MEM_STAT_DECL)
12463 {
12464 tree tem;
12465 #ifdef ENABLE_FOLD_CHECKING
12466 unsigned char checksum_before_op0[16],
12467 checksum_before_op1[16],
12468 checksum_before_op2[16],
12469 checksum_after_op0[16],
12470 checksum_after_op1[16],
12471 checksum_after_op2[16];
12472 struct md5_ctx ctx;
12473 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12474
12475 md5_init_ctx (&ctx);
12476 fold_checksum_tree (op0, &ctx, &ht);
12477 md5_finish_ctx (&ctx, checksum_before_op0);
12478 ht.empty ();
12479
12480 md5_init_ctx (&ctx);
12481 fold_checksum_tree (op1, &ctx, &ht);
12482 md5_finish_ctx (&ctx, checksum_before_op1);
12483 ht.empty ();
12484
12485 md5_init_ctx (&ctx);
12486 fold_checksum_tree (op2, &ctx, &ht);
12487 md5_finish_ctx (&ctx, checksum_before_op2);
12488 ht.empty ();
12489 #endif
12490
12491 gcc_assert (TREE_CODE_CLASS (code) != tcc_vl_exp);
12492 tem = fold_ternary_loc (loc, code, type, op0, op1, op2);
12493 if (!tem)
12494 tem = build3_stat_loc (loc, code, type, op0, op1, op2 PASS_MEM_STAT);
12495
12496 #ifdef ENABLE_FOLD_CHECKING
12497 md5_init_ctx (&ctx);
12498 fold_checksum_tree (op0, &ctx, &ht);
12499 md5_finish_ctx (&ctx, checksum_after_op0);
12500 ht.empty ();
12501
12502 if (memcmp (checksum_before_op0, checksum_after_op0, 16))
12503 fold_check_failed (op0, tem);
12504
12505 md5_init_ctx (&ctx);
12506 fold_checksum_tree (op1, &ctx, &ht);
12507 md5_finish_ctx (&ctx, checksum_after_op1);
12508 ht.empty ();
12509
12510 if (memcmp (checksum_before_op1, checksum_after_op1, 16))
12511 fold_check_failed (op1, tem);
12512
12513 md5_init_ctx (&ctx);
12514 fold_checksum_tree (op2, &ctx, &ht);
12515 md5_finish_ctx (&ctx, checksum_after_op2);
12516
12517 if (memcmp (checksum_before_op2, checksum_after_op2, 16))
12518 fold_check_failed (op2, tem);
12519 #endif
12520 return tem;
12521 }
12522
12523 /* Fold a CALL_EXPR expression of type TYPE with operands FN and NARGS
12524 arguments in ARGARRAY, and a null static chain.
12525 Return a folded expression if successful. Otherwise, return a CALL_EXPR
12526 of type TYPE from the given operands as constructed by build_call_array. */
12527
12528 tree
12529 fold_build_call_array_loc (location_t loc, tree type, tree fn,
12530 int nargs, tree *argarray)
12531 {
12532 tree tem;
12533 #ifdef ENABLE_FOLD_CHECKING
12534 unsigned char checksum_before_fn[16],
12535 checksum_before_arglist[16],
12536 checksum_after_fn[16],
12537 checksum_after_arglist[16];
12538 struct md5_ctx ctx;
12539 hash_table<nofree_ptr_hash<const tree_node> > ht (32);
12540 int i;
12541
12542 md5_init_ctx (&ctx);
12543 fold_checksum_tree (fn, &ctx, &ht);
12544 md5_finish_ctx (&ctx, checksum_before_fn);
12545 ht.empty ();
12546
12547 md5_init_ctx (&ctx);
12548 for (i = 0; i < nargs; i++)
12549 fold_checksum_tree (argarray[i], &ctx, &ht);
12550 md5_finish_ctx (&ctx, checksum_before_arglist);
12551 ht.empty ();
12552 #endif
12553
12554 tem = fold_builtin_call_array (loc, type, fn, nargs, argarray);
12555 if (!tem)
12556 tem = build_call_array_loc (loc, type, fn, nargs, argarray);
12557
12558 #ifdef ENABLE_FOLD_CHECKING
12559 md5_init_ctx (&ctx);
12560 fold_checksum_tree (fn, &ctx, &ht);
12561 md5_finish_ctx (&ctx, checksum_after_fn);
12562 ht.empty ();
12563
12564 if (memcmp (checksum_before_fn, checksum_after_fn, 16))
12565 fold_check_failed (fn, tem);
12566
12567 md5_init_ctx (&ctx);
12568 for (i = 0; i < nargs; i++)
12569 fold_checksum_tree (argarray[i], &ctx, &ht);
12570 md5_finish_ctx (&ctx, checksum_after_arglist);
12571
12572 if (memcmp (checksum_before_arglist, checksum_after_arglist, 16))
12573 fold_check_failed (NULL_TREE, tem);
12574 #endif
12575 return tem;
12576 }
12577
12578 /* Perform constant folding and related simplification of initializer
12579 expression EXPR. These behave identically to "fold_buildN" but ignore
12580 potential run-time traps and exceptions that fold must preserve. */
12581
12582 #define START_FOLD_INIT \
12583 int saved_signaling_nans = flag_signaling_nans;\
12584 int saved_trapping_math = flag_trapping_math;\
12585 int saved_rounding_math = flag_rounding_math;\
12586 int saved_trapv = flag_trapv;\
12587 int saved_folding_initializer = folding_initializer;\
12588 flag_signaling_nans = 0;\
12589 flag_trapping_math = 0;\
12590 flag_rounding_math = 0;\
12591 flag_trapv = 0;\
12592 folding_initializer = 1;
12593
12594 #define END_FOLD_INIT \
12595 flag_signaling_nans = saved_signaling_nans;\
12596 flag_trapping_math = saved_trapping_math;\
12597 flag_rounding_math = saved_rounding_math;\
12598 flag_trapv = saved_trapv;\
12599 folding_initializer = saved_folding_initializer;
12600
12601 tree
12602 fold_build1_initializer_loc (location_t loc, enum tree_code code,
12603 tree type, tree op)
12604 {
12605 tree result;
12606 START_FOLD_INIT;
12607
12608 result = fold_build1_loc (loc, code, type, op);
12609
12610 END_FOLD_INIT;
12611 return result;
12612 }
12613
12614 tree
12615 fold_build2_initializer_loc (location_t loc, enum tree_code code,
12616 tree type, tree op0, tree op1)
12617 {
12618 tree result;
12619 START_FOLD_INIT;
12620
12621 result = fold_build2_loc (loc, code, type, op0, op1);
12622
12623 END_FOLD_INIT;
12624 return result;
12625 }
12626
12627 tree
12628 fold_build_call_array_initializer_loc (location_t loc, tree type, tree fn,
12629 int nargs, tree *argarray)
12630 {
12631 tree result;
12632 START_FOLD_INIT;
12633
12634 result = fold_build_call_array_loc (loc, type, fn, nargs, argarray);
12635
12636 END_FOLD_INIT;
12637 return result;
12638 }
12639
12640 #undef START_FOLD_INIT
12641 #undef END_FOLD_INIT
12642
12643 /* Determine if first argument is a multiple of second argument. Return 0 if
12644 it is not, or we cannot easily determined it to be.
12645
12646 An example of the sort of thing we care about (at this point; this routine
12647 could surely be made more general, and expanded to do what the *_DIV_EXPR's
12648 fold cases do now) is discovering that
12649
12650 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12651
12652 is a multiple of
12653
12654 SAVE_EXPR (J * 8)
12655
12656 when we know that the two SAVE_EXPR (J * 8) nodes are the same node.
12657
12658 This code also handles discovering that
12659
12660 SAVE_EXPR (I) * SAVE_EXPR (J * 8)
12661
12662 is a multiple of 8 so we don't have to worry about dealing with a
12663 possible remainder.
12664
12665 Note that we *look* inside a SAVE_EXPR only to determine how it was
12666 calculated; it is not safe for fold to do much of anything else with the
12667 internals of a SAVE_EXPR, since it cannot know when it will be evaluated
12668 at run time. For example, the latter example above *cannot* be implemented
12669 as SAVE_EXPR (I) * J or any variant thereof, since the value of J at
12670 evaluation time of the original SAVE_EXPR is not necessarily the same at
12671 the time the new expression is evaluated. The only optimization of this
12672 sort that would be valid is changing
12673
12674 SAVE_EXPR (I) * SAVE_EXPR (SAVE_EXPR (J) * 8)
12675
12676 divided by 8 to
12677
12678 SAVE_EXPR (I) * SAVE_EXPR (J)
12679
12680 (where the same SAVE_EXPR (J) is used in the original and the
12681 transformed version). */
12682
12683 int
12684 multiple_of_p (tree type, const_tree top, const_tree bottom)
12685 {
12686 if (operand_equal_p (top, bottom, 0))
12687 return 1;
12688
12689 if (TREE_CODE (type) != INTEGER_TYPE)
12690 return 0;
12691
12692 switch (TREE_CODE (top))
12693 {
12694 case BIT_AND_EXPR:
12695 /* Bitwise and provides a power of two multiple. If the mask is
12696 a multiple of BOTTOM then TOP is a multiple of BOTTOM. */
12697 if (!integer_pow2p (bottom))
12698 return 0;
12699 /* FALLTHRU */
12700
12701 case MULT_EXPR:
12702 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12703 || multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12704
12705 case PLUS_EXPR:
12706 case MINUS_EXPR:
12707 return (multiple_of_p (type, TREE_OPERAND (top, 0), bottom)
12708 && multiple_of_p (type, TREE_OPERAND (top, 1), bottom));
12709
12710 case LSHIFT_EXPR:
12711 if (TREE_CODE (TREE_OPERAND (top, 1)) == INTEGER_CST)
12712 {
12713 tree op1, t1;
12714
12715 op1 = TREE_OPERAND (top, 1);
12716 /* const_binop may not detect overflow correctly,
12717 so check for it explicitly here. */
12718 if (wi::gtu_p (TYPE_PRECISION (TREE_TYPE (size_one_node)), op1)
12719 && 0 != (t1 = fold_convert (type,
12720 const_binop (LSHIFT_EXPR,
12721 size_one_node,
12722 op1)))
12723 && !TREE_OVERFLOW (t1))
12724 return multiple_of_p (type, t1, bottom);
12725 }
12726 return 0;
12727
12728 case NOP_EXPR:
12729 /* Can't handle conversions from non-integral or wider integral type. */
12730 if ((TREE_CODE (TREE_TYPE (TREE_OPERAND (top, 0))) != INTEGER_TYPE)
12731 || (TYPE_PRECISION (type)
12732 < TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (top, 0)))))
12733 return 0;
12734
12735 /* .. fall through ... */
12736
12737 case SAVE_EXPR:
12738 return multiple_of_p (type, TREE_OPERAND (top, 0), bottom);
12739
12740 case COND_EXPR:
12741 return (multiple_of_p (type, TREE_OPERAND (top, 1), bottom)
12742 && multiple_of_p (type, TREE_OPERAND (top, 2), bottom));
12743
12744 case INTEGER_CST:
12745 if (TREE_CODE (bottom) != INTEGER_CST
12746 || integer_zerop (bottom)
12747 || (TYPE_UNSIGNED (type)
12748 && (tree_int_cst_sgn (top) < 0
12749 || tree_int_cst_sgn (bottom) < 0)))
12750 return 0;
12751 return wi::multiple_of_p (wi::to_widest (top), wi::to_widest (bottom),
12752 SIGNED);
12753
12754 default:
12755 return 0;
12756 }
12757 }
12758
12759 #define tree_expr_nonnegative_warnv_p(X, Y) \
12760 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
12761
12762 #define RECURSE(X) \
12763 ((tree_expr_nonnegative_warnv_p) (X, strict_overflow_p, depth + 1))
12764
12765 /* Return true if CODE or TYPE is known to be non-negative. */
12766
12767 static bool
12768 tree_simple_nonnegative_warnv_p (enum tree_code code, tree type)
12769 {
12770 if ((TYPE_PRECISION (type) != 1 || TYPE_UNSIGNED (type))
12771 && truth_value_p (code))
12772 /* Truth values evaluate to 0 or 1, which is nonnegative unless we
12773 have a signed:1 type (where the value is -1 and 0). */
12774 return true;
12775 return false;
12776 }
12777
12778 /* Return true if (CODE OP0) is known to be non-negative. If the return
12779 value is based on the assumption that signed overflow is undefined,
12780 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12781 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12782
12783 bool
12784 tree_unary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12785 bool *strict_overflow_p, int depth)
12786 {
12787 if (TYPE_UNSIGNED (type))
12788 return true;
12789
12790 switch (code)
12791 {
12792 case ABS_EXPR:
12793 /* We can't return 1 if flag_wrapv is set because
12794 ABS_EXPR<INT_MIN> = INT_MIN. */
12795 if (!ANY_INTEGRAL_TYPE_P (type))
12796 return true;
12797 if (TYPE_OVERFLOW_UNDEFINED (type))
12798 {
12799 *strict_overflow_p = true;
12800 return true;
12801 }
12802 break;
12803
12804 case NON_LVALUE_EXPR:
12805 case FLOAT_EXPR:
12806 case FIX_TRUNC_EXPR:
12807 return RECURSE (op0);
12808
12809 CASE_CONVERT:
12810 {
12811 tree inner_type = TREE_TYPE (op0);
12812 tree outer_type = type;
12813
12814 if (TREE_CODE (outer_type) == REAL_TYPE)
12815 {
12816 if (TREE_CODE (inner_type) == REAL_TYPE)
12817 return RECURSE (op0);
12818 if (INTEGRAL_TYPE_P (inner_type))
12819 {
12820 if (TYPE_UNSIGNED (inner_type))
12821 return true;
12822 return RECURSE (op0);
12823 }
12824 }
12825 else if (INTEGRAL_TYPE_P (outer_type))
12826 {
12827 if (TREE_CODE (inner_type) == REAL_TYPE)
12828 return RECURSE (op0);
12829 if (INTEGRAL_TYPE_P (inner_type))
12830 return TYPE_PRECISION (inner_type) < TYPE_PRECISION (outer_type)
12831 && TYPE_UNSIGNED (inner_type);
12832 }
12833 }
12834 break;
12835
12836 default:
12837 return tree_simple_nonnegative_warnv_p (code, type);
12838 }
12839
12840 /* We don't know sign of `t', so be conservative and return false. */
12841 return false;
12842 }
12843
12844 /* Return true if (CODE OP0 OP1) is known to be non-negative. If the return
12845 value is based on the assumption that signed overflow is undefined,
12846 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12847 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12848
12849 bool
12850 tree_binary_nonnegative_warnv_p (enum tree_code code, tree type, tree op0,
12851 tree op1, bool *strict_overflow_p,
12852 int depth)
12853 {
12854 if (TYPE_UNSIGNED (type))
12855 return true;
12856
12857 switch (code)
12858 {
12859 case POINTER_PLUS_EXPR:
12860 case PLUS_EXPR:
12861 if (FLOAT_TYPE_P (type))
12862 return RECURSE (op0) && RECURSE (op1);
12863
12864 /* zero_extend(x) + zero_extend(y) is non-negative if x and y are
12865 both unsigned and at least 2 bits shorter than the result. */
12866 if (TREE_CODE (type) == INTEGER_TYPE
12867 && TREE_CODE (op0) == NOP_EXPR
12868 && TREE_CODE (op1) == NOP_EXPR)
12869 {
12870 tree inner1 = TREE_TYPE (TREE_OPERAND (op0, 0));
12871 tree inner2 = TREE_TYPE (TREE_OPERAND (op1, 0));
12872 if (TREE_CODE (inner1) == INTEGER_TYPE && TYPE_UNSIGNED (inner1)
12873 && TREE_CODE (inner2) == INTEGER_TYPE && TYPE_UNSIGNED (inner2))
12874 {
12875 unsigned int prec = MAX (TYPE_PRECISION (inner1),
12876 TYPE_PRECISION (inner2)) + 1;
12877 return prec < TYPE_PRECISION (type);
12878 }
12879 }
12880 break;
12881
12882 case MULT_EXPR:
12883 if (FLOAT_TYPE_P (type) || TYPE_OVERFLOW_UNDEFINED (type))
12884 {
12885 /* x * x is always non-negative for floating point x
12886 or without overflow. */
12887 if (operand_equal_p (op0, op1, 0)
12888 || (RECURSE (op0) && RECURSE (op1)))
12889 {
12890 if (ANY_INTEGRAL_TYPE_P (type)
12891 && TYPE_OVERFLOW_UNDEFINED (type))
12892 *strict_overflow_p = true;
12893 return true;
12894 }
12895 }
12896
12897 /* zero_extend(x) * zero_extend(y) is non-negative if x and y are
12898 both unsigned and their total bits is shorter than the result. */
12899 if (TREE_CODE (type) == INTEGER_TYPE
12900 && (TREE_CODE (op0) == NOP_EXPR || TREE_CODE (op0) == INTEGER_CST)
12901 && (TREE_CODE (op1) == NOP_EXPR || TREE_CODE (op1) == INTEGER_CST))
12902 {
12903 tree inner0 = (TREE_CODE (op0) == NOP_EXPR)
12904 ? TREE_TYPE (TREE_OPERAND (op0, 0))
12905 : TREE_TYPE (op0);
12906 tree inner1 = (TREE_CODE (op1) == NOP_EXPR)
12907 ? TREE_TYPE (TREE_OPERAND (op1, 0))
12908 : TREE_TYPE (op1);
12909
12910 bool unsigned0 = TYPE_UNSIGNED (inner0);
12911 bool unsigned1 = TYPE_UNSIGNED (inner1);
12912
12913 if (TREE_CODE (op0) == INTEGER_CST)
12914 unsigned0 = unsigned0 || tree_int_cst_sgn (op0) >= 0;
12915
12916 if (TREE_CODE (op1) == INTEGER_CST)
12917 unsigned1 = unsigned1 || tree_int_cst_sgn (op1) >= 0;
12918
12919 if (TREE_CODE (inner0) == INTEGER_TYPE && unsigned0
12920 && TREE_CODE (inner1) == INTEGER_TYPE && unsigned1)
12921 {
12922 unsigned int precision0 = (TREE_CODE (op0) == INTEGER_CST)
12923 ? tree_int_cst_min_precision (op0, UNSIGNED)
12924 : TYPE_PRECISION (inner0);
12925
12926 unsigned int precision1 = (TREE_CODE (op1) == INTEGER_CST)
12927 ? tree_int_cst_min_precision (op1, UNSIGNED)
12928 : TYPE_PRECISION (inner1);
12929
12930 return precision0 + precision1 < TYPE_PRECISION (type);
12931 }
12932 }
12933 return false;
12934
12935 case BIT_AND_EXPR:
12936 case MAX_EXPR:
12937 return RECURSE (op0) || RECURSE (op1);
12938
12939 case BIT_IOR_EXPR:
12940 case BIT_XOR_EXPR:
12941 case MIN_EXPR:
12942 case RDIV_EXPR:
12943 case TRUNC_DIV_EXPR:
12944 case CEIL_DIV_EXPR:
12945 case FLOOR_DIV_EXPR:
12946 case ROUND_DIV_EXPR:
12947 return RECURSE (op0) && RECURSE (op1);
12948
12949 case TRUNC_MOD_EXPR:
12950 return RECURSE (op0);
12951
12952 case FLOOR_MOD_EXPR:
12953 return RECURSE (op1);
12954
12955 case CEIL_MOD_EXPR:
12956 case ROUND_MOD_EXPR:
12957 default:
12958 return tree_simple_nonnegative_warnv_p (code, type);
12959 }
12960
12961 /* We don't know sign of `t', so be conservative and return false. */
12962 return false;
12963 }
12964
12965 /* Return true if T is known to be non-negative. If the return
12966 value is based on the assumption that signed overflow is undefined,
12967 set *STRICT_OVERFLOW_P to true; otherwise, don't change
12968 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
12969
12970 bool
12971 tree_single_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
12972 {
12973 if (TYPE_UNSIGNED (TREE_TYPE (t)))
12974 return true;
12975
12976 switch (TREE_CODE (t))
12977 {
12978 case INTEGER_CST:
12979 return tree_int_cst_sgn (t) >= 0;
12980
12981 case REAL_CST:
12982 return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
12983
12984 case FIXED_CST:
12985 return ! FIXED_VALUE_NEGATIVE (TREE_FIXED_CST (t));
12986
12987 case COND_EXPR:
12988 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
12989
12990 case SSA_NAME:
12991 /* Limit the depth of recursion to avoid quadratic behavior.
12992 This is expected to catch almost all occurrences in practice.
12993 If this code misses important cases that unbounded recursion
12994 would not, passes that need this information could be revised
12995 to provide it through dataflow propagation. */
12996 return (!name_registered_for_update_p (t)
12997 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
12998 && gimple_stmt_nonnegative_warnv_p (SSA_NAME_DEF_STMT (t),
12999 strict_overflow_p, depth));
13000
13001 default:
13002 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13003 }
13004 }
13005
13006 /* Return true if T is known to be non-negative. If the return
13007 value is based on the assumption that signed overflow is undefined,
13008 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13009 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13010
13011 bool
13012 tree_call_nonnegative_warnv_p (tree type, combined_fn fn, tree arg0, tree arg1,
13013 bool *strict_overflow_p, int depth)
13014 {
13015 switch (fn)
13016 {
13017 CASE_CFN_ACOS:
13018 CASE_CFN_ACOSH:
13019 CASE_CFN_CABS:
13020 CASE_CFN_COSH:
13021 CASE_CFN_ERFC:
13022 CASE_CFN_EXP:
13023 CASE_CFN_EXP10:
13024 CASE_CFN_EXP2:
13025 CASE_CFN_FABS:
13026 CASE_CFN_FDIM:
13027 CASE_CFN_HYPOT:
13028 CASE_CFN_POW10:
13029 CASE_CFN_FFS:
13030 CASE_CFN_PARITY:
13031 CASE_CFN_POPCOUNT:
13032 CASE_CFN_CLZ:
13033 CASE_CFN_CLRSB:
13034 case CFN_BUILT_IN_BSWAP32:
13035 case CFN_BUILT_IN_BSWAP64:
13036 /* Always true. */
13037 return true;
13038
13039 CASE_CFN_SQRT:
13040 /* sqrt(-0.0) is -0.0. */
13041 if (!HONOR_SIGNED_ZEROS (element_mode (type)))
13042 return true;
13043 return RECURSE (arg0);
13044
13045 CASE_CFN_ASINH:
13046 CASE_CFN_ATAN:
13047 CASE_CFN_ATANH:
13048 CASE_CFN_CBRT:
13049 CASE_CFN_CEIL:
13050 CASE_CFN_ERF:
13051 CASE_CFN_EXPM1:
13052 CASE_CFN_FLOOR:
13053 CASE_CFN_FMOD:
13054 CASE_CFN_FREXP:
13055 CASE_CFN_ICEIL:
13056 CASE_CFN_IFLOOR:
13057 CASE_CFN_IRINT:
13058 CASE_CFN_IROUND:
13059 CASE_CFN_LCEIL:
13060 CASE_CFN_LDEXP:
13061 CASE_CFN_LFLOOR:
13062 CASE_CFN_LLCEIL:
13063 CASE_CFN_LLFLOOR:
13064 CASE_CFN_LLRINT:
13065 CASE_CFN_LLROUND:
13066 CASE_CFN_LRINT:
13067 CASE_CFN_LROUND:
13068 CASE_CFN_MODF:
13069 CASE_CFN_NEARBYINT:
13070 CASE_CFN_RINT:
13071 CASE_CFN_ROUND:
13072 CASE_CFN_SCALB:
13073 CASE_CFN_SCALBLN:
13074 CASE_CFN_SCALBN:
13075 CASE_CFN_SIGNBIT:
13076 CASE_CFN_SIGNIFICAND:
13077 CASE_CFN_SINH:
13078 CASE_CFN_TANH:
13079 CASE_CFN_TRUNC:
13080 /* True if the 1st argument is nonnegative. */
13081 return RECURSE (arg0);
13082
13083 CASE_CFN_FMAX:
13084 /* True if the 1st OR 2nd arguments are nonnegative. */
13085 return RECURSE (arg0) || RECURSE (arg1);
13086
13087 CASE_CFN_FMIN:
13088 /* True if the 1st AND 2nd arguments are nonnegative. */
13089 return RECURSE (arg0) && RECURSE (arg1);
13090
13091 CASE_CFN_COPYSIGN:
13092 /* True if the 2nd argument is nonnegative. */
13093 return RECURSE (arg1);
13094
13095 CASE_CFN_POWI:
13096 /* True if the 1st argument is nonnegative or the second
13097 argument is an even integer. */
13098 if (TREE_CODE (arg1) == INTEGER_CST
13099 && (TREE_INT_CST_LOW (arg1) & 1) == 0)
13100 return true;
13101 return RECURSE (arg0);
13102
13103 CASE_CFN_POW:
13104 /* True if the 1st argument is nonnegative or the second
13105 argument is an even integer valued real. */
13106 if (TREE_CODE (arg1) == REAL_CST)
13107 {
13108 REAL_VALUE_TYPE c;
13109 HOST_WIDE_INT n;
13110
13111 c = TREE_REAL_CST (arg1);
13112 n = real_to_integer (&c);
13113 if ((n & 1) == 0)
13114 {
13115 REAL_VALUE_TYPE cint;
13116 real_from_integer (&cint, VOIDmode, n, SIGNED);
13117 if (real_identical (&c, &cint))
13118 return true;
13119 }
13120 }
13121 return RECURSE (arg0);
13122
13123 default:
13124 break;
13125 }
13126 return tree_simple_nonnegative_warnv_p (CALL_EXPR, type);
13127 }
13128
13129 /* Return true if T is known to be non-negative. If the return
13130 value is based on the assumption that signed overflow is undefined,
13131 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13132 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13133
13134 static bool
13135 tree_invalid_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13136 {
13137 enum tree_code code = TREE_CODE (t);
13138 if (TYPE_UNSIGNED (TREE_TYPE (t)))
13139 return true;
13140
13141 switch (code)
13142 {
13143 case TARGET_EXPR:
13144 {
13145 tree temp = TARGET_EXPR_SLOT (t);
13146 t = TARGET_EXPR_INITIAL (t);
13147
13148 /* If the initializer is non-void, then it's a normal expression
13149 that will be assigned to the slot. */
13150 if (!VOID_TYPE_P (t))
13151 return RECURSE (t);
13152
13153 /* Otherwise, the initializer sets the slot in some way. One common
13154 way is an assignment statement at the end of the initializer. */
13155 while (1)
13156 {
13157 if (TREE_CODE (t) == BIND_EXPR)
13158 t = expr_last (BIND_EXPR_BODY (t));
13159 else if (TREE_CODE (t) == TRY_FINALLY_EXPR
13160 || TREE_CODE (t) == TRY_CATCH_EXPR)
13161 t = expr_last (TREE_OPERAND (t, 0));
13162 else if (TREE_CODE (t) == STATEMENT_LIST)
13163 t = expr_last (t);
13164 else
13165 break;
13166 }
13167 if (TREE_CODE (t) == MODIFY_EXPR
13168 && TREE_OPERAND (t, 0) == temp)
13169 return RECURSE (TREE_OPERAND (t, 1));
13170
13171 return false;
13172 }
13173
13174 case CALL_EXPR:
13175 {
13176 tree arg0 = call_expr_nargs (t) > 0 ? CALL_EXPR_ARG (t, 0) : NULL_TREE;
13177 tree arg1 = call_expr_nargs (t) > 1 ? CALL_EXPR_ARG (t, 1) : NULL_TREE;
13178
13179 return tree_call_nonnegative_warnv_p (TREE_TYPE (t),
13180 get_call_combined_fn (t),
13181 arg0,
13182 arg1,
13183 strict_overflow_p, depth);
13184 }
13185 case COMPOUND_EXPR:
13186 case MODIFY_EXPR:
13187 return RECURSE (TREE_OPERAND (t, 1));
13188
13189 case BIND_EXPR:
13190 return RECURSE (expr_last (TREE_OPERAND (t, 1)));
13191
13192 case SAVE_EXPR:
13193 return RECURSE (TREE_OPERAND (t, 0));
13194
13195 default:
13196 return tree_simple_nonnegative_warnv_p (TREE_CODE (t), TREE_TYPE (t));
13197 }
13198 }
13199
13200 #undef RECURSE
13201 #undef tree_expr_nonnegative_warnv_p
13202
13203 /* Return true if T is known to be non-negative. If the return
13204 value is based on the assumption that signed overflow is undefined,
13205 set *STRICT_OVERFLOW_P to true; otherwise, don't change
13206 *STRICT_OVERFLOW_P. DEPTH is the current nesting depth of the query. */
13207
13208 bool
13209 tree_expr_nonnegative_warnv_p (tree t, bool *strict_overflow_p, int depth)
13210 {
13211 enum tree_code code;
13212 if (t == error_mark_node)
13213 return false;
13214
13215 code = TREE_CODE (t);
13216 switch (TREE_CODE_CLASS (code))
13217 {
13218 case tcc_binary:
13219 case tcc_comparison:
13220 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13221 TREE_TYPE (t),
13222 TREE_OPERAND (t, 0),
13223 TREE_OPERAND (t, 1),
13224 strict_overflow_p, depth);
13225
13226 case tcc_unary:
13227 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13228 TREE_TYPE (t),
13229 TREE_OPERAND (t, 0),
13230 strict_overflow_p, depth);
13231
13232 case tcc_constant:
13233 case tcc_declaration:
13234 case tcc_reference:
13235 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13236
13237 default:
13238 break;
13239 }
13240
13241 switch (code)
13242 {
13243 case TRUTH_AND_EXPR:
13244 case TRUTH_OR_EXPR:
13245 case TRUTH_XOR_EXPR:
13246 return tree_binary_nonnegative_warnv_p (TREE_CODE (t),
13247 TREE_TYPE (t),
13248 TREE_OPERAND (t, 0),
13249 TREE_OPERAND (t, 1),
13250 strict_overflow_p, depth);
13251 case TRUTH_NOT_EXPR:
13252 return tree_unary_nonnegative_warnv_p (TREE_CODE (t),
13253 TREE_TYPE (t),
13254 TREE_OPERAND (t, 0),
13255 strict_overflow_p, depth);
13256
13257 case COND_EXPR:
13258 case CONSTRUCTOR:
13259 case OBJ_TYPE_REF:
13260 case ASSERT_EXPR:
13261 case ADDR_EXPR:
13262 case WITH_SIZE_EXPR:
13263 case SSA_NAME:
13264 return tree_single_nonnegative_warnv_p (t, strict_overflow_p, depth);
13265
13266 default:
13267 return tree_invalid_nonnegative_warnv_p (t, strict_overflow_p, depth);
13268 }
13269 }
13270
13271 /* Return true if `t' is known to be non-negative. Handle warnings
13272 about undefined signed overflow. */
13273
13274 bool
13275 tree_expr_nonnegative_p (tree t)
13276 {
13277 bool ret, strict_overflow_p;
13278
13279 strict_overflow_p = false;
13280 ret = tree_expr_nonnegative_warnv_p (t, &strict_overflow_p);
13281 if (strict_overflow_p)
13282 fold_overflow_warning (("assuming signed overflow does not occur when "
13283 "determining that expression is always "
13284 "non-negative"),
13285 WARN_STRICT_OVERFLOW_MISC);
13286 return ret;
13287 }
13288
13289
13290 /* Return true when (CODE OP0) is an address and is known to be nonzero.
13291 For floating point we further ensure that T is not denormal.
13292 Similar logic is present in nonzero_address in rtlanal.h.
13293
13294 If the return value is based on the assumption that signed overflow
13295 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13296 change *STRICT_OVERFLOW_P. */
13297
13298 bool
13299 tree_unary_nonzero_warnv_p (enum tree_code code, tree type, tree op0,
13300 bool *strict_overflow_p)
13301 {
13302 switch (code)
13303 {
13304 case ABS_EXPR:
13305 return tree_expr_nonzero_warnv_p (op0,
13306 strict_overflow_p);
13307
13308 case NOP_EXPR:
13309 {
13310 tree inner_type = TREE_TYPE (op0);
13311 tree outer_type = type;
13312
13313 return (TYPE_PRECISION (outer_type) >= TYPE_PRECISION (inner_type)
13314 && tree_expr_nonzero_warnv_p (op0,
13315 strict_overflow_p));
13316 }
13317 break;
13318
13319 case NON_LVALUE_EXPR:
13320 return tree_expr_nonzero_warnv_p (op0,
13321 strict_overflow_p);
13322
13323 default:
13324 break;
13325 }
13326
13327 return false;
13328 }
13329
13330 /* Return true when (CODE OP0 OP1) is an address and is known to be nonzero.
13331 For floating point we further ensure that T is not denormal.
13332 Similar logic is present in nonzero_address in rtlanal.h.
13333
13334 If the return value is based on the assumption that signed overflow
13335 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13336 change *STRICT_OVERFLOW_P. */
13337
13338 bool
13339 tree_binary_nonzero_warnv_p (enum tree_code code,
13340 tree type,
13341 tree op0,
13342 tree op1, bool *strict_overflow_p)
13343 {
13344 bool sub_strict_overflow_p;
13345 switch (code)
13346 {
13347 case POINTER_PLUS_EXPR:
13348 case PLUS_EXPR:
13349 if (ANY_INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_UNDEFINED (type))
13350 {
13351 /* With the presence of negative values it is hard
13352 to say something. */
13353 sub_strict_overflow_p = false;
13354 if (!tree_expr_nonnegative_warnv_p (op0,
13355 &sub_strict_overflow_p)
13356 || !tree_expr_nonnegative_warnv_p (op1,
13357 &sub_strict_overflow_p))
13358 return false;
13359 /* One of operands must be positive and the other non-negative. */
13360 /* We don't set *STRICT_OVERFLOW_P here: even if this value
13361 overflows, on a twos-complement machine the sum of two
13362 nonnegative numbers can never be zero. */
13363 return (tree_expr_nonzero_warnv_p (op0,
13364 strict_overflow_p)
13365 || tree_expr_nonzero_warnv_p (op1,
13366 strict_overflow_p));
13367 }
13368 break;
13369
13370 case MULT_EXPR:
13371 if (TYPE_OVERFLOW_UNDEFINED (type))
13372 {
13373 if (tree_expr_nonzero_warnv_p (op0,
13374 strict_overflow_p)
13375 && tree_expr_nonzero_warnv_p (op1,
13376 strict_overflow_p))
13377 {
13378 *strict_overflow_p = true;
13379 return true;
13380 }
13381 }
13382 break;
13383
13384 case MIN_EXPR:
13385 sub_strict_overflow_p = false;
13386 if (tree_expr_nonzero_warnv_p (op0,
13387 &sub_strict_overflow_p)
13388 && tree_expr_nonzero_warnv_p (op1,
13389 &sub_strict_overflow_p))
13390 {
13391 if (sub_strict_overflow_p)
13392 *strict_overflow_p = true;
13393 }
13394 break;
13395
13396 case MAX_EXPR:
13397 sub_strict_overflow_p = false;
13398 if (tree_expr_nonzero_warnv_p (op0,
13399 &sub_strict_overflow_p))
13400 {
13401 if (sub_strict_overflow_p)
13402 *strict_overflow_p = true;
13403
13404 /* When both operands are nonzero, then MAX must be too. */
13405 if (tree_expr_nonzero_warnv_p (op1,
13406 strict_overflow_p))
13407 return true;
13408
13409 /* MAX where operand 0 is positive is positive. */
13410 return tree_expr_nonnegative_warnv_p (op0,
13411 strict_overflow_p);
13412 }
13413 /* MAX where operand 1 is positive is positive. */
13414 else if (tree_expr_nonzero_warnv_p (op1,
13415 &sub_strict_overflow_p)
13416 && tree_expr_nonnegative_warnv_p (op1,
13417 &sub_strict_overflow_p))
13418 {
13419 if (sub_strict_overflow_p)
13420 *strict_overflow_p = true;
13421 return true;
13422 }
13423 break;
13424
13425 case BIT_IOR_EXPR:
13426 return (tree_expr_nonzero_warnv_p (op1,
13427 strict_overflow_p)
13428 || tree_expr_nonzero_warnv_p (op0,
13429 strict_overflow_p));
13430
13431 default:
13432 break;
13433 }
13434
13435 return false;
13436 }
13437
13438 /* Return true when T is an address and is known to be nonzero.
13439 For floating point we further ensure that T is not denormal.
13440 Similar logic is present in nonzero_address in rtlanal.h.
13441
13442 If the return value is based on the assumption that signed overflow
13443 is undefined, set *STRICT_OVERFLOW_P to true; otherwise, don't
13444 change *STRICT_OVERFLOW_P. */
13445
13446 bool
13447 tree_single_nonzero_warnv_p (tree t, bool *strict_overflow_p)
13448 {
13449 bool sub_strict_overflow_p;
13450 switch (TREE_CODE (t))
13451 {
13452 case INTEGER_CST:
13453 return !integer_zerop (t);
13454
13455 case ADDR_EXPR:
13456 {
13457 tree base = TREE_OPERAND (t, 0);
13458
13459 if (!DECL_P (base))
13460 base = get_base_address (base);
13461
13462 if (!base)
13463 return false;
13464
13465 /* For objects in symbol table check if we know they are non-zero.
13466 Don't do anything for variables and functions before symtab is built;
13467 it is quite possible that they will be declared weak later. */
13468 if (DECL_P (base) && decl_in_symtab_p (base))
13469 {
13470 struct symtab_node *symbol;
13471
13472 symbol = symtab_node::get_create (base);
13473 if (symbol)
13474 return symbol->nonzero_address ();
13475 else
13476 return false;
13477 }
13478
13479 /* Function local objects are never NULL. */
13480 if (DECL_P (base)
13481 && (DECL_CONTEXT (base)
13482 && TREE_CODE (DECL_CONTEXT (base)) == FUNCTION_DECL
13483 && auto_var_in_fn_p (base, DECL_CONTEXT (base))))
13484 return true;
13485
13486 /* Constants are never weak. */
13487 if (CONSTANT_CLASS_P (base))
13488 return true;
13489
13490 return false;
13491 }
13492
13493 case COND_EXPR:
13494 sub_strict_overflow_p = false;
13495 if (tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 1),
13496 &sub_strict_overflow_p)
13497 && tree_expr_nonzero_warnv_p (TREE_OPERAND (t, 2),
13498 &sub_strict_overflow_p))
13499 {
13500 if (sub_strict_overflow_p)
13501 *strict_overflow_p = true;
13502 return true;
13503 }
13504 break;
13505
13506 default:
13507 break;
13508 }
13509 return false;
13510 }
13511
13512 #define integer_valued_real_p(X) \
13513 _Pragma ("GCC error \"Use RECURSE for recursive calls\"") 0
13514
13515 #define RECURSE(X) \
13516 ((integer_valued_real_p) (X, depth + 1))
13517
13518 /* Return true if the floating point result of (CODE OP0) has an
13519 integer value. We also allow +Inf, -Inf and NaN to be considered
13520 integer values. Return false for signaling NaN.
13521
13522 DEPTH is the current nesting depth of the query. */
13523
13524 bool
13525 integer_valued_real_unary_p (tree_code code, tree op0, int depth)
13526 {
13527 switch (code)
13528 {
13529 case FLOAT_EXPR:
13530 return true;
13531
13532 case ABS_EXPR:
13533 return RECURSE (op0);
13534
13535 CASE_CONVERT:
13536 {
13537 tree type = TREE_TYPE (op0);
13538 if (TREE_CODE (type) == INTEGER_TYPE)
13539 return true;
13540 if (TREE_CODE (type) == REAL_TYPE)
13541 return RECURSE (op0);
13542 break;
13543 }
13544
13545 default:
13546 break;
13547 }
13548 return false;
13549 }
13550
13551 /* Return true if the floating point result of (CODE OP0 OP1) has an
13552 integer value. We also allow +Inf, -Inf and NaN to be considered
13553 integer values. Return false for signaling NaN.
13554
13555 DEPTH is the current nesting depth of the query. */
13556
13557 bool
13558 integer_valued_real_binary_p (tree_code code, tree op0, tree op1, int depth)
13559 {
13560 switch (code)
13561 {
13562 case PLUS_EXPR:
13563 case MINUS_EXPR:
13564 case MULT_EXPR:
13565 case MIN_EXPR:
13566 case MAX_EXPR:
13567 return RECURSE (op0) && RECURSE (op1);
13568
13569 default:
13570 break;
13571 }
13572 return false;
13573 }
13574
13575 /* Return true if the floating point result of calling FNDECL with arguments
13576 ARG0 and ARG1 has an integer value. We also allow +Inf, -Inf and NaN to be
13577 considered integer values. Return false for signaling NaN. If FNDECL
13578 takes fewer than 2 arguments, the remaining ARGn are null.
13579
13580 DEPTH is the current nesting depth of the query. */
13581
13582 bool
13583 integer_valued_real_call_p (combined_fn fn, tree arg0, tree arg1, int depth)
13584 {
13585 switch (fn)
13586 {
13587 CASE_CFN_CEIL:
13588 CASE_CFN_FLOOR:
13589 CASE_CFN_NEARBYINT:
13590 CASE_CFN_RINT:
13591 CASE_CFN_ROUND:
13592 CASE_CFN_TRUNC:
13593 return true;
13594
13595 CASE_CFN_FMIN:
13596 CASE_CFN_FMAX:
13597 return RECURSE (arg0) && RECURSE (arg1);
13598
13599 default:
13600 break;
13601 }
13602 return false;
13603 }
13604
13605 /* Return true if the floating point expression T (a GIMPLE_SINGLE_RHS)
13606 has an integer value. We also allow +Inf, -Inf and NaN to be
13607 considered integer values. Return false for signaling NaN.
13608
13609 DEPTH is the current nesting depth of the query. */
13610
13611 bool
13612 integer_valued_real_single_p (tree t, int depth)
13613 {
13614 switch (TREE_CODE (t))
13615 {
13616 case REAL_CST:
13617 return real_isinteger (TREE_REAL_CST_PTR (t), TYPE_MODE (TREE_TYPE (t)));
13618
13619 case COND_EXPR:
13620 return RECURSE (TREE_OPERAND (t, 1)) && RECURSE (TREE_OPERAND (t, 2));
13621
13622 case SSA_NAME:
13623 /* Limit the depth of recursion to avoid quadratic behavior.
13624 This is expected to catch almost all occurrences in practice.
13625 If this code misses important cases that unbounded recursion
13626 would not, passes that need this information could be revised
13627 to provide it through dataflow propagation. */
13628 return (!name_registered_for_update_p (t)
13629 && depth < PARAM_VALUE (PARAM_MAX_SSA_NAME_QUERY_DEPTH)
13630 && gimple_stmt_integer_valued_real_p (SSA_NAME_DEF_STMT (t),
13631 depth));
13632
13633 default:
13634 break;
13635 }
13636 return false;
13637 }
13638
13639 /* Return true if the floating point expression T (a GIMPLE_INVALID_RHS)
13640 has an integer value. We also allow +Inf, -Inf and NaN to be
13641 considered integer values. Return false for signaling NaN.
13642
13643 DEPTH is the current nesting depth of the query. */
13644
13645 static bool
13646 integer_valued_real_invalid_p (tree t, int depth)
13647 {
13648 switch (TREE_CODE (t))
13649 {
13650 case COMPOUND_EXPR:
13651 case MODIFY_EXPR:
13652 case BIND_EXPR:
13653 return RECURSE (TREE_OPERAND (t, 1));
13654
13655 case SAVE_EXPR:
13656 return RECURSE (TREE_OPERAND (t, 0));
13657
13658 default:
13659 break;
13660 }
13661 return false;
13662 }
13663
13664 #undef RECURSE
13665 #undef integer_valued_real_p
13666
13667 /* Return true if the floating point expression T has an integer value.
13668 We also allow +Inf, -Inf and NaN to be considered integer values.
13669 Return false for signaling NaN.
13670
13671 DEPTH is the current nesting depth of the query. */
13672
13673 bool
13674 integer_valued_real_p (tree t, int depth)
13675 {
13676 if (t == error_mark_node)
13677 return false;
13678
13679 tree_code code = TREE_CODE (t);
13680 switch (TREE_CODE_CLASS (code))
13681 {
13682 case tcc_binary:
13683 case tcc_comparison:
13684 return integer_valued_real_binary_p (code, TREE_OPERAND (t, 0),
13685 TREE_OPERAND (t, 1), depth);
13686
13687 case tcc_unary:
13688 return integer_valued_real_unary_p (code, TREE_OPERAND (t, 0), depth);
13689
13690 case tcc_constant:
13691 case tcc_declaration:
13692 case tcc_reference:
13693 return integer_valued_real_single_p (t, depth);
13694
13695 default:
13696 break;
13697 }
13698
13699 switch (code)
13700 {
13701 case COND_EXPR:
13702 case SSA_NAME:
13703 return integer_valued_real_single_p (t, depth);
13704
13705 case CALL_EXPR:
13706 {
13707 tree arg0 = (call_expr_nargs (t) > 0
13708 ? CALL_EXPR_ARG (t, 0)
13709 : NULL_TREE);
13710 tree arg1 = (call_expr_nargs (t) > 1
13711 ? CALL_EXPR_ARG (t, 1)
13712 : NULL_TREE);
13713 return integer_valued_real_call_p (get_call_combined_fn (t),
13714 arg0, arg1, depth);
13715 }
13716
13717 default:
13718 return integer_valued_real_invalid_p (t, depth);
13719 }
13720 }
13721
13722 /* Given the components of a binary expression CODE, TYPE, OP0 and OP1,
13723 attempt to fold the expression to a constant without modifying TYPE,
13724 OP0 or OP1.
13725
13726 If the expression could be simplified to a constant, then return
13727 the constant. If the expression would not be simplified to a
13728 constant, then return NULL_TREE. */
13729
13730 tree
13731 fold_binary_to_constant (enum tree_code code, tree type, tree op0, tree op1)
13732 {
13733 tree tem = fold_binary (code, type, op0, op1);
13734 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13735 }
13736
13737 /* Given the components of a unary expression CODE, TYPE and OP0,
13738 attempt to fold the expression to a constant without modifying
13739 TYPE or OP0.
13740
13741 If the expression could be simplified to a constant, then return
13742 the constant. If the expression would not be simplified to a
13743 constant, then return NULL_TREE. */
13744
13745 tree
13746 fold_unary_to_constant (enum tree_code code, tree type, tree op0)
13747 {
13748 tree tem = fold_unary (code, type, op0);
13749 return (tem && TREE_CONSTANT (tem)) ? tem : NULL_TREE;
13750 }
13751
13752 /* If EXP represents referencing an element in a constant string
13753 (either via pointer arithmetic or array indexing), return the
13754 tree representing the value accessed, otherwise return NULL. */
13755
13756 tree
13757 fold_read_from_constant_string (tree exp)
13758 {
13759 if ((TREE_CODE (exp) == INDIRECT_REF
13760 || TREE_CODE (exp) == ARRAY_REF)
13761 && TREE_CODE (TREE_TYPE (exp)) == INTEGER_TYPE)
13762 {
13763 tree exp1 = TREE_OPERAND (exp, 0);
13764 tree index;
13765 tree string;
13766 location_t loc = EXPR_LOCATION (exp);
13767
13768 if (TREE_CODE (exp) == INDIRECT_REF)
13769 string = string_constant (exp1, &index);
13770 else
13771 {
13772 tree low_bound = array_ref_low_bound (exp);
13773 index = fold_convert_loc (loc, sizetype, TREE_OPERAND (exp, 1));
13774
13775 /* Optimize the special-case of a zero lower bound.
13776
13777 We convert the low_bound to sizetype to avoid some problems
13778 with constant folding. (E.g. suppose the lower bound is 1,
13779 and its mode is QI. Without the conversion,l (ARRAY
13780 +(INDEX-(unsigned char)1)) becomes ((ARRAY+(-(unsigned char)1))
13781 +INDEX), which becomes (ARRAY+255+INDEX). Oops!) */
13782 if (! integer_zerop (low_bound))
13783 index = size_diffop_loc (loc, index,
13784 fold_convert_loc (loc, sizetype, low_bound));
13785
13786 string = exp1;
13787 }
13788
13789 if (string
13790 && TYPE_MODE (TREE_TYPE (exp)) == TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))
13791 && TREE_CODE (string) == STRING_CST
13792 && TREE_CODE (index) == INTEGER_CST
13793 && compare_tree_int (index, TREE_STRING_LENGTH (string)) < 0
13794 && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (string))))
13795 == MODE_INT)
13796 && (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (string)))) == 1))
13797 return build_int_cst_type (TREE_TYPE (exp),
13798 (TREE_STRING_POINTER (string)
13799 [TREE_INT_CST_LOW (index)]));
13800 }
13801 return NULL;
13802 }
13803
13804 /* Return the tree for neg (ARG0) when ARG0 is known to be either
13805 an integer constant, real, or fixed-point constant.
13806
13807 TYPE is the type of the result. */
13808
13809 static tree
13810 fold_negate_const (tree arg0, tree type)
13811 {
13812 tree t = NULL_TREE;
13813
13814 switch (TREE_CODE (arg0))
13815 {
13816 case INTEGER_CST:
13817 {
13818 bool overflow;
13819 wide_int val = wi::neg (arg0, &overflow);
13820 t = force_fit_type (type, val, 1,
13821 (overflow | TREE_OVERFLOW (arg0))
13822 && !TYPE_UNSIGNED (type));
13823 break;
13824 }
13825
13826 case REAL_CST:
13827 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13828 break;
13829
13830 case FIXED_CST:
13831 {
13832 FIXED_VALUE_TYPE f;
13833 bool overflow_p = fixed_arithmetic (&f, NEGATE_EXPR,
13834 &(TREE_FIXED_CST (arg0)), NULL,
13835 TYPE_SATURATING (type));
13836 t = build_fixed (type, f);
13837 /* Propagate overflow flags. */
13838 if (overflow_p | TREE_OVERFLOW (arg0))
13839 TREE_OVERFLOW (t) = 1;
13840 break;
13841 }
13842
13843 default:
13844 gcc_unreachable ();
13845 }
13846
13847 return t;
13848 }
13849
13850 /* Return the tree for abs (ARG0) when ARG0 is known to be either
13851 an integer constant or real constant.
13852
13853 TYPE is the type of the result. */
13854
13855 tree
13856 fold_abs_const (tree arg0, tree type)
13857 {
13858 tree t = NULL_TREE;
13859
13860 switch (TREE_CODE (arg0))
13861 {
13862 case INTEGER_CST:
13863 {
13864 /* If the value is unsigned or non-negative, then the absolute value
13865 is the same as the ordinary value. */
13866 if (!wi::neg_p (arg0, TYPE_SIGN (type)))
13867 t = arg0;
13868
13869 /* If the value is negative, then the absolute value is
13870 its negation. */
13871 else
13872 {
13873 bool overflow;
13874 wide_int val = wi::neg (arg0, &overflow);
13875 t = force_fit_type (type, val, -1,
13876 overflow | TREE_OVERFLOW (arg0));
13877 }
13878 }
13879 break;
13880
13881 case REAL_CST:
13882 if (REAL_VALUE_NEGATIVE (TREE_REAL_CST (arg0)))
13883 t = build_real (type, real_value_negate (&TREE_REAL_CST (arg0)));
13884 else
13885 t = arg0;
13886 break;
13887
13888 default:
13889 gcc_unreachable ();
13890 }
13891
13892 return t;
13893 }
13894
13895 /* Return the tree for not (ARG0) when ARG0 is known to be an integer
13896 constant. TYPE is the type of the result. */
13897
13898 static tree
13899 fold_not_const (const_tree arg0, tree type)
13900 {
13901 gcc_assert (TREE_CODE (arg0) == INTEGER_CST);
13902
13903 return force_fit_type (type, wi::bit_not (arg0), 0, TREE_OVERFLOW (arg0));
13904 }
13905
13906 /* Given CODE, a relational operator, the target type, TYPE and two
13907 constant operands OP0 and OP1, return the result of the
13908 relational operation. If the result is not a compile time
13909 constant, then return NULL_TREE. */
13910
13911 static tree
13912 fold_relational_const (enum tree_code code, tree type, tree op0, tree op1)
13913 {
13914 int result, invert;
13915
13916 /* From here on, the only cases we handle are when the result is
13917 known to be a constant. */
13918
13919 if (TREE_CODE (op0) == REAL_CST && TREE_CODE (op1) == REAL_CST)
13920 {
13921 const REAL_VALUE_TYPE *c0 = TREE_REAL_CST_PTR (op0);
13922 const REAL_VALUE_TYPE *c1 = TREE_REAL_CST_PTR (op1);
13923
13924 /* Handle the cases where either operand is a NaN. */
13925 if (real_isnan (c0) || real_isnan (c1))
13926 {
13927 switch (code)
13928 {
13929 case EQ_EXPR:
13930 case ORDERED_EXPR:
13931 result = 0;
13932 break;
13933
13934 case NE_EXPR:
13935 case UNORDERED_EXPR:
13936 case UNLT_EXPR:
13937 case UNLE_EXPR:
13938 case UNGT_EXPR:
13939 case UNGE_EXPR:
13940 case UNEQ_EXPR:
13941 result = 1;
13942 break;
13943
13944 case LT_EXPR:
13945 case LE_EXPR:
13946 case GT_EXPR:
13947 case GE_EXPR:
13948 case LTGT_EXPR:
13949 if (flag_trapping_math)
13950 return NULL_TREE;
13951 result = 0;
13952 break;
13953
13954 default:
13955 gcc_unreachable ();
13956 }
13957
13958 return constant_boolean_node (result, type);
13959 }
13960
13961 return constant_boolean_node (real_compare (code, c0, c1), type);
13962 }
13963
13964 if (TREE_CODE (op0) == FIXED_CST && TREE_CODE (op1) == FIXED_CST)
13965 {
13966 const FIXED_VALUE_TYPE *c0 = TREE_FIXED_CST_PTR (op0);
13967 const FIXED_VALUE_TYPE *c1 = TREE_FIXED_CST_PTR (op1);
13968 return constant_boolean_node (fixed_compare (code, c0, c1), type);
13969 }
13970
13971 /* Handle equality/inequality of complex constants. */
13972 if (TREE_CODE (op0) == COMPLEX_CST && TREE_CODE (op1) == COMPLEX_CST)
13973 {
13974 tree rcond = fold_relational_const (code, type,
13975 TREE_REALPART (op0),
13976 TREE_REALPART (op1));
13977 tree icond = fold_relational_const (code, type,
13978 TREE_IMAGPART (op0),
13979 TREE_IMAGPART (op1));
13980 if (code == EQ_EXPR)
13981 return fold_build2 (TRUTH_ANDIF_EXPR, type, rcond, icond);
13982 else if (code == NE_EXPR)
13983 return fold_build2 (TRUTH_ORIF_EXPR, type, rcond, icond);
13984 else
13985 return NULL_TREE;
13986 }
13987
13988 if (TREE_CODE (op0) == VECTOR_CST && TREE_CODE (op1) == VECTOR_CST)
13989 {
13990 unsigned count = VECTOR_CST_NELTS (op0);
13991 tree *elts = XALLOCAVEC (tree, count);
13992 gcc_assert (VECTOR_CST_NELTS (op1) == count
13993 && TYPE_VECTOR_SUBPARTS (type) == count);
13994
13995 for (unsigned i = 0; i < count; i++)
13996 {
13997 tree elem_type = TREE_TYPE (type);
13998 tree elem0 = VECTOR_CST_ELT (op0, i);
13999 tree elem1 = VECTOR_CST_ELT (op1, i);
14000
14001 tree tem = fold_relational_const (code, elem_type,
14002 elem0, elem1);
14003
14004 if (tem == NULL_TREE)
14005 return NULL_TREE;
14006
14007 elts[i] = build_int_cst (elem_type, integer_zerop (tem) ? 0 : -1);
14008 }
14009
14010 return build_vector (type, elts);
14011 }
14012
14013 /* From here on we only handle LT, LE, GT, GE, EQ and NE.
14014
14015 To compute GT, swap the arguments and do LT.
14016 To compute GE, do LT and invert the result.
14017 To compute LE, swap the arguments, do LT and invert the result.
14018 To compute NE, do EQ and invert the result.
14019
14020 Therefore, the code below must handle only EQ and LT. */
14021
14022 if (code == LE_EXPR || code == GT_EXPR)
14023 {
14024 std::swap (op0, op1);
14025 code = swap_tree_comparison (code);
14026 }
14027
14028 /* Note that it is safe to invert for real values here because we
14029 have already handled the one case that it matters. */
14030
14031 invert = 0;
14032 if (code == NE_EXPR || code == GE_EXPR)
14033 {
14034 invert = 1;
14035 code = invert_tree_comparison (code, false);
14036 }
14037
14038 /* Compute a result for LT or EQ if args permit;
14039 Otherwise return T. */
14040 if (TREE_CODE (op0) == INTEGER_CST && TREE_CODE (op1) == INTEGER_CST)
14041 {
14042 if (code == EQ_EXPR)
14043 result = tree_int_cst_equal (op0, op1);
14044 else
14045 result = tree_int_cst_lt (op0, op1);
14046 }
14047 else
14048 return NULL_TREE;
14049
14050 if (invert)
14051 result ^= 1;
14052 return constant_boolean_node (result, type);
14053 }
14054
14055 /* If necessary, return a CLEANUP_POINT_EXPR for EXPR with the
14056 indicated TYPE. If no CLEANUP_POINT_EXPR is necessary, return EXPR
14057 itself. */
14058
14059 tree
14060 fold_build_cleanup_point_expr (tree type, tree expr)
14061 {
14062 /* If the expression does not have side effects then we don't have to wrap
14063 it with a cleanup point expression. */
14064 if (!TREE_SIDE_EFFECTS (expr))
14065 return expr;
14066
14067 /* If the expression is a return, check to see if the expression inside the
14068 return has no side effects or the right hand side of the modify expression
14069 inside the return. If either don't have side effects set we don't need to
14070 wrap the expression in a cleanup point expression. Note we don't check the
14071 left hand side of the modify because it should always be a return decl. */
14072 if (TREE_CODE (expr) == RETURN_EXPR)
14073 {
14074 tree op = TREE_OPERAND (expr, 0);
14075 if (!op || !TREE_SIDE_EFFECTS (op))
14076 return expr;
14077 op = TREE_OPERAND (op, 1);
14078 if (!TREE_SIDE_EFFECTS (op))
14079 return expr;
14080 }
14081
14082 return build1 (CLEANUP_POINT_EXPR, type, expr);
14083 }
14084
14085 /* Given a pointer value OP0 and a type TYPE, return a simplified version
14086 of an indirection through OP0, or NULL_TREE if no simplification is
14087 possible. */
14088
14089 tree
14090 fold_indirect_ref_1 (location_t loc, tree type, tree op0)
14091 {
14092 tree sub = op0;
14093 tree subtype;
14094
14095 STRIP_NOPS (sub);
14096 subtype = TREE_TYPE (sub);
14097 if (!POINTER_TYPE_P (subtype))
14098 return NULL_TREE;
14099
14100 if (TREE_CODE (sub) == ADDR_EXPR)
14101 {
14102 tree op = TREE_OPERAND (sub, 0);
14103 tree optype = TREE_TYPE (op);
14104 /* *&CONST_DECL -> to the value of the const decl. */
14105 if (TREE_CODE (op) == CONST_DECL)
14106 return DECL_INITIAL (op);
14107 /* *&p => p; make sure to handle *&"str"[cst] here. */
14108 if (type == optype)
14109 {
14110 tree fop = fold_read_from_constant_string (op);
14111 if (fop)
14112 return fop;
14113 else
14114 return op;
14115 }
14116 /* *(foo *)&fooarray => fooarray[0] */
14117 else if (TREE_CODE (optype) == ARRAY_TYPE
14118 && type == TREE_TYPE (optype)
14119 && (!in_gimple_form
14120 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14121 {
14122 tree type_domain = TYPE_DOMAIN (optype);
14123 tree min_val = size_zero_node;
14124 if (type_domain && TYPE_MIN_VALUE (type_domain))
14125 min_val = TYPE_MIN_VALUE (type_domain);
14126 if (in_gimple_form
14127 && TREE_CODE (min_val) != INTEGER_CST)
14128 return NULL_TREE;
14129 return build4_loc (loc, ARRAY_REF, type, op, min_val,
14130 NULL_TREE, NULL_TREE);
14131 }
14132 /* *(foo *)&complexfoo => __real__ complexfoo */
14133 else if (TREE_CODE (optype) == COMPLEX_TYPE
14134 && type == TREE_TYPE (optype))
14135 return fold_build1_loc (loc, REALPART_EXPR, type, op);
14136 /* *(foo *)&vectorfoo => BIT_FIELD_REF<vectorfoo,...> */
14137 else if (TREE_CODE (optype) == VECTOR_TYPE
14138 && type == TREE_TYPE (optype))
14139 {
14140 tree part_width = TYPE_SIZE (type);
14141 tree index = bitsize_int (0);
14142 return fold_build3_loc (loc, BIT_FIELD_REF, type, op, part_width, index);
14143 }
14144 }
14145
14146 if (TREE_CODE (sub) == POINTER_PLUS_EXPR
14147 && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
14148 {
14149 tree op00 = TREE_OPERAND (sub, 0);
14150 tree op01 = TREE_OPERAND (sub, 1);
14151
14152 STRIP_NOPS (op00);
14153 if (TREE_CODE (op00) == ADDR_EXPR)
14154 {
14155 tree op00type;
14156 op00 = TREE_OPERAND (op00, 0);
14157 op00type = TREE_TYPE (op00);
14158
14159 /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
14160 if (TREE_CODE (op00type) == VECTOR_TYPE
14161 && type == TREE_TYPE (op00type))
14162 {
14163 HOST_WIDE_INT offset = tree_to_shwi (op01);
14164 tree part_width = TYPE_SIZE (type);
14165 unsigned HOST_WIDE_INT part_widthi = tree_to_shwi (part_width)/BITS_PER_UNIT;
14166 unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
14167 tree index = bitsize_int (indexi);
14168
14169 if (offset / part_widthi < TYPE_VECTOR_SUBPARTS (op00type))
14170 return fold_build3_loc (loc,
14171 BIT_FIELD_REF, type, op00,
14172 part_width, index);
14173
14174 }
14175 /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
14176 else if (TREE_CODE (op00type) == COMPLEX_TYPE
14177 && type == TREE_TYPE (op00type))
14178 {
14179 tree size = TYPE_SIZE_UNIT (type);
14180 if (tree_int_cst_equal (size, op01))
14181 return fold_build1_loc (loc, IMAGPART_EXPR, type, op00);
14182 }
14183 /* ((foo *)&fooarray)[1] => fooarray[1] */
14184 else if (TREE_CODE (op00type) == ARRAY_TYPE
14185 && type == TREE_TYPE (op00type))
14186 {
14187 tree type_domain = TYPE_DOMAIN (op00type);
14188 tree min_val = size_zero_node;
14189 if (type_domain && TYPE_MIN_VALUE (type_domain))
14190 min_val = TYPE_MIN_VALUE (type_domain);
14191 op01 = size_binop_loc (loc, EXACT_DIV_EXPR, op01,
14192 TYPE_SIZE_UNIT (type));
14193 op01 = size_binop_loc (loc, PLUS_EXPR, op01, min_val);
14194 return build4_loc (loc, ARRAY_REF, type, op00, op01,
14195 NULL_TREE, NULL_TREE);
14196 }
14197 }
14198 }
14199
14200 /* *(foo *)fooarrptr => (*fooarrptr)[0] */
14201 if (TREE_CODE (TREE_TYPE (subtype)) == ARRAY_TYPE
14202 && type == TREE_TYPE (TREE_TYPE (subtype))
14203 && (!in_gimple_form
14204 || TREE_CODE (TYPE_SIZE (type)) == INTEGER_CST))
14205 {
14206 tree type_domain;
14207 tree min_val = size_zero_node;
14208 sub = build_fold_indirect_ref_loc (loc, sub);
14209 type_domain = TYPE_DOMAIN (TREE_TYPE (sub));
14210 if (type_domain && TYPE_MIN_VALUE (type_domain))
14211 min_val = TYPE_MIN_VALUE (type_domain);
14212 if (in_gimple_form
14213 && TREE_CODE (min_val) != INTEGER_CST)
14214 return NULL_TREE;
14215 return build4_loc (loc, ARRAY_REF, type, sub, min_val, NULL_TREE,
14216 NULL_TREE);
14217 }
14218
14219 return NULL_TREE;
14220 }
14221
14222 /* Builds an expression for an indirection through T, simplifying some
14223 cases. */
14224
14225 tree
14226 build_fold_indirect_ref_loc (location_t loc, tree t)
14227 {
14228 tree type = TREE_TYPE (TREE_TYPE (t));
14229 tree sub = fold_indirect_ref_1 (loc, type, t);
14230
14231 if (sub)
14232 return sub;
14233
14234 return build1_loc (loc, INDIRECT_REF, type, t);
14235 }
14236
14237 /* Given an INDIRECT_REF T, return either T or a simplified version. */
14238
14239 tree
14240 fold_indirect_ref_loc (location_t loc, tree t)
14241 {
14242 tree sub = fold_indirect_ref_1 (loc, TREE_TYPE (t), TREE_OPERAND (t, 0));
14243
14244 if (sub)
14245 return sub;
14246 else
14247 return t;
14248 }
14249
14250 /* Strip non-trapping, non-side-effecting tree nodes from an expression
14251 whose result is ignored. The type of the returned tree need not be
14252 the same as the original expression. */
14253
14254 tree
14255 fold_ignored_result (tree t)
14256 {
14257 if (!TREE_SIDE_EFFECTS (t))
14258 return integer_zero_node;
14259
14260 for (;;)
14261 switch (TREE_CODE_CLASS (TREE_CODE (t)))
14262 {
14263 case tcc_unary:
14264 t = TREE_OPERAND (t, 0);
14265 break;
14266
14267 case tcc_binary:
14268 case tcc_comparison:
14269 if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14270 t = TREE_OPERAND (t, 0);
14271 else if (!TREE_SIDE_EFFECTS (TREE_OPERAND (t, 0)))
14272 t = TREE_OPERAND (t, 1);
14273 else
14274 return t;
14275 break;
14276
14277 case tcc_expression:
14278 switch (TREE_CODE (t))
14279 {
14280 case COMPOUND_EXPR:
14281 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1)))
14282 return t;
14283 t = TREE_OPERAND (t, 0);
14284 break;
14285
14286 case COND_EXPR:
14287 if (TREE_SIDE_EFFECTS (TREE_OPERAND (t, 1))
14288 || TREE_SIDE_EFFECTS (TREE_OPERAND (t, 2)))
14289 return t;
14290 t = TREE_OPERAND (t, 0);
14291 break;
14292
14293 default:
14294 return t;
14295 }
14296 break;
14297
14298 default:
14299 return t;
14300 }
14301 }
14302
14303 /* Return the value of VALUE, rounded up to a multiple of DIVISOR. */
14304
14305 tree
14306 round_up_loc (location_t loc, tree value, unsigned int divisor)
14307 {
14308 tree div = NULL_TREE;
14309
14310 if (divisor == 1)
14311 return value;
14312
14313 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14314 have to do anything. Only do this when we are not given a const,
14315 because in that case, this check is more expensive than just
14316 doing it. */
14317 if (TREE_CODE (value) != INTEGER_CST)
14318 {
14319 div = build_int_cst (TREE_TYPE (value), divisor);
14320
14321 if (multiple_of_p (TREE_TYPE (value), value, div))
14322 return value;
14323 }
14324
14325 /* If divisor is a power of two, simplify this to bit manipulation. */
14326 if (divisor == (divisor & -divisor))
14327 {
14328 if (TREE_CODE (value) == INTEGER_CST)
14329 {
14330 wide_int val = value;
14331 bool overflow_p;
14332
14333 if ((val & (divisor - 1)) == 0)
14334 return value;
14335
14336 overflow_p = TREE_OVERFLOW (value);
14337 val += divisor - 1;
14338 val &= - (int) divisor;
14339 if (val == 0)
14340 overflow_p = true;
14341
14342 return force_fit_type (TREE_TYPE (value), val, -1, overflow_p);
14343 }
14344 else
14345 {
14346 tree t;
14347
14348 t = build_int_cst (TREE_TYPE (value), divisor - 1);
14349 value = size_binop_loc (loc, PLUS_EXPR, value, t);
14350 t = build_int_cst (TREE_TYPE (value), - (int) divisor);
14351 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14352 }
14353 }
14354 else
14355 {
14356 if (!div)
14357 div = build_int_cst (TREE_TYPE (value), divisor);
14358 value = size_binop_loc (loc, CEIL_DIV_EXPR, value, div);
14359 value = size_binop_loc (loc, MULT_EXPR, value, div);
14360 }
14361
14362 return value;
14363 }
14364
14365 /* Likewise, but round down. */
14366
14367 tree
14368 round_down_loc (location_t loc, tree value, int divisor)
14369 {
14370 tree div = NULL_TREE;
14371
14372 gcc_assert (divisor > 0);
14373 if (divisor == 1)
14374 return value;
14375
14376 /* See if VALUE is already a multiple of DIVISOR. If so, we don't
14377 have to do anything. Only do this when we are not given a const,
14378 because in that case, this check is more expensive than just
14379 doing it. */
14380 if (TREE_CODE (value) != INTEGER_CST)
14381 {
14382 div = build_int_cst (TREE_TYPE (value), divisor);
14383
14384 if (multiple_of_p (TREE_TYPE (value), value, div))
14385 return value;
14386 }
14387
14388 /* If divisor is a power of two, simplify this to bit manipulation. */
14389 if (divisor == (divisor & -divisor))
14390 {
14391 tree t;
14392
14393 t = build_int_cst (TREE_TYPE (value), -divisor);
14394 value = size_binop_loc (loc, BIT_AND_EXPR, value, t);
14395 }
14396 else
14397 {
14398 if (!div)
14399 div = build_int_cst (TREE_TYPE (value), divisor);
14400 value = size_binop_loc (loc, FLOOR_DIV_EXPR, value, div);
14401 value = size_binop_loc (loc, MULT_EXPR, value, div);
14402 }
14403
14404 return value;
14405 }
14406
14407 /* Returns the pointer to the base of the object addressed by EXP and
14408 extracts the information about the offset of the access, storing it
14409 to PBITPOS and POFFSET. */
14410
14411 static tree
14412 split_address_to_core_and_offset (tree exp,
14413 HOST_WIDE_INT *pbitpos, tree *poffset)
14414 {
14415 tree core;
14416 machine_mode mode;
14417 int unsignedp, reversep, volatilep;
14418 HOST_WIDE_INT bitsize;
14419 location_t loc = EXPR_LOCATION (exp);
14420
14421 if (TREE_CODE (exp) == ADDR_EXPR)
14422 {
14423 core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
14424 poffset, &mode, &unsignedp, &reversep,
14425 &volatilep, false);
14426 core = build_fold_addr_expr_loc (loc, core);
14427 }
14428 else
14429 {
14430 core = exp;
14431 *pbitpos = 0;
14432 *poffset = NULL_TREE;
14433 }
14434
14435 return core;
14436 }
14437
14438 /* Returns true if addresses of E1 and E2 differ by a constant, false
14439 otherwise. If they do, E1 - E2 is stored in *DIFF. */
14440
14441 bool
14442 ptr_difference_const (tree e1, tree e2, HOST_WIDE_INT *diff)
14443 {
14444 tree core1, core2;
14445 HOST_WIDE_INT bitpos1, bitpos2;
14446 tree toffset1, toffset2, tdiff, type;
14447
14448 core1 = split_address_to_core_and_offset (e1, &bitpos1, &toffset1);
14449 core2 = split_address_to_core_and_offset (e2, &bitpos2, &toffset2);
14450
14451 if (bitpos1 % BITS_PER_UNIT != 0
14452 || bitpos2 % BITS_PER_UNIT != 0
14453 || !operand_equal_p (core1, core2, 0))
14454 return false;
14455
14456 if (toffset1 && toffset2)
14457 {
14458 type = TREE_TYPE (toffset1);
14459 if (type != TREE_TYPE (toffset2))
14460 toffset2 = fold_convert (type, toffset2);
14461
14462 tdiff = fold_build2 (MINUS_EXPR, type, toffset1, toffset2);
14463 if (!cst_and_fits_in_hwi (tdiff))
14464 return false;
14465
14466 *diff = int_cst_value (tdiff);
14467 }
14468 else if (toffset1 || toffset2)
14469 {
14470 /* If only one of the offsets is non-constant, the difference cannot
14471 be a constant. */
14472 return false;
14473 }
14474 else
14475 *diff = 0;
14476
14477 *diff += (bitpos1 - bitpos2) / BITS_PER_UNIT;
14478 return true;
14479 }
14480
14481 /* Return OFF converted to a pointer offset type suitable as offset for
14482 POINTER_PLUS_EXPR. Use location LOC for this conversion. */
14483 tree
14484 convert_to_ptrofftype_loc (location_t loc, tree off)
14485 {
14486 return fold_convert_loc (loc, sizetype, off);
14487 }
14488
14489 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14490 tree
14491 fold_build_pointer_plus_loc (location_t loc, tree ptr, tree off)
14492 {
14493 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14494 ptr, convert_to_ptrofftype_loc (loc, off));
14495 }
14496
14497 /* Build and fold a POINTER_PLUS_EXPR at LOC offsetting PTR by OFF. */
14498 tree
14499 fold_build_pointer_plus_hwi_loc (location_t loc, tree ptr, HOST_WIDE_INT off)
14500 {
14501 return fold_build2_loc (loc, POINTER_PLUS_EXPR, TREE_TYPE (ptr),
14502 ptr, size_int (off));
14503 }
14504
14505 /* Return a char pointer for a C string if it is a string constant
14506 or sum of string constant and integer constant. */
14507
14508 const char *
14509 c_getstr (tree src)
14510 {
14511 tree offset_node;
14512
14513 src = string_constant (src, &offset_node);
14514 if (src == 0)
14515 return 0;
14516
14517 if (offset_node == 0)
14518 return TREE_STRING_POINTER (src);
14519 else if (!tree_fits_uhwi_p (offset_node)
14520 || compare_tree_int (offset_node, TREE_STRING_LENGTH (src) - 1) > 0)
14521 return 0;
14522
14523 return TREE_STRING_POINTER (src) + tree_to_uhwi (offset_node);
14524 }